Merge branch 'master' into jira/solr-13350
diff --git a/dev-tools/maven/README.maven b/dev-tools/maven/README.maven
index 816bf41..0d9c7b7 100644
--- a/dev-tools/maven/README.maven
+++ b/dev-tools/maven/README.maven
@@ -16,7 +16,7 @@
    The most recently produced nightly Jenkins-built Lucene and Solr Maven
    snapshot artifacts are available in the Apache Snapshot repository here:
 
-      http://repository.apache.org/snapshots
+      https://repository.apache.org/snapshots
 
    An example POM snippet:
 
@@ -27,7 +27,7 @@
          <repository>
            <id>apache.snapshots</id>
            <name>Apache Snapshot Repository</name>
-           <url>http://repository.apache.org/snapshots</url>
+           <url>https://repository.apache.org/snapshots</url>
            <releases>
              <enabled>false</enabled>
            </releases>
@@ -57,12 +57,12 @@
    as in B. above, with the addition of two system properties:
 
       ant -Dm2.repository.id=my-repo-id \
-          -Dm2.repository.url=http://example.org/my/repo \
+          -Dm2.repository.url=https://example.org/my/repo \
           generate-maven-artifacts
 
    The repository ID given in the above command corresponds to a <server>
    entry in either your ~/.m2/settings.xml or ~/.ant/settings.xml.  See
-   <http://maven.apache.org/settings.html#Servers> for more information.
+   <https://maven.apache.org/settings.html#Servers> for more information.
    (Note that as of version 2.1.3, Maven Ant Tasks cannot handle encrypted
    passwords.)
 
diff --git a/dev-tools/maven/pom.xml.template b/dev-tools/maven/pom.xml.template
index 0b90eed..0214783 100644
--- a/dev-tools/maven/pom.xml.template
+++ b/dev-tools/maven/pom.xml.template
@@ -23,7 +23,7 @@
   <parent>
     <groupId>org.apache</groupId>
     <artifactId>apache</artifactId>
-    <version>13</version>
+    <version>21</version>
     <relativePath/>
   </parent>
   <groupId>org.apache.lucene</groupId>
@@ -32,15 +32,15 @@
   <packaging>pom</packaging>
   <name>Grandparent POM for Apache Lucene Core and Apache Solr</name>
   <description>Grandparent POM for Apache Lucene Core and Apache Solr</description>
-  <url>http://lucene.apache.org</url>
+  <url>https://lucene.apache.org</url>
   <modules>
     <module>lucene</module>
     <module>solr</module>
   </modules>
   <properties>
-    <vc-anonymous-base-url>http://git-wip-us.apache.org/repos/asf/lucene-solr.git</vc-anonymous-base-url>
-    <vc-dev-base-url>https://git-wip-us.apache.org/repos/asf/lucene-solr.git</vc-dev-base-url>
-    <vc-browse-base-url>https://git1-us-west.apache.org/repos/asf?p=lucene-solr.git;a=tree</vc-browse-base-url>
+    <vc-anonymous-base-url>https://gitbox.apache.org/repos/asf/lucene-solr.git</vc-anonymous-base-url>
+    <vc-dev-base-url>https://gitbox.apache.org/repos/asf/lucene-solr.git</vc-dev-base-url>
+    <vc-browse-base-url>https://gitbox.apache.org/repos/asf?p=lucene-solr.git</vc-browse-base-url>
     <specification.version>@spec.version@</specification.version>
     <maven.build.timestamp.format>yyyy-MM-dd HH:mm:ss</maven.build.timestamp.format>
     <java.compat.version>11</java.compat.version>
@@ -80,7 +80,7 @@
       <subscribe>general-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>general-unsubscribe@lucene.apache.org</unsubscribe>
       <archive>
-        http://mail-archives.apache.org/mod_mbox/lucene-general/
+        https://mail-archives.apache.org/mod_mbox/lucene-general/
       </archive>
     </mailingList>
     <mailingList>
@@ -88,21 +88,21 @@
       <subscribe>java-user-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>java-user-unsubscribe@lucene.apache.org</unsubscribe>
       <archive>
-        http://mail-archives.apache.org/mod_mbox/lucene-java-user/
+        https://mail-archives.apache.org/mod_mbox/lucene-java-user/
       </archive>
     </mailingList>
     <mailingList>
       <name>Java Developer List</name>
       <subscribe>dev-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>dev-unsubscribe@lucene.apache.org</unsubscribe>
-      <archive>http://mail-archives.apache.org/mod_mbox/lucene-dev/</archive>
+      <archive>https://mail-archives.apache.org/mod_mbox/lucene-dev/</archive>
     </mailingList>
     <mailingList>
       <name>Java Commits List</name>
       <subscribe>commits-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>commits-unsubscribe@lucene.apache.org</unsubscribe>
       <archive>
-        http://mail-archives.apache.org/mod_mbox/lucene-java-commits/
+        https://mail-archives.apache.org/mod_mbox/lucene-java-commits/
       </archive>
     </mailingList>
   </mailingLists>
@@ -122,7 +122,7 @@
     <repository>
       <id>apache.snapshots</id>
       <name>Apache Snapshot Repository</name>
-      <url>http://repository.apache.org/snapshots</url>
+      <url>https://repository.apache.org/snapshots</url>
       <releases>
         <enabled>false</enabled>
       </releases>
@@ -267,7 +267,7 @@
               <tempDir>.</tempDir>
               <java.awt.headless>true</java.awt.headless>
 
-              <!-- See <http://wiki.apache.org/lucene-java/RunningTests>
+              <!-- See <https://cwiki.apache.org/confluence/display/lucene/RunningTests>
                    for a description of the tests.* system properties. -->
 
               <!-- RandomizedTesting library system properties -->
diff --git a/dev-tools/maven/solr/pom.xml.template b/dev-tools/maven/solr/pom.xml.template
index 56aa1c5..827eb26 100644
--- a/dev-tools/maven/solr/pom.xml.template
+++ b/dev-tools/maven/solr/pom.xml.template
@@ -55,21 +55,21 @@
       <subscribe>solr-user-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>solr-user-unsubscribe@lucene.apache.org</unsubscribe>
       <archive>
-        http://mail-archives.apache.org/mod_mbox/solr-user/
+        https://mail-archives.apache.org/mod_mbox/solr-user/
       </archive>
     </mailingList>
     <mailingList>
       <name>Java Developer List</name>
       <subscribe>dev-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>dev-unsubscribe@lucene.apache.org</unsubscribe>
-      <archive>http://mail-archives.apache.org/mod_mbox/lucene-dev/</archive>
+      <archive>https://mail-archives.apache.org/mod_mbox/lucene-dev/</archive>
     </mailingList>
     <mailingList>
       <name>Java Commits List</name>
       <subscribe>commits-subscribe@lucene.apache.org</subscribe>
       <unsubscribe>commits-unsubscribe@lucene.apache.org</unsubscribe>
       <archive>
-        http://mail-archives.apache.org/mod_mbox/lucene-java-commits/
+        https://mail-archives.apache.org/mod_mbox/lucene-java-commits/
       </archive>
     </mailingList>
   </mailingLists>
@@ -78,12 +78,12 @@
     <repository>
       <id>maven-restlet</id>
       <name>Public online Restlet repository</name>
-      <url>http://maven.restlet.org</url>
+      <url>https://maven.restlet.com</url>
     </repository>
     <repository>
       <id>releases.cloudera.com</id>
       <name>Cloudera Releases</name>
-      <url>https://repository.cloudera.com/artifactory/libs-release</url>
+      <url>https://repository.cloudera.com/artifactory/libs-release-local/</url>
     </repository>
   </repositories>
   <build>
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 661377f..07e5776 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -52,6 +52,8 @@
 
 * LUCENE-8937: Avoid agressive stemming on numbers in the FrenchMinimalStemmer.
   (Adrien Gallou via Tomoko Uchida)
+  
+* LUCENE-8984: MoreLikeThis MLT is biased for uncommon fields (Andy Hind via Anshum Gupta)
 
 Bug fixes
 
@@ -62,6 +64,32 @@
 
 * LUCENE-8768: Fix Javadocs build in Java 11. (Namgyu Kim)
 
+======================= Lucene 8.4.0 =======================
+
+API Changes
+---------------------
+(No changes)
+
+New Features
+---------------------
+(No changes)
+
+Improvements
+---------------------
+(No changes)
+
+Optimizations
+---------------------
+(No changes)
+
+Bug Fixes
+---------------------
+(No changes)
+
+Other
+---------------------
+(No changes)
+
 ======================= Lucene 8.3.0 =======================
 
 API Changes
@@ -81,6 +109,10 @@
   And don't call if docFreq <= 0.  The previous implementation survives as deprecated and final.  It's removed in 9.0.
   (Bruno Roustant, David Smiley, Alan Woodward)
 
+* LUCENE-8990: PointValues#estimateDocCount(visitor) estimates the number of documents that would be matched by
+  the given IntersectVisitor. The method is used to compute the cost() of ScorerSuppliers instead of
+  PointValues#estimatePointCount(visitor). (Ignacio Vera, Adrien Grand)
+
 New Features
 
 * LUCENE-8936: Add SpanishMinimalStemFilter (vinod kumar via Tomoko Uchida)
@@ -152,12 +184,18 @@
 * LUCENE-8755: spatial-extras quad and packed quad prefix trees now index points faster.
   (Chongchen Chen, David Smiley)
 
+* LUCENE-8860: add additional leaf node level optimizations in LatLonShapeBoundingBoxQuery.
+  (Igor Motov via Ignacio Vera)
+  
 * LUCENE-8968: Improve performance of WITHIN and DISJOINT queries for Shape queries by
   doing just one pass whenever possible. (Ignacio Vera)
 
 * LUCENE-8939: Introduce shared count based early termination across multiple slices
   (Atri Sharma)
 
+* LUCENE-8980: Blocktree's seekExact now short-circuits false if the term isn't in the min-max range of the segment.
+  Large perf gain for ID/time like data when populated sequentially.  (Guoqiang Jiang)
+
 Bug Fixes
 
 * LUCENE-8755: spatial-extras quad and packed quad prefix trees could throw a
@@ -172,6 +210,14 @@
 
 * LUCENE-8975: Code Cleanup: Use entryset for map iteration wherever possible.
 
+* LUCENE-8993, LUCENE-8807: Changed all repository and download references in build files
+  to HTTPS. (Uwe Schindler)
+
+* LUCENE-8998: Fix OverviewImplTest.testIsOptimized reproducible failure. (Tomoko Uchida)
+
+* LUCENE-8999: LuceneTestCase.expectThrows now propagates assert/assumption failures up to the test
+  w/o wrapping in a new assertion failure unless the caller has explicitly expected them (hossman)
+
 ======================= Lucene 8.2.0 =======================
 
 API Changes
diff --git a/lucene/common-build.xml b/lucene/common-build.xml
index ff2ad42..465975c 100644
--- a/lucene/common-build.xml
+++ b/lucene/common-build.xml
@@ -135,7 +135,25 @@
   <property name="tests.asserts" value="true" />
   <property name="tests.policy" location="${common.dir}/tools/junit4/tests.policy"/>
 
-  <condition property="tests.asserts.args" value="-ea -esa" else="">
+  <condition property="tests.asserts.bug.jdk8205399" value="-da:java.util.HashMap" else="">
+    <!-- LUCENE-8991 / JDK-8205399: HashMap assertion bug until Java 12 -->
+    <and>
+      <or>
+        <contains string="${java.vm.name}" substring="hotspot" casesensitive="false"/>
+        <contains string="${java.vm.name}" substring="openjdk" casesensitive="false"/>
+        <contains string="${java.vm.name}" substring="jrockit" casesensitive="false"/>
+      </or>
+      <or>
+        <equals arg1="${java.specification.version}" arg2="1.8"/>
+        <equals arg1="${java.specification.version}" arg2="9"/>
+        <equals arg1="${java.specification.version}" arg2="10"/>
+        <equals arg1="${java.specification.version}" arg2="11"/>
+      </or>
+      <isfalse value="${tests.asserts.hashmap}" />
+    </and>
+  </condition>
+  
+  <condition property="tests.asserts.args" value="-ea -esa ${tests.asserts.bug.jdk8205399}" else="">
     <istrue value="${tests.asserts}"/>
   </condition>
 
diff --git a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java
index 92888d0..587aaeb 100644
--- a/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java
+++ b/lucene/core/src/java/org/apache/lucene/codecs/blocktree/SegmentTermsEnum.java
@@ -321,6 +321,10 @@
       throw new IllegalStateException("terms index was not loaded");
     }
 
+    if (fr.size() > 0 && (target.compareTo(fr.getMin()) < 0 || target.compareTo(fr.getMax()) > 0)) {
+        return false;
+    }
+
     term.grow(1 + target.length);
 
     assert clearEOF();
diff --git a/lucene/core/src/java/org/apache/lucene/document/FeatureField.java b/lucene/core/src/java/org/apache/lucene/document/FeatureField.java
index 229e057..2ca048c 100644
--- a/lucene/core/src/java/org/apache/lucene/document/FeatureField.java
+++ b/lucene/core/src/java/org/apache/lucene/document/FeatureField.java
@@ -197,7 +197,7 @@
     }
   }
 
-  private static final int MAX_FREQ = Float.floatToIntBits(Float.MAX_VALUE) >>> 15;
+  static final int MAX_FREQ = Float.floatToIntBits(Float.MAX_VALUE) >>> 15;
 
   static float decodeFeatureValue(float freq) {
     if (freq > MAX_FREQ) {
diff --git a/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java b/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
index 5e08b45..79fabf3 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/document/LatLonPointDistanceQuery.java
@@ -177,7 +177,7 @@
           @Override
           public long cost() {
             if (cost == -1) {
-              cost = values.estimatePointCount(visitor);
+              cost = values.estimateDocCount(visitor);
             }
             assert cost >= 0;
             return cost;
diff --git a/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java b/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
index 6d9d4ea..4be40bb 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/document/LatLonPointInPolygonQuery.java
@@ -190,7 +190,7 @@
           public long cost() {
             if (cost == -1) {
                // Computing the cost may be expensive, so only do it if necessary
-              cost = values.estimatePointCount(visitor);
+              cost = values.estimateDocCount(visitor);
               assert cost >= 0;
             }
             return cost;
diff --git a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
index e6baa32..28a6a59 100644
--- a/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
+++ b/lucene/core/src/java/org/apache/lucene/document/LongPoint.java
@@ -20,12 +20,12 @@
 import java.util.Collection;
 
 import org.apache.lucene.index.PointValues;
-import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.search.BooleanQuery;
 import org.apache.lucene.search.BoostQuery;
 import org.apache.lucene.search.PointInSetQuery;
 import org.apache.lucene.search.PointRangeQuery;
 import org.apache.lucene.search.Query;
+import org.apache.lucene.search.BooleanClause.Occur;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.NumericUtils;
 
diff --git a/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java b/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java
index 6cba7de..e903b00 100644
--- a/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/document/RangeFieldQuery.java
@@ -361,7 +361,7 @@
             public long cost() {
               if (cost == -1) {
                 // Computing the cost may be expensive, so only do it if necessary
-                cost = values.estimatePointCount(visitor);
+                cost = values.estimateDocCount(visitor);
                 assert cost >= 0;
               }
               return cost;
diff --git a/lucene/core/src/java/org/apache/lucene/index/PointValues.java b/lucene/core/src/java/org/apache/lucene/index/PointValues.java
index 9aa0e5d..87ac802 100644
--- a/lucene/core/src/java/org/apache/lucene/index/PointValues.java
+++ b/lucene/core/src/java/org/apache/lucene/index/PointValues.java
@@ -233,9 +233,35 @@
 
   /** Estimate the number of points that would be visited by {@link #intersect}
    * with the given {@link IntersectVisitor}. This should run many times faster
+   * than {@link #intersect(IntersectVisitor)}. */
+  public abstract long estimatePointCount(IntersectVisitor visitor);
+
+  /** Estimate the number of documents that would be matched by {@link #intersect}
+   * with the given {@link IntersectVisitor}. This should run many times faster
    * than {@link #intersect(IntersectVisitor)}.
    * @see DocIdSetIterator#cost */
-  public abstract long estimatePointCount(IntersectVisitor visitor);
+  public long estimateDocCount(IntersectVisitor visitor) {
+    long estimatedPointCount = estimatePointCount(visitor);
+    int docCount = getDocCount();
+    double size = size();
+    if (estimatedPointCount >= size) {
+      // match all docs
+      return docCount;
+    } else if (size == docCount || estimatedPointCount == 0L ) {
+      // if the point count estimate is 0 or we have only single values
+      // return this estimate
+      return  estimatedPointCount;
+    } else {
+      // in case of multi values estimate the number of docs using the solution provided in
+      // https://math.stackexchange.com/questions/1175295/urn-problem-probability-of-drawing-balls-of-k-unique-colors
+      // then approximate the solution for points per doc << size() which results in the expression
+      // D * (1 - ((N - n) / N)^(N/D))
+      // where D is the total number of docs, N the total number of points and n the estimated point count
+      long docEstimate = (long) (docCount * (1d - Math.pow((size - estimatedPointCount) / size, size / docCount)));
+      return docEstimate == 0L ? 1L : docEstimate;
+    }
+  }
+
 
   /** Returns minimum value for each dimension, packed, or null if {@link #size} is <code>0</code> */
   public abstract byte[] getMinPackedValue() throws IOException;
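
   [Editorial note, not part of the patch] A minimal, self-contained sketch of the doc-count
   approximation that the new estimateDocCount method uses, D * (1 - ((N - n) / N)^(N/D)),
   with hypothetical example numbers chosen only for illustration; the guard cases mirror
   the implementation in the hunk above.

    public class DocCountEstimateSketch {
      public static void main(String[] args) {
        // Hypothetical inputs: D = docs carrying points, N = total points, n = estimated matching points.
        int docCount = 1000;              // D, as returned by getDocCount()
        double size = 5000;               // N, as returned by size()
        long estimatedPointCount = 250;   // n, as returned by estimatePointCount(visitor)

        long docs;
        if (estimatedPointCount >= size) {
          docs = docCount;                // every point matches, so every doc matches
        } else if (size == docCount || estimatedPointCount == 0L) {
          docs = estimatedPointCount;     // single-valued field, or nothing matches
        } else {
          // D * (1 - ((N - n) / N)^(N/D)), the urn-problem approximation referenced in the patch
          docs = (long) (docCount * (1d - Math.pow((size - estimatedPointCount) / size, size / docCount)));
          if (docs == 0L) {
            docs = 1L;                    // never report 0 for a non-empty match
          }
        }
        System.out.println(docs);         // prints 226 for these inputs
      }
    }
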
diff --git a/lucene/core/src/java/org/apache/lucene/search/BottomValueChecker.java b/lucene/core/src/java/org/apache/lucene/search/BottomValueChecker.java
new file mode 100644
index 0000000..174de35
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/search/BottomValueChecker.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.search;
+
+/**
+ * Maintains the bottom value across multiple collectors
+ */
+abstract class BottomValueChecker {
+  /** Maintains global bottom score as the maximum of all bottom scores */
+  private static class MaximumBottomScoreChecker extends BottomValueChecker {
+    private volatile float maxMinScore;
+
+    @Override
+    public void updateThreadLocalBottomValue(float value) {
+      if (value <= maxMinScore) {
+        return;
+      }
+      synchronized (this) {
+        if (value > maxMinScore) {
+          maxMinScore = value;
+        }
+      }
+    }
+
+    @Override
+    public float getBottomValue() {
+      return maxMinScore;
+    }
+  }
+
+  public static BottomValueChecker createMaxBottomScoreChecker() {
+    return new MaximumBottomScoreChecker();
+  }
+
+  public abstract void updateThreadLocalBottomValue(float value);
+  public abstract float getBottomValue();
+}
\ No newline at end of file
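
   [Editorial note, not part of the patch] The concurrency idiom in MaximumBottomScoreChecker
   (cheap volatile read on the fast path, re-check under a lock before writing) can be tried in
   isolation; the sketch below uses hypothetical names and is only an illustration of the pattern.

    class MaxTracker {
      private volatile float max;     // same role as maxMinScore above

      void update(float value) {
        if (value <= max) {
          return;                     // fast path: volatile read only
        }
        synchronized (this) {         // slow path: re-check under the lock before writing
          if (value > max) {
            max = value;
          }
        }
      }

      float get() {
        return max;
      }

      public static void main(String[] args) throws InterruptedException {
        MaxTracker tracker = new MaxTracker();
        Thread a = new Thread(() -> tracker.update(1.5f));
        Thread b = new Thread(() -> tracker.update(2.0f));
        a.start(); b.start();
        a.join(); b.join();
        System.out.println(tracker.get());  // always 2.0, regardless of thread scheduling
      }
    }
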
diff --git a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
index 7130059..e0f0cdf 100644
--- a/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
+++ b/lucene/core/src/java/org/apache/lucene/search/IndexSearcher.java
@@ -31,6 +31,7 @@
 import java.util.concurrent.Executor;
 import java.util.concurrent.Future;
 import java.util.concurrent.FutureTask;
+import java.util.concurrent.RejectedExecutionException;
 
 import org.apache.lucene.document.Document;
 import org.apache.lucene.index.DirectoryReader;
@@ -468,9 +469,12 @@
 
       private final HitsThresholdChecker hitsThresholdChecker = (executor == null || leafSlices.length <= 1) ? HitsThresholdChecker.create(TOTAL_HITS_THRESHOLD) :
           HitsThresholdChecker.createShared(TOTAL_HITS_THRESHOLD);
+
+      private final BottomValueChecker bottomValueChecker = BottomValueChecker.createMaxBottomScoreChecker();
+
       @Override
       public TopScoreDocCollector newCollector() throws IOException {
-        return TopScoreDocCollector.create(cappedNumHits, after, hitsThresholdChecker);
+        return TopScoreDocCollector.create(cappedNumHits, after, hitsThresholdChecker, bottomValueChecker);
       }
 
       @Override
@@ -664,8 +668,20 @@
           search(Arrays.asList(leaves), weight, collector);
           return collector;
         });
-        executor.execute(task);
-        topDocsFutures.add(task);
+        boolean executedOnCallerThread = false;
+        try {
+          executor.execute(task);
+        } catch (RejectedExecutionException e) {
+          // Execute on caller thread
+          search(Arrays.asList(leaves), weight, collector);
+          topDocsFutures.add(CompletableFuture.completedFuture(collector));
+          executedOnCallerThread = true;
+        }
+
+        // Do not add the task's future if it was not used
+        if (executedOnCallerThread == false) {
+          topDocsFutures.add(task);
+        }
       }
       final LeafReaderContext[] leaves = leafSlices[leafSlices.length - 1].leaves;
       final C collector = collectors.get(leafSlices.length - 1);
diff --git a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
index b8914c7..c8b1623 100644
--- a/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
+++ b/lucene/core/src/java/org/apache/lucene/search/PointRangeQuery.java
@@ -316,7 +316,7 @@
             public long cost() {
               if (cost == -1) {
                 // Computing the cost may be expensive, so only do it if necessary
-                cost = values.estimatePointCount(visitor);
+                cost = values.estimateDocCount(visitor);
                 assert cost >= 0;
               }
               return cost;
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
index eb855c1..10c4fc4 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopScoreDocCollector.java
@@ -49,8 +49,9 @@
 
   private static class SimpleTopScoreDocCollector extends TopScoreDocCollector {
 
-    SimpleTopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker) {
-      super(numHits, hitsThresholdChecker);
+    SimpleTopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker,
+                               BottomValueChecker bottomValueChecker) {
+      super(numHits, hitsThresholdChecker, bottomValueChecker);
     }
 
     @Override
@@ -100,8 +101,9 @@
     private final ScoreDoc after;
     private int collectedHits;
 
-    PagingTopScoreDocCollector(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker) {
-      super(numHits, hitsThresholdChecker);
+    PagingTopScoreDocCollector(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker,
+                               BottomValueChecker bottomValueChecker) {
+      super(numHits, hitsThresholdChecker, bottomValueChecker);
       this.after = after;
       this.collectedHits = 0;
     }
@@ -195,10 +197,11 @@
    * objects.
    */
   public static TopScoreDocCollector create(int numHits, ScoreDoc after, int totalHitsThreshold) {
-    return create(numHits, after, HitsThresholdChecker.create(totalHitsThreshold));
+    return create(numHits, after, HitsThresholdChecker.create(totalHitsThreshold), null);
   }
 
-  static TopScoreDocCollector create(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker) {
+  static TopScoreDocCollector create(int numHits, ScoreDoc after, HitsThresholdChecker hitsThresholdChecker,
+                                     BottomValueChecker bottomValueChecker) {
 
     if (numHits <= 0) {
       throw new IllegalArgumentException("numHits must be > 0; please use TotalHitCountCollector if you just need the total hit count");
@@ -209,9 +212,9 @@
     }
 
     if (after == null) {
-      return new SimpleTopScoreDocCollector(numHits, hitsThresholdChecker);
+      return new SimpleTopScoreDocCollector(numHits, hitsThresholdChecker, bottomValueChecker);
     } else {
-      return new PagingTopScoreDocCollector(numHits, after, hitsThresholdChecker);
+      return new PagingTopScoreDocCollector(numHits, after, hitsThresholdChecker, bottomValueChecker);
     }
   }
 
@@ -223,10 +226,11 @@
     return new CollectorManager<>() {
 
       private final HitsThresholdChecker hitsThresholdChecker = HitsThresholdChecker.createShared(totalHitsThreshold);
+      private final BottomValueChecker bottomValueChecker = BottomValueChecker.createMaxBottomScoreChecker();
 
       @Override
       public TopScoreDocCollector newCollector() throws IOException {
-        return TopScoreDocCollector.create(numHits, after, hitsThresholdChecker);
+        return TopScoreDocCollector.create(numHits, after, hitsThresholdChecker, bottomValueChecker);
       }
 
       @Override
@@ -244,9 +248,11 @@
 
   ScoreDoc pqTop;
   final HitsThresholdChecker hitsThresholdChecker;
+  final BottomValueChecker bottomValueChecker;
 
   // prevents instantiation
-  TopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker) {
+  TopScoreDocCollector(int numHits, HitsThresholdChecker hitsThresholdChecker,
+                       BottomValueChecker bottomValueChecker) {
     super(new HitQueue(numHits, true));
     assert hitsThresholdChecker != null;
 
@@ -254,6 +260,7 @@
     // that at this point top() is already initialized.
     pqTop = pq.top();
     this.hitsThresholdChecker = hitsThresholdChecker;
+    this.bottomValueChecker = bottomValueChecker;
   }
 
   @Override
@@ -272,11 +279,28 @@
 
   protected void updateMinCompetitiveScore(Scorable scorer) throws IOException {
     if (hitsThresholdChecker.isThresholdReached()
-          && pqTop != null
-          && pqTop.score != Float.NEGATIVE_INFINITY) { // -Infinity is the score of sentinels
+          && ((bottomValueChecker != null && bottomValueChecker.getBottomValue() > 0)
+          || (pqTop != null && pqTop.score != Float.NEGATIVE_INFINITY))) { // -Infinity is the score of sentinels
       // since we tie-break on doc id and collect in doc id order, we can require
       // the next float
-      scorer.setMinCompetitiveScore(Math.nextUp(pqTop.score));
+      float bottomScore = Float.NEGATIVE_INFINITY;
+
+      if (pqTop != null && pqTop.score != Float.NEGATIVE_INFINITY) {
+        bottomScore = Math.nextUp(pqTop.score);
+
+        if (bottomValueChecker != null) {
+          bottomValueChecker.updateThreadLocalBottomValue(pqTop.score);
+        }
+      }
+
+      // The global bottom score can only be greater than or equal to the local bottom score.
+      // Updating the global bottom score for this hit before getting here should
+      // ensure that.
+      if (bottomValueChecker != null && bottomValueChecker.getBottomValue() > bottomScore) {
+        bottomScore = bottomValueChecker.getBottomValue();
+      }
+
+      scorer.setMinCompetitiveScore(bottomScore);
       totalHitsRelation = TotalHits.Relation.GREATER_THAN_OR_EQUAL_TO;
     }
   }
diff --git a/lucene/core/src/java/org/apache/lucene/util/Version.java b/lucene/core/src/java/org/apache/lucene/util/Version.java
index 3e8ca80..81632d6 100644
--- a/lucene/core/src/java/org/apache/lucene/util/Version.java
+++ b/lucene/core/src/java/org/apache/lucene/util/Version.java
@@ -68,6 +68,13 @@
   public static final Version LUCENE_8_3_0 = new Version(8, 3, 0);
 
   /**
+   * Match settings and bugs in Lucene's 8.4.0 release.
+   * @deprecated Use latest
+   */
+  @Deprecated
+  public static final Version LUCENE_8_4_0 = new Version(8, 4, 0);
+
+  /**
    * Match settings and bugs in Lucene's 9.0.0 release.
    * <p>
    * Use this to get the latest &amp; greatest settings, bug
diff --git a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java b/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
index 7275f65..87b4702 100644
--- a/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
+++ b/lucene/core/src/test/org/apache/lucene/codecs/lucene60/TestLucene60PointsFormat.java
@@ -109,15 +109,19 @@
     byte[] uniquePointValue = new byte[3];
     random().nextBytes(uniquePointValue);
     final int numDocs = atLeast(10000); // make sure we have several leaves
+    final boolean multiValues = random().nextBoolean();
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
       if (i == numDocs / 2) {
         doc.add(new BinaryPoint("f", uniquePointValue));
       } else {
-        do {
-          random().nextBytes(pointValue);
-        } while (Arrays.equals(pointValue, uniquePointValue));
-        doc.add(new BinaryPoint("f", pointValue));
+        final int numValues = (multiValues) ? TestUtil.nextInt(random(), 2, 100) : 1;
+        for (int j = 0; j < numValues; j ++) {
+          do {
+            random().nextBytes(pointValue);
+          } while (Arrays.equals(pointValue, uniquePointValue));
+          doc.add(new BinaryPoint("f", pointValue));
+        }
       }
       w.addDocument(doc);
     }
@@ -128,58 +132,72 @@
     PointValues points = lr.getPointValues("f");
 
     // If all points match, then the point count is numLeaves * maxPointsInLeafNode
-    final int numLeaves = (int) Math.ceil((double) numDocs / maxPointsInLeafNode);
-    assertEquals(numLeaves * maxPointsInLeafNode,
-        points.estimatePointCount(new IntersectVisitor() {
-          @Override
-          public void visit(int docID, byte[] packedValue) throws IOException {}
-          
-          @Override
-          public void visit(int docID) throws IOException {}
-          
-          @Override
-          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-            return Relation.CELL_INSIDE_QUERY;
-          }
-        }));
+    final int numLeaves = (int) Math.ceil((double) points.size() / maxPointsInLeafNode);
+
+    IntersectVisitor allPointsVisitor = new IntersectVisitor() {
+      @Override
+      public void visit(int docID, byte[] packedValue) throws IOException {}
+
+      @Override
+      public void visit(int docID) throws IOException {}
+
+      @Override
+      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+        return Relation.CELL_INSIDE_QUERY;
+      }
+    };
+
+    assertEquals(numLeaves * maxPointsInLeafNode, points.estimatePointCount(allPointsVisitor));
+    assertEquals(numDocs, points.estimateDocCount(allPointsVisitor));
+
+    IntersectVisitor noPointsVisitor = new IntersectVisitor() {
+      @Override
+      public void visit(int docID, byte[] packedValue) throws IOException {}
+
+      @Override
+      public void visit(int docID) throws IOException {}
+
+      @Override
+      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+        return Relation.CELL_OUTSIDE_QUERY;
+      }
+    };
 
     // Return 0 if no points match
-    assertEquals(0,
-        points.estimatePointCount(new IntersectVisitor() {
-          @Override
-          public void visit(int docID, byte[] packedValue) throws IOException {}
-          
-          @Override
-          public void visit(int docID) throws IOException {}
-          
-          @Override
-          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-            return Relation.CELL_OUTSIDE_QUERY;
-          }
-        }));
+    assertEquals(0, points.estimatePointCount(noPointsVisitor));
+    assertEquals(0, points.estimateDocCount(noPointsVisitor));
+
+    IntersectVisitor onePointMatchVisitor = new IntersectVisitor() {
+      @Override
+      public void visit(int docID, byte[] packedValue) throws IOException {}
+
+      @Override
+      public void visit(int docID) throws IOException {}
+
+      @Override
+      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+        if (Arrays.compareUnsigned(uniquePointValue, 0, 3, maxPackedValue, 0, 3) > 0 ||
+            Arrays.compareUnsigned(uniquePointValue, 0, 3, minPackedValue, 0, 3) < 0) {
+          return Relation.CELL_OUTSIDE_QUERY;
+        }
+        return Relation.CELL_CROSSES_QUERY;
+      }
+    };
 
     // If only one point matches, then the point count is (maxPointsInLeafNode + 1) / 2
     // in general, or maybe 2x that if the point is a split value
-    final long pointCount = points.estimatePointCount(new IntersectVisitor() {
-          @Override
-          public void visit(int docID, byte[] packedValue) throws IOException {}
-          
-          @Override
-          public void visit(int docID) throws IOException {}
-          
-          @Override
-          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-            if (Arrays.compareUnsigned(uniquePointValue, 0, 3, maxPackedValue, 0, 3) > 0 ||
-                Arrays.compareUnsigned(uniquePointValue, 0, 3, minPackedValue, 0, 3) < 0) {
-              return Relation.CELL_OUTSIDE_QUERY;
-            }
-            return Relation.CELL_CROSSES_QUERY;
-          }
-        });
+    final long pointCount = points.estimatePointCount(onePointMatchVisitor);
     assertTrue(""+pointCount,
         pointCount == (maxPointsInLeafNode + 1) / 2 || // common case
         pointCount == 2*((maxPointsInLeafNode + 1) / 2)); // if the point is a split value
 
+    final long docCount = points.estimateDocCount(onePointMatchVisitor);
+
+    if (multiValues) {
+      assertEquals(docCount, (long) (docCount * (1d - Math.pow( (numDocs -  pointCount) / points.size() , points.size() / docCount))));
+    } else {
+      assertEquals(pointCount, docCount);
+    }
     r.close();
     dir.close();
   }
@@ -198,16 +216,20 @@
     random().nextBytes(uniquePointValue[0]);
     random().nextBytes(uniquePointValue[1]);
     final int numDocs = atLeast(10000); // make sure we have several leaves
+    final boolean multiValues = random().nextBoolean();
     for (int i = 0; i < numDocs; ++i) {
       Document doc = new Document();
       if (i == numDocs / 2) {
         doc.add(new BinaryPoint("f", uniquePointValue));
       } else {
-        do {
-          random().nextBytes(pointValue[0]);
-          random().nextBytes(pointValue[1]);
-        } while (Arrays.equals(pointValue[0], uniquePointValue[0]) || Arrays.equals(pointValue[1], uniquePointValue[1]));
-        doc.add(new BinaryPoint("f", pointValue));
+        final int numValues = (multiValues) ? TestUtil.nextInt(random(), 2, 100) : 1;
+        for (int j = 0; j < numValues; j ++) {
+          do {
+            random().nextBytes(pointValue[0]);
+            random().nextBytes(pointValue[1]);
+          } while (Arrays.equals(pointValue[0], uniquePointValue[0]) || Arrays.equals(pointValue[1], uniquePointValue[1]));
+          doc.add(new BinaryPoint("f", pointValue));
+        }
       }
       w.addDocument(doc);
     }
@@ -218,67 +240,161 @@
     PointValues points = lr.getPointValues("f");
 
     // With >1 dims, the tree is balanced
-    int actualMaxPointsInLeafNode = numDocs;
+    long actualMaxPointsInLeafNode = points.size();
     while (actualMaxPointsInLeafNode > maxPointsInLeafNode) {
       actualMaxPointsInLeafNode = (actualMaxPointsInLeafNode + 1) / 2;
     }
 
+    IntersectVisitor allPointsVisitor = new IntersectVisitor() {
+      @Override
+      public void visit(int docID, byte[] packedValue) throws IOException {}
+
+      @Override
+      public void visit(int docID) throws IOException {}
+
+      @Override
+      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+        return Relation.CELL_INSIDE_QUERY;
+      }
+    };
+
     // If all points match, then the point count is numLeaves * maxPointsInLeafNode
-    final int numLeaves = Integer.highestOneBit((numDocs - 1) / actualMaxPointsInLeafNode) << 1;
-    assertEquals(numLeaves * actualMaxPointsInLeafNode,
-        points.estimatePointCount(new IntersectVisitor() {
-          @Override
-          public void visit(int docID, byte[] packedValue) throws IOException {}
-          
-          @Override
-          public void visit(int docID) throws IOException {}
-          
-          @Override
-          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-            return Relation.CELL_INSIDE_QUERY;
-          }
-        }));
+    final int numLeaves = (int) Long.highestOneBit( ((points.size() - 1) / actualMaxPointsInLeafNode)) << 1;
+
+    assertEquals(numLeaves * actualMaxPointsInLeafNode, points.estimatePointCount(allPointsVisitor));
+    assertEquals(numDocs, points.estimateDocCount(allPointsVisitor));
+
+    IntersectVisitor noPointsVisitor = new IntersectVisitor() {
+      @Override
+      public void visit(int docID, byte[] packedValue) throws IOException {}
+
+      @Override
+      public void visit(int docID) throws IOException {}
+
+      @Override
+      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+        return Relation.CELL_OUTSIDE_QUERY;
+      }
+    };
 
     // Return 0 if no points match
-    assertEquals(0,
-        points.estimatePointCount(new IntersectVisitor() {
-          @Override
-          public void visit(int docID, byte[] packedValue) throws IOException {}
-          
-          @Override
-          public void visit(int docID) throws IOException {}
-          
-          @Override
-          public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+    assertEquals(0, points.estimatePointCount(noPointsVisitor));
+    assertEquals(0, points.estimateDocCount(noPointsVisitor));
+
+    IntersectVisitor onePointMatchVisitor = new IntersectVisitor() {
+      @Override
+      public void visit(int docID, byte[] packedValue) throws IOException {}
+
+      @Override
+      public void visit(int docID) throws IOException {}
+
+      @Override
+      public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
+        for (int dim = 0; dim < 2; ++dim) {
+          if (Arrays.compareUnsigned(uniquePointValue[dim], 0, 3, maxPackedValue, dim * 3, dim * 3 + 3) > 0 ||
+              Arrays.compareUnsigned(uniquePointValue[dim], 0, 3, minPackedValue, dim * 3, dim * 3 + 3) < 0) {
             return Relation.CELL_OUTSIDE_QUERY;
           }
-        }));
-
+        }
+        return Relation.CELL_CROSSES_QUERY;
+      }
+    };
     // If only one point matches, then the point count is (actualMaxPointsInLeafNode + 1) / 2
     // in general, or maybe 2x that if the point is a split value
-    final long pointCount = points.estimatePointCount(new IntersectVisitor() {
-        @Override
-        public void visit(int docID, byte[] packedValue) throws IOException {}
-        
-        @Override
-        public void visit(int docID) throws IOException {}
-        
-        @Override
-        public Relation compare(byte[] minPackedValue, byte[] maxPackedValue) {
-          for (int dim = 0; dim < 2; ++dim) {
-            if (Arrays.compareUnsigned(uniquePointValue[dim], 0, 3, maxPackedValue, dim * 3, dim * 3 + 3) > 0 ||
-                Arrays.compareUnsigned(uniquePointValue[dim], 0, 3, minPackedValue, dim * 3, dim * 3 + 3) < 0) {
-              return Relation.CELL_OUTSIDE_QUERY;
-            }
-          }
-          return Relation.CELL_CROSSES_QUERY;
-        }
-      });
+    final long pointCount = points.estimatePointCount(onePointMatchVisitor);
     assertTrue(""+pointCount,
         pointCount == (actualMaxPointsInLeafNode + 1) / 2 || // common case
         pointCount == 2*((actualMaxPointsInLeafNode + 1) / 2)); // if the point is a split value
 
+    final long docCount = points.estimateDocCount(onePointMatchVisitor);
+    if (multiValues) {
+      assertEquals(docCount, (long) (docCount * (1d - Math.pow( (numDocs -  pointCount) / points.size() , points.size() / docCount))));
+    } else {
+      assertEquals(pointCount, docCount);
+    }
     r.close();
     dir.close();
   }
+
+  public void testDocCountEdgeCases() {
+    PointValues values = getPointValues(Long.MAX_VALUE, 1, Long.MAX_VALUE);
+    long docs = values.estimateDocCount(null);
+    assertEquals(1, docs);
+    values = getPointValues(Long.MAX_VALUE, 1, 1);
+    docs = values.estimateDocCount(null);
+    assertEquals(1, docs);
+    values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE);
+    docs = values.estimateDocCount(null);
+    assertEquals(Integer.MAX_VALUE, docs);
+    values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE / 2);
+    docs = values.estimateDocCount(null);
+    assertEquals(Integer.MAX_VALUE, docs);
+    values = getPointValues(Long.MAX_VALUE, Integer.MAX_VALUE, 1);
+    docs = values.estimateDocCount(null);
+    assertEquals(1, docs);
+  }
+
+  public void testRandomDocCount() {
+    for (int i = 0; i < 100; i++) {
+      long size = TestUtil.nextLong(random(), 1, Long.MAX_VALUE);
+      int maxDoc = (size > Integer.MAX_VALUE) ? Integer.MAX_VALUE : Math.toIntExact(size);
+      int docCount = TestUtil.nextInt(random(), 1, maxDoc);
+      long estimatedPointCount = TestUtil.nextLong(random(), 0, size);
+      PointValues values = getPointValues(size, docCount, estimatedPointCount);
+      long docs = values.estimateDocCount(null);
+      assertTrue(docs <= estimatedPointCount);
+      assertTrue(docs <= maxDoc);
+      assertTrue(docs >= estimatedPointCount / (size/docCount));
+    }
+  }
+
+
+  private PointValues getPointValues(long size, int docCount, long estimatedPointCount) {
+    return new PointValues() {
+      @Override
+      public void intersect(IntersectVisitor visitor) {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public long estimatePointCount(IntersectVisitor visitor) {
+        return estimatedPointCount;
+      }
+
+      @Override
+      public byte[] getMinPackedValue() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public byte[] getMaxPackedValue() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int getNumDataDimensions() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int getNumIndexDimensions() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public int getBytesPerDimension() throws IOException {
+        throw new UnsupportedOperationException();
+      }
+
+      @Override
+      public long size() {
+        return size;
+      }
+
+      @Override
+      public int getDocCount() {
+        return docCount;
+      }
+    };
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java
index 3c8fef1..1090e5a 100644
--- a/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java
+++ b/lucene/core/src/test/org/apache/lucene/document/TestFeatureSort.java
@@ -230,7 +230,7 @@
       if (random().nextBoolean()) {
         float f;
         do {
-          int freq = TestUtil.nextInt(random(), 1, (1 << 16) - 1);
+          int freq = TestUtil.nextInt(random(), 1, FeatureField.MAX_FREQ);
           f = FeatureField.decodeFeatureValue(freq);
         } while (f < Float.MIN_NORMAL);
         doc.add(new NumericDocValuesField("float", Float.floatToIntBits(f)));
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java b/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
index d784b12..de5d947 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestIndexOrDocValuesQuery.java
@@ -22,6 +22,7 @@
 import org.apache.lucene.document.Field.Store;
 import org.apache.lucene.document.LongPoint;
 import org.apache.lucene.document.NumericDocValuesField;
+import org.apache.lucene.document.SortedNumericDocValuesField;
 import org.apache.lucene.document.StringField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
@@ -86,4 +87,70 @@
     dir.close();
   }
 
+  public void testUseIndexForSelectiveMultiValueQueries() throws IOException {
+    Directory dir = newDirectory();
+    IndexWriter w = new IndexWriter(dir, newIndexWriterConfig()
+        // relies on costs and PointValues.estimateCost so we need the default codec
+        .setCodec(TestUtil.getDefaultCodec()));
+    for (int i = 0; i < 2000; ++i) {
+      Document doc = new Document();
+      if (i < 1000) {
+        doc.add(new StringField("f1", "bar", Store.NO));
+        for (int j =0; j < 500; j++) {
+          doc.add(new LongPoint("f2", 42L));
+          doc.add(new SortedNumericDocValuesField("f2", 42L));
+        }
+      } else if (i == 1001) {
+        doc.add(new StringField("f1", "foo", Store.NO));
+        doc.add(new LongPoint("f2", 2L));
+        doc.add(new SortedNumericDocValuesField("f2", 42L));
+      } else {
+        doc.add(new StringField("f1", "bar", Store.NO));
+        for (int j =0; j < 100; j++) {
+          doc.add(new LongPoint("f2", 2L));
+          doc.add(new SortedNumericDocValuesField("f2", 2L));
+        }
+      }
+      w.addDocument(doc);
+    }
+    w.forceMerge(1);
+    IndexReader reader = DirectoryReader.open(w);
+    IndexSearcher searcher = newSearcher(reader);
+    searcher.setQueryCache(null);
+
+    // The term query is less selective, so the IndexOrDocValuesQuery should use points
+    final Query q1 = new BooleanQuery.Builder()
+        .add(new TermQuery(new Term("f1", "bar")), Occur.MUST)
+        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 2), SortedNumericDocValuesField.newSlowRangeQuery("f2", 2L, 2L)), Occur.MUST)
+        .build();
+
+    final Weight w1 = searcher.createWeight(searcher.rewrite(q1), ScoreMode.COMPLETE, 1);
+    final Scorer s1 = w1.scorer(searcher.getIndexReader().leaves().get(0));
+    assertNull(s1.twoPhaseIterator()); // means we use points
+
+    // The term query is less selective, so the IndexOrDocValuesQuery should use points
+    final Query q2 = new BooleanQuery.Builder()
+        .add(new TermQuery(new Term("f1", "bar")), Occur.MUST)
+        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), SortedNumericDocValuesField.newSlowRangeQuery("f2", 42, 42L)), Occur.MUST)
+        .build();
+
+    final Weight w2 = searcher.createWeight(searcher.rewrite(q2), ScoreMode.COMPLETE, 1);
+    final Scorer s2 = w2.scorer(searcher.getIndexReader().leaves().get(0));
+    assertNull(s2.twoPhaseIterator()); // means we use points
+
+    // The term query is more selective, so the IndexOrDocValuesQuery should use doc values
+    final Query q3 = new BooleanQuery.Builder()
+        .add(new TermQuery(new Term("f1", "foo")), Occur.MUST)
+        .add(new IndexOrDocValuesQuery(LongPoint.newExactQuery("f2", 42), SortedNumericDocValuesField.newSlowRangeQuery("f2", 42, 42L)), Occur.MUST)
+        .build();
+
+    final Weight w3 = searcher.createWeight(searcher.rewrite(q3), ScoreMode.COMPLETE, 1);
+    final Scorer s3 = w3.scorer(searcher.getIndexReader().leaves().get(0));
+    assertNotNull(s3.twoPhaseIterator()); // means we use doc values
+
+    reader.close();
+    w.close();
+    dir.close();
+  }
+
 }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java b/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java
index a168aac..6617c60 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestIndexSearcher.java
@@ -20,11 +20,17 @@
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.List;
+import java.util.concurrent.Callable;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Future;
 import java.util.concurrent.LinkedBlockingQueue;
+import java.util.concurrent.RejectedExecutionException;
 import java.util.concurrent.ThreadPoolExecutor;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.lucene.document.Document;
@@ -265,4 +271,80 @@
       assertEquals(leaves.size() - 1, numExecutions.get());
     }
   }
+
+  public void testRejectedExecution() throws IOException {
+    ExecutorService service = new RejectingMockExecutor();
+
+    IndexSearcher searcher = new IndexSearcher(reader, service) {
+      @Override
+      protected LeafSlice[] slices(List<LeafReaderContext> leaves) {
+        ArrayList<LeafSlice> slices = new ArrayList<>();
+        for (LeafReaderContext ctx : leaves) {
+          slices.add(new LeafSlice(Arrays.asList(ctx)));
+        }
+        return slices.toArray(new LeafSlice[0]);
+      }
+    };
+
+    // To ensure that failing ExecutorService still allows query to run
+    // successfully
+    TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
+    assert topDocs.scoreDocs.length == 10;
+
+    service.shutdown();
+  }
+
+  private static class RejectingMockExecutor implements ExecutorService {
+
+    public void shutdown() {
+    }
+
+    public List<Runnable> shutdownNow() {
+      throw new UnsupportedOperationException();
+    }
+
+    public boolean isShutdown() {
+      throw new UnsupportedOperationException();
+    }
+
+    public boolean isTerminated() {
+      throw new UnsupportedOperationException();
+    }
+
+    public boolean awaitTermination(final long l, final TimeUnit timeUnit) throws InterruptedException {
+      throw new UnsupportedOperationException();
+    }
+
+    public <T> Future<T> submit(final Callable<T> tCallable) {
+      throw new UnsupportedOperationException();
+    }
+
+    public <T> Future<T> submit(final Runnable runnable, final T t) {
+      throw new UnsupportedOperationException();
+    }
+
+    public Future<?> submit(final Runnable runnable) {
+      throw  new UnsupportedOperationException();
+    }
+
+    public <T> List<Future<T>> invokeAll(final Collection<? extends Callable<T>> callables) throws InterruptedException {
+      throw new UnsupportedOperationException();
+    }
+
+    public <T> List<Future<T>> invokeAll(final Collection<? extends Callable<T>> callables, final long l, final TimeUnit timeUnit) throws InterruptedException {
+      throw new UnsupportedOperationException();
+    }
+
+    public <T> T invokeAny(final Collection<? extends Callable<T>> callables) throws InterruptedException, ExecutionException {
+      throw new UnsupportedOperationException();
+    }
+
+    public <T> T invokeAny(final Collection<? extends Callable<T>> callables, final long l, final TimeUnit timeUnit) throws InterruptedException, ExecutionException, TimeoutException {
+      throw new UnsupportedOperationException();
+    }
+
+    public void execute(final Runnable runnable) {
+      throw new RejectedExecutionException();
+    }
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
index 8d8bc8c..41ebf1b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestLRUQueryCache.java
@@ -218,22 +218,50 @@
 
     searcher.setQueryCachingPolicy(ALWAYS_CACHE);
     searcher.search(new ConstantScoreQuery(red), 1);
-    assertEquals(Collections.singletonList(red), queryCache.cachedQueries());
+
+    if (!(queryCache.cachedQueries().equals(Collections.emptyList()))) {
+      assertEquals(Arrays.asList(red), queryCache.cachedQueries());
+    } else {
+      // Let the cache load be completed
+      Thread.sleep(200);
+      assertEquals(Arrays.asList(red), queryCache.cachedQueries());
+    }
 
     searcher.search(new ConstantScoreQuery(green), 1);
-    assertEquals(Arrays.asList(red, green), queryCache.cachedQueries());
+
+    if (!(queryCache.cachedQueries().equals(Arrays.asList(red)))) {
+      assertEquals(Arrays.asList(red, green), queryCache.cachedQueries());
+    } else {
+      // Let the cache load be completed
+      Thread.sleep(200);
+      assertEquals(Arrays.asList(red, green), queryCache.cachedQueries());
+    }
 
     searcher.search(new ConstantScoreQuery(red), 1);
     assertEquals(Arrays.asList(green, red), queryCache.cachedQueries());
 
     searcher.search(new ConstantScoreQuery(blue), 1);
-    assertEquals(Arrays.asList(red, blue), queryCache.cachedQueries());
+
+    if (!(queryCache.cachedQueries().equals(Arrays.asList(green, red)))) {
+      assertEquals(Arrays.asList(red, blue), queryCache.cachedQueries());
+    } else {
+      // Let the cache load be completed
+      Thread.sleep(200);
+      assertEquals(Arrays.asList(red, blue), queryCache.cachedQueries());
+    }
 
     searcher.search(new ConstantScoreQuery(blue), 1);
     assertEquals(Arrays.asList(red, blue), queryCache.cachedQueries());
 
     searcher.search(new ConstantScoreQuery(green), 1);
-    assertEquals(Arrays.asList(blue, green), queryCache.cachedQueries());
+
+    if (!(queryCache.cachedQueries().equals(Arrays.asList(red, blue)))) {
+      assertEquals(Arrays.asList(blue, green), queryCache.cachedQueries());
+    } else {
+      // Let the cache load be completed
+      Thread.sleep(200);
+      assertEquals(Arrays.asList(blue, green), queryCache.cachedQueries());
+    }
 
     searcher.setQueryCachingPolicy(NEVER_CACHE);
     searcher.search(new ConstantScoreQuery(red), 1);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
index b4a2eec..130449b 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestTopDocsCollector.java
@@ -29,9 +29,15 @@
 import org.apache.lucene.index.IndexReader;
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.index.MultiTerms;
 import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.index.Terms;
+import org.apache.lucene.index.TermsEnum;
 import org.apache.lucene.store.Directory;
+import org.apache.lucene.util.BytesRef;
+import org.apache.lucene.util.LineFileDocs;
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.NamedThreadFactory;
 
@@ -114,7 +120,7 @@
     return tdc;
   }
 
-  private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold) throws IOException {
+  private TopDocs doConcurrentSearchWithThreshold(int numResults, int threshold, IndexReader reader) throws IOException {
     Query q = new MatchAllDocsQuery();
     ExecutorService service = new ThreadPoolExecutor(4, 4, 0L, TimeUnit.MILLISECONDS,
         new LinkedBlockingQueue<Runnable>(),
@@ -339,7 +345,7 @@
     w.close();
 
     TopDocsCollector collector = doSearchWithThreshold(5, 10);
-    TopDocs tdc = doConcurrentSearchWithThreshold(5, 10);
+    TopDocs tdc = doConcurrentSearchWithThreshold(5, 10, reader);
     TopDocs tdc2 = collector.topDocs();
 
     CheckHits.checkEqual(q, tdc.scoreDocs, tdc2.scoreDocs);
@@ -396,4 +402,46 @@
     dir.close();
   }
 
+  public void testGlobalScore() throws Exception {
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    try (LineFileDocs docs = new LineFileDocs(random())) {
+      int numDocs = atLeast(100);
+      for (int i = 0; i < numDocs; i++) {
+        writer.addDocument(docs.nextDoc());
+      }
+    }
+
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    final IndexSearcher s = newSearcher(reader);
+    Terms terms = MultiTerms.getTerms(reader, "body");
+    int termCount = 0;
+    TermsEnum termsEnum = terms.iterator();
+    while (termsEnum.next() != null) {
+      termCount++;
+    }
+    assertTrue(termCount > 0);
+
+    // Target ~10 terms to search:
+    double chance = 10.0 / termCount;
+    termsEnum = terms.iterator();
+    while (termsEnum.next() != null) {
+      if (random().nextDouble() <= chance) {
+        BytesRef term = BytesRef.deepCopyOf(termsEnum.term());
+        Query query = new TermQuery(new Term("body", term));
+
+        TopDocsCollector collector = doSearchWithThreshold(5, 10);
+        TopDocs tdc = doConcurrentSearchWithThreshold(5, 10, reader);
+        TopDocs tdc2 = collector.topDocs();
+
+        CheckHits.checkEqual(query, tdc.scoreDocs, tdc2.scoreDocs);
+      }
+    }
+
+    reader.close();
+    dir.close();
+  }
+
 }
diff --git a/lucene/core/src/test/org/apache/lucene/util/fst/TestFstDirect.java b/lucene/core/src/test/org/apache/lucene/util/fst/TestFstDirect.java
index af9f933..d95a936 100644
--- a/lucene/core/src/test/org/apache/lucene/util/fst/TestFstDirect.java
+++ b/lucene/core/src/test/org/apache/lucene/util/fst/TestFstDirect.java
@@ -29,7 +29,6 @@
 import org.apache.lucene.util.IntsRefBuilder;
 import org.apache.lucene.util.LuceneTestCase;
 
-
 public class TestFstDirect extends LuceneTestCase {
 
   public void testDenseWithGap() throws Exception {
diff --git a/lucene/default-nested-ivy-settings.xml b/lucene/default-nested-ivy-settings.xml
index cfee7c4..c9fe95b 100644
--- a/lucene/default-nested-ivy-settings.xml
+++ b/lucene/default-nested-ivy-settings.xml
@@ -34,7 +34,7 @@
   <resolvers>
     <ibiblio name="sonatype-releases" root="https://oss.sonatype.org/content/repositories/releases" m2compatible="true" />
     <ibiblio name="maven.restlet.com" root="https://maven.restlet.com" m2compatible="true" />
-    <ibiblio name="releases.cloudera.com" root="https://repository.cloudera.com/cloudera/libs-release-local" m2compatible="true" />
+    <ibiblio name="releases.cloudera.com" root="https://repository.cloudera.com/artifactory/libs-release-local" m2compatible="true" />
 
     <filesystem name="local-maven-2" m2compatible="true" local="true">
       <artifact
diff --git a/lucene/ivy-versions.properties b/lucene/ivy-versions.properties
index 12f7bc8..e5a4a8a 100644
--- a/lucene/ivy-versions.properties
+++ b/lucene/ivy-versions.properties
@@ -23,7 +23,7 @@
 /com.fasterxml.jackson.core/jackson-databind = 2.9.9.3
 /com.fasterxml.jackson.dataformat/jackson-dataformat-smile = ${com.fasterxml.jackson.core.version}
 
-/com.github.ben-manes.caffeine/caffeine = 2.4.0
+/com.github.ben-manes.caffeine/caffeine = 2.8.0
 /com.github.virtuald/curvesapi = 1.04
 
 /com.google.guava/guava = 25.1-jre
@@ -52,7 +52,6 @@
 
 /com.tdunning/t-digest = 3.1
 /com.vaadin.external.google/android-json = 0.0.20131108.vaadin1
-/commons-beanutils/commons-beanutils = 1.9.3
 /commons-cli/commons-cli = 1.2
 /commons-codec/commons-codec = 1.11
 /commons-collections/commons-collections = 3.2.2
diff --git a/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java b/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java
index 5554d70..f151617 100644
--- a/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java
+++ b/lucene/luke/src/test/org/apache/lucene/luke/models/overview/OverviewTestBase.java
@@ -28,6 +28,8 @@
 import org.apache.lucene.document.TextField;
 import org.apache.lucene.index.DirectoryReader;
 import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.IndexWriterConfig;
+import org.apache.lucene.index.NoMergePolicy;
 import org.apache.lucene.index.RandomIndexWriter;
 import org.apache.lucene.store.Directory;
 import org.apache.lucene.util.LuceneTestCase;
@@ -55,7 +57,9 @@
     Path indexDir = createTempDir();
 
     Directory dir = newFSDirectory(indexDir);
-    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, new MockAnalyzer(random()));
+    IndexWriterConfig config = new IndexWriterConfig(new MockAnalyzer(random()));
+    config.setMergePolicy(NoMergePolicy.INSTANCE);  // see LUCENE-8998
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir, config);
 
     Document doc1 = new Document();
     doc1.add(newStringField("f1", "1", Field.Store.NO));
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java b/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java
index 4fb6c4f..709e56f 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/mlt/MoreLikeThis.java
@@ -650,13 +650,17 @@
    */
   private PriorityQueue<ScoreTerm> createQueue(Map<String, Map<String, Int>> perFieldTermFrequencies) throws IOException {
     // have collected all words in doc and their freqs
-    int numDocs = ir.numDocs();
     final int limit = Math.min(maxQueryTerms, this.getTermsCount(perFieldTermFrequencies));
     FreqQ queue = new FreqQ(limit); // will order words by score
     for (Map.Entry<String, Map<String, Int>> entry : perFieldTermFrequencies.entrySet()) {
       Map<String, Int> perWordTermFrequencies = entry.getValue();
       String fieldName = entry.getKey();
 
+      long numDocs = ir.getDocCount(fieldName);
+      if (numDocs == -1) {
+        numDocs = ir.numDocs();
+      }
+
       for (Map.Entry<String, Int> tfEntry : perWordTermFrequencies.entrySet()) { // for every word
         String word = tfEntry.getKey();
         int tf = tfEntry.getValue().x; // term freq in the source doc
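
The change above measures rarity against the number of documents that actually contain each field, rather than the whole index, so fields present in only a small slice of the corpus no longer get inflated IDF values. A minimal standalone sketch of the same fallback (illustrative only; the helper name is hypothetical):

    // Illustrative helper mirroring the patch: use the per-field doc count and fall
    // back to the index-wide live-doc count when the codec does not report one (-1).
    static long effectiveDocCount(org.apache.lucene.index.IndexReader reader, String fieldName)
        throws java.io.IOException {
      long numDocs = reader.getDocCount(fieldName); // docs with at least one term in fieldName
      if (numDocs == -1) {
        numDocs = reader.numDocs();
      }
      return numDocs;
    }
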
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java b/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java
index 2061068..1c246f5 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/mlt/TestMoreLikeThis.java
@@ -23,6 +23,7 @@
 import java.util.Collection;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
 
@@ -130,6 +131,64 @@
     writer.addDocument(doc);
   }
 
+  public void testSmallSampleFromCorpus() throws Throwable {
+    // add series of docs with terms of decreasing df
+    Directory dir = newDirectory();
+    RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
+    for (int i = 0; i < 1980; i++) {
+      Document doc = new Document();
+      doc.add(newTextField("text", "filler", Field.Store.YES));
+      writer.addDocument(doc);
+    }
+    for (int i = 0; i < 18; i++) {
+      Document doc = new Document();
+      doc.add(newTextField("one_percent", "all", Field.Store.YES));
+      writer.addDocument(doc);
+    }
+    for (int i = 0; i < 2; i++) {
+      Document doc = new Document();
+      doc.add(newTextField("one_percent", "all", Field.Store.YES));
+      doc.add(newTextField("one_percent", "tenth", Field.Store.YES));
+      writer.addDocument(doc);
+    }
+    IndexReader reader = writer.getReader();
+    writer.close();
+
+    // setup MLT query
+    MoreLikeThis mlt = new MoreLikeThis(reader);
+    Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, false);
+    mlt.setAnalyzer(analyzer);
+    mlt.setMaxQueryTerms(3);
+    mlt.setMinDocFreq(1);
+    mlt.setMinTermFreq(1);
+    mlt.setMinWordLen(1);
+    mlt.setFieldNames(new String[]{"one_percent"});
+
+    BooleanQuery query = (BooleanQuery) mlt.like("one_percent", new StringReader("tenth tenth all"));
+    Collection<BooleanClause> clauses = query.clauses();
+
+    assertTrue(clauses.size() == 2);
+    Term term = ((TermQuery) ((List<BooleanClause>) clauses).get(0).getQuery()).getTerm();
+    assertTrue(term.text().equals("all"));
+    term = ((TermQuery) ((List<BooleanClause>) clauses).get(1).getQuery()).getTerm();
+    assertTrue(term.text().equals("tenth"));
+
+
+    query = (BooleanQuery) mlt.like("one_percent", new StringReader("tenth all all"));
+    clauses = query.clauses();
+
+    assertTrue(clauses.size() == 2);
+    term = ((TermQuery) ((List<BooleanClause>) clauses).get(0).getQuery()).getTerm();
+    assertTrue(term.text().equals("all"));
+    term = ((TermQuery) ((List<BooleanClause>) clauses).get(1).getQuery()).getTerm();
+    assertTrue(term.text().equals("tenth"));
+
+    // clean up
+    reader.close();
+    dir.close();
+    analyzer.close();
+  }
+
   public void testBoostFactor() throws Throwable {
     Map<String,Float> originalValues = getOriginalValues();
     mlt.setFieldNames(new String[] {"text"});
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
index 77680cc..b470312 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/xml/CoreParser.java
@@ -16,34 +16,9 @@
  */
 package org.apache.lucene.queryparser.xml;
 
-import javax.xml.XMLConstants;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.parsers.ParserConfigurationException;
-import java.io.InputStream;
-import java.util.Locale;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.queryparser.classic.QueryParser;
-import org.apache.lucene.queryparser.xml.builders.BooleanQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.BoostingTermBuilder;
-import org.apache.lucene.queryparser.xml.builders.ConstantScoreQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.DisjunctionMaxQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.MatchAllDocsQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.PointRangeQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.RangeQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanFirstBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanNearBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanNotBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanOrBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanOrTermsBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanPositionRangeBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.SpanQueryBuilderFactory;
-import org.apache.lucene.queryparser.xml.builders.SpanTermBuilder;
-import org.apache.lucene.queryparser.xml.builders.TermQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.TermsQueryBuilder;
-import org.apache.lucene.queryparser.xml.builders.UserInputQueryBuilder;
+import org.apache.lucene.queryparser.xml.builders.*;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.w3c.dom.Document;
@@ -52,6 +27,14 @@
 import org.xml.sax.ErrorHandler;
 import org.xml.sax.SAXException;
 
+import javax.xml.XMLConstants;
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+import javax.xml.parsers.ParserConfigurationException;
+
+import java.io.InputStream;
+import java.util.Locale;
+
 /**
  * Assembles a QueryBuilder which uses only core Lucene Query objects
  */
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
index cb45dc9..4faf6e8 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/xml/TestCoreParser.java
@@ -16,9 +16,6 @@
  */
 package org.apache.lucene.queryparser.xml;
 
-import java.io.IOException;
-import java.io.InputStream;
-
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.MockAnalyzer;
 import org.apache.lucene.analysis.MockTokenFilter;
@@ -35,6 +32,9 @@
 import org.junit.AfterClass;
 import org.xml.sax.SAXException;
 
+import java.io.IOException;
+import java.io.InputStream;
+
 public class TestCoreParser extends LuceneTestCase {
 
   final private static String defaultField = "contents";
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
index 9f7f326..aa1f93d 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
@@ -42,6 +42,9 @@
   @Override
   protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
                                             int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+    if (queryRelation == QueryRelation.INTERSECTS || queryRelation == QueryRelation.DISJOINT) {
+      return rectangle2D.intersectRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    }
     return rectangle2D.relateRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
   }
 
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java
index 5e11e88..681b739 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/ShapeQuery.java
@@ -283,7 +283,7 @@
     public long cost() {
       if (cost == -1) {
         // Computing the cost may be expensive, so only do it if necessary
-        cost = values.estimatePointCount(getEstimateVisitor(query));
+        cost = values.estimateDocCount(getEstimateVisitor(query));
         assert cost >= 0;
       }
       return cost;
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java
index dd809e4..4a9e465 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/XYShapeBoundingBoxQuery.java
@@ -41,6 +41,9 @@
   @Override
   protected PointValues.Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
                                                         int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+    if (queryRelation == QueryRelation.INTERSECTS || queryRelation == QueryRelation.DISJOINT) {
+      return rectangle2D.intersectRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    }
     return rectangle2D.relateRangeBBox(minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
   }
 
diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java
index c200537..b3fdf0e 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Rectangle2D.java
@@ -105,16 +105,28 @@
     return bboxContainsPoint(x, y, this.minX, this.maxX, this.minY, this.maxY);
   }
 
-  /** compare this to a provided rangle bounding box **/
+  /** compare this to a provided range bounding box **/
   public Relation relateRangeBBox(int minXOffset, int minYOffset, byte[] minTriangle,
                                   int maxXOffset, int maxYOffset, byte[] maxTriangle) {
-    Relation eastRelation = compareBBoxToRangeBBox(this.bbox, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    Relation eastRelation = compareBBoxToRangeBBox(this.bbox,
+        minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
     if (this.crossesDateline() && eastRelation == Relation.CELL_OUTSIDE_QUERY) {
       return compareBBoxToRangeBBox(this.west, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
     }
     return eastRelation;
   }
 
+  /** compare this to a provided range bounding box; used for INTERSECTS and DISJOINT queries **/
+  public Relation intersectRangeBBox(int minXOffset, int minYOffset, byte[] minTriangle,
+                                     int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+    Relation eastRelation = intersectBBoxWithRangeBBox(this.bbox,
+        minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    if (this.crossesDateline() && eastRelation == Relation.CELL_OUTSIDE_QUERY) {
+      return intersectBBoxWithRangeBBox(this.west, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    }
+    return eastRelation;
+  }
+
   /** Checks if the rectangle intersects the provided triangle **/
   public boolean intersectsTriangle(int aX, int aY, int bX, int bY, int cX, int cY) {
     // 1. query contains any triangle points
@@ -165,15 +177,14 @@
     return bboxContainsTriangle(ax, ay, bx, by, cx, cy, minX, maxX, minY, maxY);
   }
 
-  /** static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) */
+  /**
+   * static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection)
+   **/
   private static Relation compareBBoxToRangeBBox(final byte[] bbox,
                                                  int minXOffset, int minYOffset, byte[] minTriangle,
                                                  int maxXOffset, int maxYOffset, byte[] maxTriangle) {
     // check bounding box (DISJOINT)
-    if (Arrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 ||
-        Arrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 ||
-        Arrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) > 0 ||
-        Arrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) < 0) {
+    if (disjoint(bbox, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle)) {
       return Relation.CELL_OUTSIDE_QUERY;
     }
 
@@ -183,10 +194,62 @@
         Arrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) {
       return Relation.CELL_INSIDE_QUERY;
     }
+
     return Relation.CELL_CROSSES_QUERY;
   }
 
   /**
+   * static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection)
+   * for intersection
+   **/
+  private static Relation intersectBBoxWithRangeBBox(final byte[] bbox,
+                                                     int minXOffset, int minYOffset, byte[] minTriangle,
+                                                     int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+    // check bounding box (DISJOINT)
+    if (disjoint(bbox, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle)) {
+      return Relation.CELL_OUTSIDE_QUERY;
+    }
+
+    if (Arrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 &&
+        Arrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0 ) {
+      if (Arrays.compareUnsigned(maxTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 &&
+          Arrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) {
+        return Relation.CELL_INSIDE_QUERY;
+      }
+      if (Arrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 &&
+          Arrays.compareUnsigned(maxTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0) {
+        return Relation.CELL_INSIDE_QUERY;
+      }
+    }
+
+    if (Arrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) <= 0 &&
+        Arrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) <= 0 ) {
+      if (Arrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 &&
+          Arrays.compareUnsigned(minTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) >= 0) {
+        return Relation.CELL_INSIDE_QUERY;
+      }
+      if (Arrays.compareUnsigned(minTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) >= 0 &&
+          Arrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 0, BYTES) >= 0) {
+        return Relation.CELL_INSIDE_QUERY;
+      }
+    }
+
+    return Relation.CELL_CROSSES_QUERY;
+  }
+
+  /**
+   * static utility method to check a bbox is disjoint with a range of triangles
+   **/
+  private static boolean disjoint(final byte[] bbox,
+                               int minXOffset, int minYOffset, byte[] minTriangle,
+                               int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+      return Arrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 ||
+          Arrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 ||
+          Arrays.compareUnsigned(minTriangle, minYOffset, minYOffset + BYTES, bbox, 2 * BYTES, 3 * BYTES) > 0 ||
+          Arrays.compareUnsigned(maxTriangle, maxYOffset, maxYOffset + BYTES, bbox, 0, BYTES) < 0;
+  }
+
+  /**
    * encodes a bounding box into the provided byte array
    */
   private static void encode(final int minX, final int maxX, final int minY, final int maxY, byte[] b) {
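
The unsigned comparisons in disjoint() and intersectBBoxWithRangeBBox() assume the query bounding box is encoded as four sortable ints in the order [minY, minX, maxY, maxX], the same order the test's box() helper below uses. A tiny decoding sketch (illustrative only, not part of the patch) that can help when stepping through those comparisons:

    // Illustrative only: decode a 4 * BYTES buffer laid out as [minY, minX, maxY, maxX]
    // back into sortable ints (the inverse of the encoding assumed above).
    static int[] decodeBBox(byte[] b) {
      final int BYTES = Integer.BYTES;
      return new int[] {
          org.apache.lucene.util.NumericUtils.sortableBytesToInt(b, 0),         // minY
          org.apache.lucene.util.NumericUtils.sortableBytesToInt(b, BYTES),     // minX
          org.apache.lucene.util.NumericUtils.sortableBytesToInt(b, 2 * BYTES), // maxY
          org.apache.lucene.util.NumericUtils.sortableBytesToInt(b, 3 * BYTES)  // maxX
      };
    }
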
diff --git a/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java
index 9db4079..e84af18 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/search/MultiRangeQuery.java
@@ -278,7 +278,7 @@
             public long cost() {
               if (cost == -1) {
                 // Computing the cost may be expensive, so only do it if necessary
-                cost = values.estimatePointCount(visitor) * rangeClauses.size();
+                cost = values.estimateDocCount(visitor) * rangeClauses.size();
                 assert cost >= 0;
               }
               return cost;
diff --git a/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java b/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java
index ef90c33..787a2a5 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/geo/TestRectangle2D.java
@@ -87,7 +87,13 @@
       NumericUtils.intToSortableBytes(tMaxY, triangle, 2 * BYTES);
       NumericUtils.intToSortableBytes(tMaxX, triangle, 3 * BYTES);
 
-      PointValues.Relation r = rectangle2D.relateRangeBBox(BYTES, 0, triangle, 3 * BYTES, 2 * BYTES, triangle);
+      PointValues.Relation r;
+      if (random().nextBoolean()) {
+        r = rectangle2D.relateRangeBBox(BYTES, 0, triangle, 3 * BYTES, 2 * BYTES, triangle);
+      } else {
+        r = rectangle2D.intersectRangeBBox(BYTES, 0, triangle, 3 * BYTES, 2 * BYTES, triangle);
+      }
+
       if (r == PointValues.Relation.CELL_OUTSIDE_QUERY) {
         assertFalse(rectangle2D.intersectsTriangle(ax, ay, bx, by , cx, cy));
         assertFalse(rectangle2D.containsTriangle(ax, ay, bx, by , cx, cy));
@@ -97,4 +103,54 @@
       }
     }
   }
+
+  public void testIntersectOptimization() {
+    byte[] minTriangle = box(0, 0, 10, 5);
+    byte[] maxTriangle = box(20, 10, 30, 15);
+
+    Rectangle2D rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 30.1, -0.1, 15.1));
+    assertEquals(PointValues.Relation.CELL_INSIDE_QUERY,
+        rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+    assertEquals(PointValues.Relation.CELL_INSIDE_QUERY,
+        rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+
+    rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 30.1, -0.1, 10.1));
+    assertEquals(PointValues.Relation.CELL_INSIDE_QUERY,
+        rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+    assertEquals(PointValues.Relation.CELL_CROSSES_QUERY,
+        rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+
+    rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 30.1, 4.9, 15.1));
+    assertEquals(PointValues.Relation.CELL_INSIDE_QUERY,
+        rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+    assertEquals(PointValues.Relation.CELL_CROSSES_QUERY,
+        rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+
+    rectangle2D = Rectangle2D.create(new Rectangle(-0.1, 20.1, -0.1, 15.1));
+    assertEquals(PointValues.Relation.CELL_INSIDE_QUERY,
+        rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+    assertEquals(PointValues.Relation.CELL_CROSSES_QUERY,
+        rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+
+    rectangle2D = Rectangle2D.create(new Rectangle(9.9, 30.1, -0.1, 15.1));
+    assertEquals(PointValues.Relation.CELL_INSIDE_QUERY,
+        rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+    assertEquals(PointValues.Relation.CELL_CROSSES_QUERY,
+        rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+
+    rectangle2D = Rectangle2D.create(new Rectangle(5, 25, 3, 13));
+    assertEquals(PointValues.Relation.CELL_CROSSES_QUERY,
+        rectangle2D.intersectRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+    assertEquals(PointValues.Relation.CELL_CROSSES_QUERY,
+        rectangle2D.relateRangeBBox(BYTES, 0, minTriangle, 3 * BYTES, 2 * BYTES, maxTriangle));
+  }
+
+  private byte[] box(int minY, int minX, int maxY, int maxX) {
+    byte[] bytes = new byte[4 * BYTES];
+    NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLatitude(minY), bytes, 0); // min y
+    NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLongitude(minX), bytes, BYTES); // min x
+    NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLatitude(maxY), bytes, 2 * BYTES); // max y
+    NumericUtils.intToSortableBytes(GeoEncodingUtils.encodeLongitude(maxX), bytes, 3 * BYTES); // max x
+    return bytes;
+  }
 }
diff --git a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
index c0083b9..46000ff 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/util/LuceneTestCase.java
@@ -115,6 +115,7 @@
 import org.junit.rules.RuleChain;
 import org.junit.rules.TestRule;
 import org.junit.runner.RunWith;
+import org.junit.internal.AssumptionViolatedException;
 
 import com.carrotsearch.randomizedtesting.JUnit4MethodProvider;
 import com.carrotsearch.randomizedtesting.LifecycleScope;
@@ -2720,17 +2721,16 @@
 
   /** Checks a specific exception class is thrown by the given runnable, and returns it. */
   public static <T extends Throwable> T expectThrows(Class<T> expectedType, String noExceptionMessage, ThrowingRunnable runnable) {
-    try {
-      runnable.run();
-    } catch (Throwable e) {
-      if (expectedType.isInstance(e)) {
-        return expectedType.cast(e);
-      }
-      AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + e);
-      assertion.initCause(e);
-      throw assertion;
+    final Throwable thrown = _expectThrows(Collections.singletonList(expectedType), runnable);
+    if (expectedType.isInstance(thrown)) {
+      return expectedType.cast(thrown);
     }
-    throw new AssertionFailedError(noExceptionMessage);
+    if (null == thrown) {
+      throw new AssertionFailedError(noExceptionMessage);
+    }
+    AssertionFailedError assertion = new AssertionFailedError("Unexpected exception type, expected " + expectedType.getSimpleName() + " but got " + thrown);
+    assertion.initCause(thrown);
+    throw assertion;
   }
 
   /** Checks a specific exception class is thrown by the given runnable, and returns it. */
@@ -2739,16 +2739,13 @@
       throw new AssertionError("At least one expected exception type is required?");
     }
 
-    Throwable thrown = null;
-    try {
-      runnable.run();
-    } catch (Throwable e) {
+    final Throwable thrown = _expectThrows(expectedTypes, runnable);
+    if (null != thrown) {
       for (Class<? extends T> expectedType : expectedTypes) {
-        if (expectedType.isInstance(e)) {
-          return expectedType.cast(e);
+        if (expectedType.isInstance(thrown)) {
+          return expectedType.cast(thrown);
         }
       }
-      thrown = e;
     }
 
     List<String> exceptionTypes = expectedTypes.stream().map(c -> c.getSimpleName()).collect(Collectors.toList());
@@ -2771,29 +2768,28 @@
    */
   public static <TO extends Throwable, TW extends Throwable> TW expectThrows
   (Class<TO> expectedOuterType, Class<TW> expectedWrappedType, ThrowingRunnable runnable) {
-    try {
-      runnable.run();
-    } catch (Throwable e) {
-      if (expectedOuterType.isInstance(e)) {
-        Throwable cause = e.getCause();
-        if (expectedWrappedType.isInstance(cause)) {
-          return expectedWrappedType.cast(cause);
-        } else {
-          AssertionFailedError assertion = new AssertionFailedError
-              ("Unexpected wrapped exception type, expected " + expectedWrappedType.getSimpleName() 
-                  + " but got: " + cause);
-          assertion.initCause(e);
-          throw assertion;
-        }
-      }
-      AssertionFailedError assertion = new AssertionFailedError
-          ("Unexpected outer exception type, expected " + expectedOuterType.getSimpleName()
-           + " but got: " + e);
-      assertion.initCause(e);
-      throw assertion;
+    final Throwable thrown = _expectThrows(Collections.singletonList(expectedOuterType), runnable);
+    if (null == thrown) {
+      throw new AssertionFailedError("Expected outer exception " + expectedOuterType.getSimpleName()
+                                     + " but no exception was thrown.");
     }
-    throw new AssertionFailedError("Expected outer exception " + expectedOuterType.getSimpleName()
-        + " but no exception was thrown.");
+    if (expectedOuterType.isInstance(thrown)) {
+      Throwable cause = thrown.getCause();
+      if (expectedWrappedType.isInstance(cause)) {
+        return expectedWrappedType.cast(cause);
+      } else {
+        AssertionFailedError assertion = new AssertionFailedError
+          ("Unexpected wrapped exception type, expected " + expectedWrappedType.getSimpleName() 
+           + " but got: " + cause);
+        assertion.initCause(thrown);
+        throw assertion;
+      }
+    }
+    AssertionFailedError assertion = new AssertionFailedError
+      ("Unexpected outer exception type, expected " + expectedOuterType.getSimpleName()
+       + " but got: " + thrown);
+    assertion.initCause(thrown);
+    throw assertion;
   }
 
   /**
@@ -2805,41 +2801,65 @@
    */
   public static <TO extends Throwable, TW extends Throwable> TO expectThrowsAnyOf
   (LinkedHashMap<Class<? extends TO>,List<Class<? extends TW>>> expectedOuterToWrappedTypes, ThrowingRunnable runnable) {
-    try {
-      runnable.run();
-    } catch (Throwable e) {
-      for (Map.Entry<Class<? extends TO>, List<Class<? extends TW>>> entry : expectedOuterToWrappedTypes.entrySet()) {
-        Class<? extends TO> expectedOuterType = entry.getKey();
-        List<Class<? extends TW>> expectedWrappedTypes = entry.getValue();
-        Throwable cause = e.getCause();
-        if (expectedOuterType.isInstance(e)) {
-          if (expectedWrappedTypes.isEmpty()) {
-            return null; // no wrapped exception
-          } else {
-            for (Class<? extends TW> expectedWrappedType : expectedWrappedTypes) {
-              if (expectedWrappedType.isInstance(cause)) {
-                return expectedOuterType.cast(e);
-              }
+    final List<Class<? extends TO>> outerClasses = expectedOuterToWrappedTypes.keySet().stream().collect(Collectors.toList());
+    final Throwable thrown = _expectThrows(outerClasses, runnable);
+    
+    if (null == thrown) {
+      List<String> outerTypes = outerClasses.stream().map(Class::getSimpleName).collect(Collectors.toList());
+      throw new AssertionFailedError("Expected any of the following outer exception types: " + outerTypes
+                                     + " but no exception was thrown.");
+    }
+    for (Map.Entry<Class<? extends TO>, List<Class<? extends TW>>> entry : expectedOuterToWrappedTypes.entrySet()) {
+      Class<? extends TO> expectedOuterType = entry.getKey();
+      List<Class<? extends TW>> expectedWrappedTypes = entry.getValue();
+      Throwable cause = thrown.getCause();
+      if (expectedOuterType.isInstance(thrown)) {
+        if (expectedWrappedTypes.isEmpty()) {
+          return null; // no wrapped exception
+        } else {
+          for (Class<? extends TW> expectedWrappedType : expectedWrappedTypes) {
+            if (expectedWrappedType.isInstance(cause)) {
+              return expectedOuterType.cast(thrown);
             }
-            List<String> wrappedTypes = expectedWrappedTypes.stream().map(Class::getSimpleName).collect(Collectors.toList());
-            AssertionFailedError assertion = new AssertionFailedError
-                ("Unexpected wrapped exception type, expected one of " + wrappedTypes + " but got: " + cause);
-            assertion.initCause(e);
-            throw assertion;
           }
+          List<String> wrappedTypes = expectedWrappedTypes.stream().map(Class::getSimpleName).collect(Collectors.toList());
+          AssertionFailedError assertion = new AssertionFailedError
+            ("Unexpected wrapped exception type, expected one of " + wrappedTypes + " but got: " + cause);
+          assertion.initCause(thrown);
+          throw assertion;
         }
       }
-      List<String> outerTypes = expectedOuterToWrappedTypes.keySet().stream().map(Class::getSimpleName).collect(Collectors.toList());
-      AssertionFailedError assertion = new AssertionFailedError
-          ("Unexpected outer exception type, expected one of " + outerTypes + " but got: " + e);
-      assertion.initCause(e);
-      throw assertion;
     }
-    List<String> outerTypes = expectedOuterToWrappedTypes.keySet().stream().map(Class::getSimpleName).collect(Collectors.toList());
-    throw new AssertionFailedError("Expected any of the following outer exception types: " + outerTypes
-        + " but no exception was thrown.");
+    List<String> outerTypes = outerClasses.stream().map(Class::getSimpleName).collect(Collectors.toList());
+    AssertionFailedError assertion = new AssertionFailedError
+      ("Unexpected outer exception type, expected one of " + outerTypes + " but got: " + thrown);
+    assertion.initCause(thrown);
+    throw assertion;
   }
 
+  /**
+   * Helper method for {@link #expectThrows} and {@link #expectThrowsAnyOf} that takes care of propagating
+   * any {@link AssertionError} or {@link AssumptionViolatedException} instances thrown if and only if they 
+   * are super classes of the <code>expectedTypes</code>.  Otherwise simply returns any {@link Throwable} 
+   * thrown, regardless of type, or null if the <code>runnable</code> completed w/o error.
+   */
+  private static Throwable _expectThrows(List<? extends Class<?>> expectedTypes, ThrowingRunnable runnable) {
+                                         
+    try {
+      runnable.run();
+    } catch (AssertionError | AssumptionViolatedException ae) {
+      for (Class<?> expectedType : expectedTypes) {
+        if (expectedType.isInstance(ae)) { // user is expecting this type explicitly
+          return ae;
+        }
+      }
+      throw ae;
+    } catch (Throwable e) {
+      return e;
+    }
+    return null;
+  }
+  
   /** Returns true if the file exists (can be opened), false
    *  if it cannot be opened, and (unlike Java's
    *  File.exists) throws IOException if there's some
diff --git a/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java b/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java
new file mode 100644
index 0000000..cfc70be
--- /dev/null
+++ b/lucene/test-framework/src/test/org/apache/lucene/util/TestExpectThrows.java
@@ -0,0 +1,155 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.util;
+
+import java.util.concurrent.atomic.AtomicBoolean;
+import java.io.IOException;
+
+import org.junit.internal.AssumptionViolatedException;
+  
+public class TestExpectThrows extends LuceneTestCase {
+
+  private static class HuperDuperException extends IOException {
+    public HuperDuperException() {
+      /* No-Op */
+    }
+  }
+  
+  /** 
+   * Tests that {@link #expectThrows} behaves correctly when the Runnable throws (an 
+   * instance of a subclass of) the expected Exception type: by returning that Exception.
+   */
+  public void testPass() {
+    final AtomicBoolean ran = new AtomicBoolean(false);
+    final IOException returned = expectThrows(IOException.class, () -> {
+        ran.getAndSet(true);
+        throw new HuperDuperException();
+      });
+    assertTrue(ran.get());
+    assertNotNull(returned);
+    assertEquals(HuperDuperException.class, returned.getClass());
+  }
+  
+  /** 
+   * Tests that {@link #expectThrows} behaves correctly when the Runnable does not throw (an 
+   * instance of a subclass of) the expected Exception type: by throwing an assertion to 
+   * <code>FAIL</code> the test.
+   */
+  public void testFail() {
+    final AtomicBoolean ran = new AtomicBoolean(false);
+    AssertionError caught = null;
+    try {
+      final IOException returned = expectThrows(IOException.class, () -> {
+          ran.getAndSet(true);
+        });
+      fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows
+    } catch (AssertionError ae) {
+      caught = ae;
+    }
+    assertTrue(ran.get());
+    assertNotNull(caught);
+    assertEquals("Expected exception IOException but no exception was thrown", caught.getMessage());
+                 
+  }
+
+  /** 
+   * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an  
+   * assertion that does not pass: by allowing that assertion to propagate and 
+   * <code>FAIL</code> the test.
+   */
+  public void testNestedFail() {
+    final AtomicBoolean ran = new AtomicBoolean(false);
+    AssertionError caught = null;
+    try {
+      final IOException returned = expectThrows(IOException.class, () -> {
+          ran.getAndSet(true);
+          fail("this failure should propogate");
+        });
+      fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows
+    } catch (AssertionError ae) {
+      caught = ae;
+    }
+    assertTrue(ran.get());
+    assertNotNull(caught);
+    assertEquals("this failure should propogate", caught.getMessage());
+  }
+  
+  /** 
+   * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an 
+   * assumption that does not pass: by allowing that assumption to propagate and cause 
+   * the test to <code>SKIP</code>.
+   */
+  public void testNestedAssume() {
+    final AtomicBoolean ran = new AtomicBoolean(false);
+    AssumptionViolatedException caught = null;
+    try {
+      final IOException returned = expectThrows(IOException.class, () -> {
+          ran.getAndSet(true);
+          assumeTrue("this assumption should propogate", false);
+        });
+      fail("must not complete"); // NOTE: we don't use expectThrows to test expectThrows
+    } catch (AssumptionViolatedException ave) {
+      caught = ave;
+    }
+    assertTrue(ran.get());
+    assertNotNull(caught);
+    assertEquals("this assumption should propogate", caught.getMessage());
+  }
+
+  /** 
+   * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an  
+   * assertion that does not pass but the caller has explicitly said they expect an Exception of that type:
+   * by returning that assertion failure Exception.
+   */
+  public void testExpectingNestedFail() {
+    final AtomicBoolean ran = new AtomicBoolean(false);
+    AssertionError returned = null;
+    try {
+      returned = expectThrows(AssertionError.class, () -> {
+          ran.getAndSet(true);
+          fail("this failure should be returned, not propogated");
+        });
+    } catch (AssertionError caught) { // NOTE: we don't use expectThrows to test expectThrows
+      assertNull("An exception should not have been thrown", caught);
+    }
+    assertTrue(ran.get());
+    assertNotNull(returned);
+    assertEquals("this failure should be returned, not propogated", returned.getMessage());
+  }
+  
+  /** 
+   * Tests that {@link #expectThrows} behaves correctly when the Runnable contains an 
+   * assumption that does not pass but the caller has explicitly said they expect an Exception of that type: 
+   * by returning that assumption failure Exception.
+   */
+  public void testExpectingNestedAssume() {
+    final AtomicBoolean ran = new AtomicBoolean(false);
+    AssumptionViolatedException returned = null;
+    try {
+      returned = expectThrows(AssumptionViolatedException.class, () -> {
+          ran.getAndSet(true);
+          assumeTrue("this assumption should be returned, not propogated", false);
+        });
+    } catch (AssumptionViolatedException caught) { // NOTE: we don't use expectThrows to test expectThrows
+      assertNull("An exception should not have been thrown", caught);
+    }
+    assertTrue(ran.get());
+    assertNotNull(returned);
+    assertEquals("this assumption should be returned, not propogated", returned.getMessage());
+  }
+  
+}
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 756dd22..d57a8c5 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -66,6 +66,11 @@
 
 * SOLR-13593 SOLR-13690 SOLR-13691: Allow to look up analyzer components by their SPI names in field type configuration. (Tomoko Uchida)
 
+Improvements
+----------------------
+
+* LUCENE-8984: MoreLikeThis MLT is biased for uncommon fields (Andy Hind via Anshum Gupta)
+
 Other Changes
 ----------------------
 
@@ -73,6 +78,44 @@
 
 * SOLR-13655:Upgrade Collections.unModifiableSet to Set.of and Set.copyOf (Atri Sharma via Tomás Fernández Löbbe)
 
+* SOLR-13797: SolrResourceLoader no longer caches bad results when asked for wrong type (Mike Drob)
+
+==================  8.4.0 ==================
+
+Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
+
+Versions of Major Components
+---------------------
+Apache Tika 1.19.1
+Carrot2 3.16.0
+Velocity 2.0 and Velocity Tools 3.0
+Apache ZooKeeper 3.5.5
+Jetty 9.4.19.v20190610
+
+Upgrade Notes
+---------------------
+(No changes)
+
+New Features
+---------------------
+(No changes)
+
+Improvements
+---------------------
+(No changes)
+
+Optimizations
+---------------------
+(No changes)
+
+Bug Fixes
+---------------------
+(No changes)
+
+Other Changes
+---------------------
+(No changes)
+
 ==================  8.3.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -108,10 +151,6 @@
   when using compositeIds.  Document distribution is calculated using the "id_prefix" field (if it exists) containing
   just the compositeId prefixes, or directly from the indexed "id" field otherwise. (yonik, Megan Carey)
 
-* SOLR-13565: Node level runtime libs loaded from remote urls  (noble)
-
-* SOLR-13553: Node level custom RequestHandlers (noble)
-
 * SOLR-13622: Add cat() stream source to create tuples from lines in local files (Jason Gerlowski and Joel Bernstein)
 
 * SOLR-11866: QueryElevationComponent can have query rules configured with match="subset" wherein the words need only
@@ -119,16 +158,9 @@
 
 * SOLR-13682: command line option to export documents to a file (noble)
 
-* SOLR-13650: Solr now can define and add "packages" with plugins. Each plugin can choose to
-  load from one of those packages & updating packages can reload those plugins independently (noble)
-
 * SOLR-13257: Support deterministic replica routing preferences for better cache usage (Michael Gibney
   via Christine Poerschke, Tomás Fernández Löbbe)
 
-* SOLR-13707: API to expose the currently used package name, details for each plugin (noble)
-
-* SOLR-13710: Persist package jars locally & expose them over http at /api/node/blob  (noble)
-
 * SOLR-13122: Ability to query aliases in Solr Admin UI (janhoy)
 
 * SOLR-13713: JWTAuthPlugin to support multiple JWKS endpoints (janhoy)
@@ -136,6 +168,21 @@
 * SOLR-13734: JWTAuthPlugin now supports multiple IdP issuers through configuring a new 'issuers' configuration key.
   Access tokens issued and signed by any of the configured issuers will be validated (janhoy)
 
+* SOLR-13272: Add support for arbitrary ranges in JSON facet's Range facets.
+  (Apoorv Bhawsar, Munendra S N, Mikhail Khludnev, Ishan Chattopadhyaya, Jan Høydahl)
+
+* SOLR-13632: Support integral plots, cosine distance and string truncation with math expressions (Joel Bernstein)
+
+* SOLR-13667: Add upper, lower, trim and split Stream Evaluators (Joel Bernstein)
+
+* SOLR-13625: Add CsvStream, TsvStream Streaming Expressions and supporting Stream Evaluators (Joel bernstein)
+
+* SOLR-8241: Add CaffeineCache, an efficient implementation of SolrCache.(Ben Manes, Shawn Heisey, David Smiley, Andrzej Bialecki)
+
+* SOLR-13821: A Package store to store and load package artefacts (noble, Ishan Chattopadhyaya)
+
+* SOLR-13298: Allow zplot to plot matrices (Joel Bernstein)
+
 Improvements
 ----------------------
 
@@ -146,8 +193,6 @@
 
 * SOLR-6305: Ability to set the replication factor for index files created by HDFSDirectoryFactory (Boris Pasko via Kevin Risden)
 
-* SOLR-13677: All Metrics Gauges should be unregistered by the objects that registered them (noble)
-
 * SOLR-13702: Some components register twice their metric names (janhoy)
 
 * SOLR-11601: Improved error message when geodist(llpsf) is used with arguments referring to a LatLonPointSpatialField.
@@ -173,6 +218,17 @@
 
 * SOLR-13638: Add debug, trace logging to RuleBasedAuthorizationPlugin (Jason Gerlowski)
 
+* SOLR-13784: EmbeddedSolrServer's defaultCore constructor argument is now optional (David Smiley)
+
+* SOLR-13798: SSL: Adding Enabling/Disabling client's hostname verification config (Cao Manh Dat)
+
+* SOLR-13771: Add -v and -m to ulimit section of reference guide  and bin/solr checks (Erick Erickson)
+
+* SOLR-13795: Managed schema operations should do a core reload in Solr standalone mode.
+  (Thomas Wöckinger via David Smiley)
+
+* SOLR-13719: Introducing SolrClient.ping(collection) in SolrJ (Geza Nagy via Mikhail Khludnev) 
+
 Bug Fixes
 ----------------------
 
@@ -222,6 +278,33 @@
 
 * SOLR-13238: BlobHandler generates non-padded md5 (Jeff Walraven via janhoy)
 
+* SOLR-13780: Fix ClassCastException in NestableJsonFacet (Tiago Martinho de Barros, Munendra S N)
+
+* SOLR-13725: Allow negative values for limit in TermsFacetMap (Richard Walker, Munendra S N)
+
+* SOLR-13022: Fix NPE when sorting by non-existent aggregate function in JSON Facet (hossman, Munendra S N)
+
+* SOLR-13727: Fixed V2Requests - HttpSolrClient replaced first instance of "/solr" with "/api" which
+  caused a change in host names starting with "solr".  (Megan Carey via yonik)
+
+* SOLR-13180: Fix ClassCastException in Json Request API (Johannes Kloos, Jan Høydahl, Munendra S N)
+
+* SOLR-13417: Handle stats aggregation on date and string fields in SolrJ's JSON facet response processing
+  (Jason Gerlowski, Munendra S N)
+
+* SOLR-13712: JMX MBeans are not exposed because of race condition between creating platform mbean server and
+  registering mbeans. (shalin)
+
+* SOLR-13802: Managed schema manipulations were not persisting the optional luceneMatchVersion that can be set
+  on an Analyzer. (Thomas Wöckinger)
+
+* SOLR-13539: Fix for class-cast issues during atomic-update 'removeregex' operations. This also incorporated some
+  tests Tim wrote as a part of SOLR-9505. (Tim Owen via Jason Gerlowski)
+
+* SOLR-13790: LRUStatsCache size explosion and ineffective caching. (ab)
+
+* SOLR-13376: Multi-node race condition to create/remove nodeLost markers. (hoss, ab)
+
 Other Changes
 ----------------------
 
@@ -237,8 +320,6 @@
 
 * SOLR-13643: Add Getters/Setters in ResponseBuilder for analytics response handling (Neal Sidhwaney via Munendra S N)
 
-* SOLR-13659: Refactor CacheConfig to lazily load the the implementation class (noble)
-
 * SOLR-13680: Use try-with-resource to close the closeable resource (Furkan KAMACI, Munendra S N)
 
 * SOLR-13573: Add SolrRangeQuery getters for upper, lower bound (Brian Rhees via Jason Gerlowski)
@@ -247,6 +328,18 @@
 
 * SOLR-13767: Upgrade jackson to 2.9.9 (janhoy)
 
+* SOLR-11492: Clean up /solr/cloud-dev scripts and provide a single well documented script (Gus Heck, Robert Bunch)
+
+* SOLR-13747: New TestSSLTestConfig.testFailIfUserRunsTestsWithJVMThatHasKnownSSLBugs() to give people running
+  tests more visibility if/when they use a known-buggy JVM causing most SSL tests to silently SKIP. (hossman)
+
+* SOLR-13791: Remove remaining Commons BeanUtils references. (Andras Salamon, Christine Poerschke)
+
+* SOLR-13812: Add javadocs, uneven rejection and basic test coverage for the SolrTestCaseJ4.params method.
+  (Diego Ceccarelli, Christine Poerschke, Munendra S N)
+
+* SOLR-13787: An annotation based system to write v2 APIs (noble)
+
 ==================  8.2.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -307,8 +400,6 @@
 
 * SOLR-13552: Add recNum Stream Evaluator (Joel Bernstein)
 
-* SOLR-13534: Dynamic loading to support loading jars from a URL (noble)
-
 * SOLR-13560: Add isNull and notNull Stream Evaluators (Joel Bernstein)
 
 * SOLR-10291: Add matches Stream Evaluator to support regex matching (Joel Bernstein)
diff --git a/solr/bin/solr b/solr/bin/solr
index ca1948f..596242f 100755
--- a/solr/bin/solr
+++ b/solr/bin/solr
@@ -209,6 +209,11 @@
   if [ -n "$SOLR_SSL_NEED_CLIENT_AUTH" ]; then
     SOLR_SSL_OPTS+=" -Dsolr.jetty.ssl.needClientAuth=$SOLR_SSL_NEED_CLIENT_AUTH"
   fi
+
+  if [ -z "$SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION" ] ; then
+    SOLR_SSL_OPTS+=" -Dsolr.jetty.ssl.verifyClientHostName=HTTPS"
+  fi
+
   if [ -n "$SOLR_SSL_WANT_CLIENT_AUTH" ]; then
     SOLR_SSL_OPTS+=" -Dsolr.jetty.ssl.wantClientAuth=$SOLR_SSL_WANT_CLIENT_AUTH"
   fi
@@ -1516,6 +1521,8 @@
     if hash ulimit 2>/dev/null; then
        openFiles=$(ulimit -n)
        maxProcs=$(ulimit -u)
+       virtualMemory=$(ulimit -v)
+       maxMemory=$(ulimit -m)
        if [ $openFiles != "unlimited" ] && [ $openFiles -lt "$SOLR_RECOMMENDED_OPEN_FILES" ]; then
            echo "*** [WARN] *** Your open file limit is currently $openFiles.  "
            echo " It should be set to $SOLR_RECOMMENDED_OPEN_FILES to avoid operational disruption. "
@@ -1527,10 +1534,23 @@
            echo " It should be set to $SOLR_RECOMMENDED_MAX_PROCESSES to avoid operational disruption. "
            echo " If you no longer wish to see this warning, set SOLR_ULIMIT_CHECKS to false in your profile or solr.in.sh"
        fi
+       if [ $virtualMemory != "unlimited" ]; then
+           echo "*** [WARN] ***  Your Virtual Memory limit is $virtualMemory. "
+           echo " It should be set to 'unlimited' to avoid operational disruption. "
+           echo " If you no longer wish to see this warning, set SOLR_ULIMIT_CHECKS to false in your profile or solr.in.sh"
+       fi
+       if [ $maxMemory != "unlimited" ]; then
+           echo "*** [WARN] ***  Your Max Memory Size limit is $maxMemory. "
+           echo " It should be set to 'unlimited' to avoid operational disruption. "
+           echo " If you no longer wish to see this warning, set SOLR_ULIMIT_CHECKS to false in your profile or solr.in.sh"
+       fi
+
     else
       echo "Could not check ulimits for processes and open files, recommended values are"
-      echo "     max processes: $SOLR_RECOMMENDED_MAX_PROCESSES "
-      echo "     open files:    $SOLR_RECOMMENDED_OPEN_FILES"
+      echo "     max processes:   $SOLR_RECOMMENDED_MAX_PROCESSES "
+      echo "     open files:      $SOLR_RECOMMENDED_OPEN_FILES"
+      echo "     virtual memory:  unlimited"
+      echo "     max memorh size: unlimited"
     fi
   fi
 fi
diff --git a/solr/bin/solr.in.cmd b/solr/bin/solr.in.cmd
index a831c55..e462336 100755
--- a/solr/bin/solr.in.cmd
+++ b/solr/bin/solr.in.cmd
@@ -122,6 +122,8 @@
 REM set SOLR_SSL_NEED_CLIENT_AUTH=false
 REM Enable clients to authenticate (but not require)
 REM set SOLR_SSL_WANT_CLIENT_AUTH=false
+REM Verify client hostname during SSL handshake
+REM set SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false
 REM SSL Certificates contain host/ip "peer name" information that is validated by default. Setting
 REM this to false can be useful to disable these checks when re-using a certificate on many hosts
 REM set SOLR_SSL_CHECK_PEER_NAME=true
diff --git a/solr/bin/solr.in.sh b/solr/bin/solr.in.sh
index 9d1be37..d4e6b7b 100644
--- a/solr/bin/solr.in.sh
+++ b/solr/bin/solr.in.sh
@@ -139,6 +139,8 @@
 #SOLR_SSL_NEED_CLIENT_AUTH=false
 # Enable clients to authenticate (but not require)
 #SOLR_SSL_WANT_CLIENT_AUTH=false
+# Verify client's hostname during SSL handshake
+#SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false
 # SSL Certificates contain host/ip "peer name" information that is validated by default. Setting
 # this to false can be useful to disable these checks when re-using a certificate on many hosts
 #SOLR_SSL_CHECK_PEER_NAME=true
diff --git a/solr/cloud-dev/clean.sh b/solr/cloud-dev/clean.sh
deleted file mode 100755
index 2f42d45..0000000
--- a/solr/cloud-dev/clean.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/bin/bash
-
-numServers=$1
-
-die () {
-    echo >&2 "$@"
-    exit 1
-}
-
-[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: clean.sh {numServers}"
-
-cd ..
-
-for (( i=1; i <= $numServers; i++ ))
-do
-  rm -r -f server$i
-done
-
-rm -r -f serverzk
-rm -r -f server-lastlogs
\ No newline at end of file
diff --git a/solr/cloud-dev/cli-test-solrcloud-start.sh b/solr/cloud-dev/cli-test-solrcloud-start.sh
deleted file mode 100755
index 1634ab7..0000000
--- a/solr/cloud-dev/cli-test-solrcloud-start.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/usr/bin/env bash
-
-# TODO: !OUT OF DATE!
-
-cd ..
-
-rm -r -f server2
-rm -r -f server3
-rm -r -f server4
-rm -r -f server5
-rm -r -f server6
-
-rm -r -f dist
-rm -r -f build
-rm -r -f server/solr/zoo_data
-rm -r -f server/solr/data
-rm -f server/server.log
-
-ant server dist
-
-cp -r -f server server2
-cp -r -f server server3
-cp -r -f server server4
-cp -r -f server server5
-cp -r -f server server6
-
-# first try uploading a conf dir
-java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd upconfig -zkhost 127.0.0.1:9983 -confdir server/solr/collection1/conf -confname conf1 -solrhome server/solr -runzk 8983
-
-# upload a second conf set so we avoid single conf auto linking
-java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd upconfig -zkhost 127.0.0.1:9983 -confdir server/solr/collection1/conf -confname conf2 -solrhome server/solr -runzk 8983
-
-# now try linking a collection to a conf set
-java -classpath lib/*:dist/*:build/lucene-libs/* org.apache.solr.cloud.ZkCLI -cmd linkconfig -zkhost 127.0.0.1:9983 -collection collection1 -confname conf1 -solrhome server/solr -runzk 8983
-
-
-cd server
-java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -jar start.jar 1>server.log 2>&1 &
-
-cd ../server2
-java -Djetty.port=7574 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>server2.log 2>&1 &
-
-cd ../server3
-java -Djetty.port=7575 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6575 -DSTOP.KEY=key -jar start.jar 1>server3.log 2>&1 &
-
-cd ../server4
-java -Djetty.port=7576 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6576 -DSTOP.KEY=key -jar start.jar 1>server4.log 2>&1 &
-
-cd ../server5
-java -Djetty.port=7577 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6577 -DSTOP.KEY=key -jar start.jar 1>server5.log 2>&1 &
-
-cd ../server6
-java -Djetty.port=7578 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6578 -DSTOP.KEY=key -jar start.jar 1>server6.log 2>&1 &
diff --git a/solr/cloud-dev/cloud.sh b/solr/cloud-dev/cloud.sh
new file mode 100644
index 0000000..3b6d710
--- /dev/null
+++ b/solr/cloud-dev/cloud.sh
@@ -0,0 +1,383 @@
+#!/bin/bash
+
+##################################################################################
+#
+# The goal of this script is to allow quick setup of a blank local multi node
+# cluster for development testing without needing to erase or interfere with
+# previous testing. It also enables redeployment of the code for such testing
+# clusters without erasing the data previously indexed.
+#
+# It is for dev testing only NOT for production use.
+#
+# This is also NOT meant to be run from this directory within a lucene-solr
+# working copy. Typical usage is to copy it out to a separate workspace
+# such as (<GIT_CHECKOUT>/../testing) and then either use the -w option
+# or edit the definition of the DEFAULT_VCS_WORKSPACE variable below.
+#
+# Usage:
+#    ./cloud.sh <command> [options] [name]
+#
+# Options:
+#  -c                clean the data & zk collections erasing all indexed data
+#  -r                recompile server with 'ant clean server create-package'
+#  -m <mem>          memory per node
+#  -a <args>         additional JVM options
+#  -n <num>          number of nodes to create/start; errors if this does not match an existing cluster
+#  -w <path>         path to the vcs checkout
+#  -z <num>          port to look for zookeeper on (2181 default)
+#
+# Commands:
+#   new              Create a new cluster named by the current date or [name]
+#   start            Start an existing cluster specified by [name]
+#   stop             Stop the cluster specified by [name]
+#   restart          Stop and then start the cluster specified by [name]
+#
+# In all cases, if [name] is unspecified, ls -t will be used to determine the
+# most recently modified cluster working directory, and that one will be used.
+# If [name] is specified it will be resolved as a path from the directory
+# where cloud.sh has been run.
+#
+# By default the script sets up a local Solr cloud with 4 nodes, in a local
+# directory with the ISO date as the name. A local zookeeper at 2181 or the
+# specified port is presumed to be available. A new zk chroot is used for each
+# cluster based on the file system path to the cluster directory, and the
+# default solr.xml is added to this solr root dir in zookeeper.
+#
+# Debugging ports are automatically opened for each node starting with port 5001
+#
+# Specifying an explicit destination path will cause the script to
+# use that path and a zk chroot that matches, so more than one install
+# can be created in a day, or issue numbers etc can be used. Normally the
+# directories containing clusters created by this tool are in the same
+# directory as this script. Distant paths with slashes or funny characters
+# *might* work, but are not well tested, YMMV.
+#
+# PREREQ: 1. Zookeeper on localhost:2181 (or as specified by -z option) where
+#            it is ok to create a lot of top level directories named for
+#            the absolute path of the [name] directory (for example:
+#            /solr_home_myuser_projects_solr_testing_2019-01-01). Note
+#            that not using the embedded zookeeper is key to being able to
+#            switch between testing setups and to test vs alternate versions
+#            of zookeeper if desired.
+#
+# SETUP: 1. Place this script in a directory intended to hold all your
+#           testing installations of solr.
+#        2. Edit DEFAULT_VCS_WORKSPACE if the present value does not suit
+#           your purposes.
+#        3. chmod +x cloud.sh
+#
+# EXAMPLES:
+#
+# Create a brand new 4 node cluster deployed in a directory named for today
+#
+#   ./cloud.sh new
+#
+# Create a brand new 4 node cluster deployed in a directory named SOLR-1234567
+#
+#   ./cloud.sh new SOLR-1234567
+#
+# Stop the cluster
+#
+#   ./cloud.sh stop
+#
+# Compile and push new code to a running cluster (including a bounce of the cluster)
+#
+#   ./cloud.sh restart -r
+#
+# Dump your hopelessly fubar'd test collections and start fresh with the current tarball
+#
+#   ./cloud.sh restart -c
+#
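+# Create a brand new 2 node cluster with 2g of heap per node, in a directory
+# named SOLR-1234 (an illustrative combination of the options listed above)
+#
+#   ./cloud.sh new -n 2 -m 2g SOLR-1234
+#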
+##################################################################################
+
+DEFAULT_VCS_WORKSPACE='../code/lucene-solr'
+
+############## Normally no need to edit below this line ##############
+
+##############
+# Parse Args #
+##############
+
+COMMAND=$1
+shift
+
+CLEAN=false      # default
+MEMORY=1g        # default
+JVM_ARGS=''      # default
+RECOMPILE=false  # default
+NUM_NODES=0      # need to detect if not specified
+VCS_WORK=${DEFAULT_VCS_WORKSPACE}
+ZK_PORT=2181
+
+while getopts ":crm:a:n:w:z:" opt; do
+  case ${opt} in
+    c)
+      CLEAN=true
+      ;;
+    r)
+      RECOMPILE=true
+      ;;
+    m)
+      MEMORY=$OPTARG
+      ;;
+    a)
+      JVM_ARGS=$OPTARG
+      ;;
+    n)
+      NUM_NODES=$OPTARG
+      ;;
+    w)
+      VCS_WORK=$OPTARG
+      ;;
+    z)
+      ZK_PORT=$OPTARG
+      ;;
+   \?)
+      echo "Invalid option: -$OPTARG" >&2
+      exit 1
+   esac
+done
+shift $((OPTIND -1))
+
+CLUSTER_WD=$1
+
+#################
+# Validate Args #
+#################
+case ${COMMAND} in
+   new);;
+   stop);;
+   start);;
+   restart);;
+   *) echo "Invalid command $COMMAND"; exit 2;
+esac
+
+case ${NUM_NODES} in
+    ''|*[!0-9]*) echo "$NUM_NODES (-n) is not a positive integer"; exit 3 ;;
+    *) ;;
+esac
+
+case ${ZK_PORT} in
+    ''|*[!0-9]*) echo "$NUM_NODES (-z) is not a positive integer"; exit 3 ;;
+    *) ;;
+esac
+
+if [[ "$COMMAND" = "new" ]]; then
+  if [[ "$CLEAN" = true ]]; then
+    echo "Command new and option -c (clean) do not make sense together since a newly created cluster has no data to clean."; exit 1;
+  fi
+fi
+
+if [[ ! -d "$VCS_WORK" ]]; then
+  echo "$VCS_WORK (vcs working directory) does not exist"; exit 4;
+fi
+
+if [[ ! "$COMMAND" = "new" ]]; then
+  if [[ -z "$CLUSTER_WD" ]]; then
+    # find the most recently touched directory in the local directory
+    CLUSTER_WD=$(find . -maxdepth 1 -mindepth 1 -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(.*)/\1/' | head -n1)
+  fi
+fi
+
+if [[ ! -z "$CLUSTER_WD" ]]; then
+  if [[ ! -d "$CLUSTER_WD" && ! "$COMMAND" = "new" ]]; then
+    echo "$CLUSTER_WD (cluster working directory) does not exist or is not a directory"; exit 5;
+  fi
+fi
+
+############################
+# Print our initialization #
+############################
+echo "COMMAND    : $COMMAND"
+echo "VCS WD     : $VCS_WORK"
+echo "CLUSTER WD : $CLUSTER_WD"
+echo "NUM NODES  : $NUM_NODES"
+echo "ZK PORT    : $ZK_PORT"
+echo "CLEAN      : $CLEAN"
+echo "RECOMPILE  : $RECOMPILE"
+
+###########################################################
+# Create new cluster working dir if new command specified #
+###########################################################
+mkdirIfReq() {
+  if [[ "$COMMAND" = "new" ]]; then
+    if [[ -z "$CLUSTER_WD" ]]; then
+      DATE=$(date "+%Y-%m-%d")
+      CLUSTER_WD="${DATE}"
+    fi
+    mkdir "$CLUSTER_WD"
+    if [[ "$?" -ne 0 ]]; then
+      echo "Unable to create $CLUSTER_WD"; exit 6;
+    fi
+  fi
+}
+
+#################
+# Find Solr etc #
+#################
+
+findSolr() {
+  pushd ${CLUSTER_WD}
+  CLUSTER_WD_FULL=$(pwd -P)
+  SOLR=${CLUSTER_WD}/$(find . -maxdepth 1 -name 'solr*' -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(solr.*)/\1/' | head -n1)
+  popd
+
+  #echo "Found solr at $SOLR"
+  SAFE_DEST="${CLUSTER_WD_FULL//\//_}";
+}
+
+###############################################
+# Clean node dir (and thus data) if requested #
+###############################################
+cleanIfReq() {
+  if [[ "$CLEAN" = true ]]; then
+    if [[ -d "$CLUSTER_WD" ]]; then
+      echo "Cleaning out $CLUSTER_WD"
+      pushd ${CLUSTER_WD}
+      rm -rf n*      # remove node dirs which are n1, n2, n3 etc
+      popd
+    fi
+    findSolr
+    echo COLLECTIONS FOUND IN ZK | egrep --color=always '.*'
+    COLLECTIONS_TO_CLEAN=`${SOLR}/bin/solr zk ls /solr_${SAFE_DEST}/collections -z     localhost:${ZK_PORT}`; echo $COLLECTIONS_TO_CLEAN | egrep --color=always '.*'
+    for collection in ${COLLECTIONS_TO_CLEAN}; do
+      echo nuke $collection
+      ${SOLR}/bin/solr zk rm -r /solr_${SAFE_DEST}/collections/${collection} -z     localhost:${ZK_PORT}
+      echo $?
+    done
+  fi
+}
+
+#################################
+# Recompile server if requested #
+#################################
+recompileIfReq() {
+  if [[ "$RECOMPILE" = true ]]; then
+    pushd "$VCS_WORK"/solr
+    ant clean server create-package
+    if [[ "$?" -ne 0 ]]; then
+      echo "BUILD FAIL - cloud.sh stopping, see above output for details"; popd; exit 7;
+    fi
+    popd
+    copyTarball
+  fi
+}
+
+################
+# Copy tarball #
+################
+copyTarball() {
+    echo "foo"
+    pushd ${CLUSTER_WD}
+    echo "bar"
+    rm -rf solr-*  # remove tarball and dir to which it extracts
+    echo "baz"
+    pushd # back to original dir to properly resolve vcs working dir
+    echo "foobar:"$(pwd)
+    if [[ ! -f $(ls "$VCS_WORK"/solr/package/solr-*.tgz) ]]; then
+      echo "No solr tarball found try again with -r"; popd; exit 10;
+    fi
+    cp "$VCS_WORK"/solr/package/solr-*.tgz ${CLUSTER_WD}
+    pushd # back into cluster wd to unpack
+    tar xzvf solr-*.tgz
+    popd
+}
+
+#############################################
+# Test to see if port for zookeeper is open #
+# Assume that zookeeper holds it if it is   #
+#############################################
+testZookeeper() {
+  PORT_FOUND=$( netstat -an | grep '\b'${ZK_PORT}'\s' | grep LISTEN | awk '{print $4}' | sed -E 's/.*\b('${ZK_PORT}')\s*/\1/');
+  if [[ -z  "$PORT_FOUND" ]]; then
+    echo "No process listening on port ${ZK_PORT}. Please start zookeeper and try again"; exit 8;
+  fi
+}
+
+##########################
+# Start server instances #
+##########################
+start(){
+  testZookeeper
+  echo "Starting servers"
+  findSolr
+
+  echo "SOLR=$SOLR"
+  SOLR_ROOT=$("${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd getfile "/solr_${SAFE_DEST}" /dev/stdout);
+  if [[ -z ${SOLR_ROOT} ]]; then
+    # Need a fresh root in zookeeper...
+    "${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd makepath "/solr_${SAFE_DEST}";
+    "${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd put "/solr_${SAFE_DEST}" "created by cloud.sh"; # so we can test for existence next time
+    "${SOLR}/server/scripts/cloud-scripts/zkcli.sh" -zkhost localhost:${ZK_PORT} -cmd putfile "/solr_${SAFE_DEST}/solr.xml" "${SOLR}/server/solr/solr.xml";
+  fi
+
+  ACTUAL_NUM_NODES=$(ls -1 -d ${CLUSTER_WD}/n* | wc -l )
+  if [[ "$NUM_NODES" -eq 0 ]]; then
+    NUM_NODES=${ACTUAL_NUM_NODES}
+  else
+    if [[ "$NUM_NODES" -ne "$ACTUAL_NUM_NODES" ]]; then
+      #check that this isn't first time startup..
+      if [[ "$ACTUAL_NUM_NODES" -ne 0 ]]; then
+        echo "Requested $NUM_NODES for a cluster that already has $ACTUAL_NUM_NODES. Refusing to start!"; exit 9;
+      fi
+    fi
+  fi
+
+  if [[ "$NUM_NODES" -eq 0 ]]; then
+    NUM_NODES=4  # nothing pre-existing found, default to 4
+  fi
+  echo "Final NUM_NODES is $NUM_NODES"
+  for i in `seq 1 $NUM_NODES`; do
+    mkdir -p "${CLUSTER_WD}/n${i}"
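+    # node ${i} listens on port 898${i}; a remote debugger can attach on port 500${i}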
+    argsArray=(-c -s $CLUSTER_WD_FULL/n${i} -z localhost:${ZK_PORT}/solr_${SAFE_DEST} -p 898${i} -m $MEMORY \
+    -a "-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=500${i} \
+    -Dsolr.solrxml.location=zookeeper -Dsolr.log.dir=$CLUSTER_WD_FULL/n${i} $JVM_ARGS")
+    FINAL_COMMAND="${SOLR}/bin/solr ${argsArray[@]}"
+    echo ${FINAL_COMMAND}
+    ${SOLR}/bin/solr "${argsArray[@]}"
+  done
+
+  touch ${CLUSTER_WD}  # make this the most recently updated dir for ls -t
+
+}
+
+stop() {
+  echo "Stopping servers"
+  pushd ${CLUSTER_WD}
+  SOLR=${CLUSTER_WD}/$(find . -maxdepth 1 -name 'solr*' -type d -print0 | xargs -0 ls -1 -td | sed -E 's/\.\/(solr.*)/\1/' | head -n1)
+  popd
+
+  "${SOLR}/bin/solr" stop -all
+}
+
+########################
+# process the commands #
+########################
+case ${COMMAND} in
+  new)
+    testZookeeper
+    mkdirIfReq
+    recompileIfReq
+    if [[ "$RECOMPILE" = false ]]; then
+      copyTarball
+    fi
+    start
+  ;;
+  stop)
+    stop
+  ;;
+  start)
+    testZookeeper
+    cleanIfReq
+    recompileIfReq
+    start
+  ;;
+  restart)
+    testZookeeper
+    stop
+    cleanIfReq
+    recompileIfReq
+    start
+  ;;
+  *) echo "Invalid command $COMMAND"; exit 2;
+esac
\ No newline at end of file
diff --git a/solr/cloud-dev/control.sh b/solr/cloud-dev/control.sh
deleted file mode 100755
index 575e40c..0000000
--- a/solr/cloud-dev/control.sh
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/bin/bash
-
-source ./functions.sh
-
-case "$1" in
-  start)
-        start $2 $3 "$4"
-        ;;
-  stop)
-        stop $2
-        ;;
-  kill)
-        do_kill $2
-        ;;
-  reinstall)
-        reinstall $2
-        ;;
-  rebuild)
-        rebuild $2
-        ;;
-  status)
-        status $2
-        ;;
-  cleanlogs)
-        cleanlogs $2
-        ;;
-  taillogs)
-        taillogs $2
-        ;;
-  createshard)
-        createshard $2 $3 $4 $5
-        ;;
-  *)
-        echo $"Usage: $0 { rebuild| reinstall <instanceid>| start <instanceid> [numshards]| stop <instanceid>|kill <instanceid>| status<instanceid>| cleanlogs<instanceid>| createshard <instance> <collection> <coreName> [shardId]}"
-        exit 1
-esac
-exit 0
\ No newline at end of file
diff --git a/solr/cloud-dev/example1.sh b/solr/cloud-dev/example1.sh
deleted file mode 100755
index 418642d..0000000
--- a/solr/cloud-dev/example1.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/bin/bash
-
-cd ..
-
-rm -r -f example2
-
-rm -r -f dist
-rm -r -f build
-rm -r -f example/solr/zoo_data
-rm -r -f example/solr/collection1/data
-rm -f example/example.log
-
-ant server dist
-
-cp -r -f example example2
-
-
-cd example
-java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dbootstrap_conf=true -jar start.jar 1>example.log 2>&1 &
-
-sleep 10
-
-cd ../example2
-java -Djetty.port=9574 -DzkRun -DzkHost=localhost:9983 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>example2.log 2>&1 &
-
-
diff --git a/solr/cloud-dev/example2.sh b/solr/cloud-dev/example2.sh
deleted file mode 100755
index 3c9f232..0000000
--- a/solr/cloud-dev/example2.sh
+++ /dev/null
@@ -1,36 +0,0 @@
-#!/bin/bash
-
-cd ..
-
-rm -r -f example2
-rm -r -f example3
-rm -r -f example4
-
-rm -r -f dist
-rm -r -f build
-rm -r -f example/solr/zoo_data
-rm -r -f example/solr/collection1/data
-rm -f example/example.log
-
-ant server dist
-
-cp -r -f example example2
-cp -r -f example example3
-cp -r -f example example4
-
-
-cd example
-java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dbootstrap_conf=true -jar start.jar 1>example.log 2>&1 &
-
-# wait for config to go up
-sleep 10
-
-cd ../example2
-java -Djetty.port=9574 -DzkRun -DzkHost=localhost:9983 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>example2.log 2>&1 &
-
-cd ../example3
-java -Djetty.port=9575 -DzkRun -DzkHost=localhost:9983 -DSTOP.PORT=6575 -DSTOP.KEY=key -jar start.jar 1>example3.log 2>&1 &
-
-cd ../example4
-java -Djetty.port=9576 -DzkHost=localhost:9983 -DnumShards=2 -DSTOP.PORT=6576 -DSTOP.KEY=key -jar start.jar 1>example4.log 2>&1 &
-
diff --git a/solr/cloud-dev/example3.sh b/solr/cloud-dev/example3.sh
deleted file mode 100755
index 404db01..0000000
--- a/solr/cloud-dev/example3.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-
-cd ..
-
-rm -r -f example2
-rm -r -f example3
-rm -r -f example4
-
-rm -r -f dist
-rm -r -f build
-rm -r -f example/solr/zoo_data
-rm -r -f example/solr/collection1/data
-rm -f example/example.log
-
-ant server dist
-
-cp -r -f example example2
-cp -r -f example example3
-cp -r -f example example4
-
-
-cd example
-java -DzkRun -DnumShards=2 -DSTOP.PORT=7983 -DSTOP.KEY=key -Dbootstrap_conf=true -DzkHost=localhost:9983,localhost:14574,localhost:14585 -jar start.jar 1>example.log 2>&1 &
-
-cd ../example2
-java -Djetty.port=13574 -DzkRun -DzkHost=localhost:9983,localhost:14574,localhost:14585 -DSTOP.PORT=6574 -DSTOP.KEY=key -jar start.jar 1>example2.log 2>&1 &
-
-cd ../example3
-java -Djetty.port=13585 -DzkRun -DzkHost=localhost:9983,localhost:14574,localhost:14585 -DSTOP.PORT=6575 -DSTOP.KEY=key -jar start.jar 1>example3.log 2>&1 &
-
-# wait for config to go up
-sleep 10
-
-cd ../example4
-java -Djetty.port=13596 -DzkHost=localhost:9983,localhost:14574,localhost:14585 -DnumShards=2 -DSTOP.PORT=6576 -DSTOP.KEY=key -jar start.jar 1>example4.log 2>&1 &
diff --git a/solr/cloud-dev/functions.sh b/solr/cloud-dev/functions.sh
deleted file mode 100755
index 148ec69..0000000
--- a/solr/cloud-dev/functions.sh
+++ /dev/null
@@ -1,77 +0,0 @@
-INT_JAVA_OPTS="-server -Xms256M -Xmx256M"
-BASE_PORT=8900
-BASE_STOP_PORT=9900
-ZK_PORT="2414"
-ZK_CHROOT="solr"
-
-rebuild() {
-  echo "Rebuilding"
-  cd ..
-  rm -r -f dist
-  rm -r -f build
-  rm -r -f server/solr/zoo_data
-  rm -f server/server.log
-  ant server dist
-}
-
-setports() {
-  PORT="$(( $BASE_PORT + $1 ))"
-  STOP_PORT="$(( $BASE_STOP_PORT + $1 ))"
-}
-
-reinstall() {
-  echo "Reinstalling instance $1"
-  cd ..
-  rm -rf  server$1
-  cp -r -f server server$1
-}
-
-start() {
-  OPT="-DzkHost=localhost:$ZK_PORT/$ZK_CHROOT"
-  NUMSHARDS=$2
-
-  echo "Starting instance $1"
-
-  setports $1
-  cd ../server$1
-  java $JAVA_OPTS -Djetty.port=$PORT $OPT -jar start.jar --module=http STOP.PORT=$STOP_PORT STOP.KEY=key jetty.base=. 1>server$1.log 2>&1 &
-}
-
-stop() {
-  echo "Stopping instance $1"
-  setports $1
-  cd ../server$1
-  java -jar start.jar --module=http STOP.PORT=$STOP_PORT STOP.KEY=key --stop
-}
-
-do_kill() {
-  echo "Killing instance $1"
-  setports $1
-  PID=`ps aux|grep "STOP.PORT=$STOP_PORT"|grep -v grep|cut -b 8-15`
-  if [ "" = "$PID" ]; then
-    echo "not running?"
-  else
-    kill -9 $PID
-  fi
-}
-
-status() {
-  echo "Status:"
-  ps aux|grep "STOP.PORT"|grep -v grep
-}
-
-cleanlogs() {
-    cd ../server$1
-  mv server$1.log server$1.oldlog
-}
-
-taillogs() {
-  cd ../server$1
-  tail -f server$1.log
-}
-
-createshard() {
-  setports $1
-  echo "Creating new shard @instance $1, collection=$2, shard=$3, name=$4"
-  curl "http://127.0.0.1:$PORT/solr/admin/cores?action=CREATE&collection=$2&name=$3&shard=$4"
-}
diff --git a/solr/cloud-dev/solrcloud-start-existing.sh b/solr/cloud-dev/solrcloud-start-existing.sh
deleted file mode 100755
index 9c5ec29..0000000
--- a/solr/cloud-dev/solrcloud-start-existing.sh
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/bin/bash
-
-numServers=$1
-
-baseJettyPort=8900
-baseStopPort=9900
-
-ZK_CHROOT="solr"
-
-die () {
-    echo >&2 "$@"
-    exit 1
-}
-
-[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: solrcloud-start-exisiting.sh [numServers]"
-
-
-cd ..
-
-# Useful if you want to startup on an existing setup with new code mods
-# ant server dist
-
-cd serverzk
-stopPort=1313
-jettyPort=8900
-exec -a jettyzk java -Xmx512m $JAVA_OPTS -Djetty.port=$jettyPort -DhostPort=$jettyPort -DzkRun -DzkHost=localhost:9900/$ZK_CHROOT -DzkRunOnly=true -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>serverzk.log 2>&1 &
-
-cd ..
-
-cd server
-
-for (( i=1; i <= $numServers; i++ ))
-do
-  echo "starting server$i"
-  cd ../server$i
-  stopPort=`expr $baseStopPort + $i`
-  jettyPort=`expr $baseJettyPort + $i`
-  exec -a jetty java -Xmx1g $JAVA_OPTS -Djetty.port=$jettyPort -DzkHost=localhost:9900/$ZK_CHROOT -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>server$i.log 2>&1 &
-done
diff --git a/solr/cloud-dev/solrcloud-start.sh b/solr/cloud-dev/solrcloud-start.sh
deleted file mode 100755
index bf25618..0000000
--- a/solr/cloud-dev/solrcloud-start.sh
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/bin/bash
-
-# These scripts are best effort developer scripts. No promises.
-
-# To run on hdfs, try something along the lines of:
-# export JAVA_OPTS="-Dsolr.directoryFactory=solr.HdfsDirectoryFactory -Dsolr.lock.type=hdfs -Dsolr.hdfs.home=hdfs://localhost:8020/solr -Dsolr.hdfs.confdir=/etc/hadoop_conf/conf"
-
-# To use ZooKeeper security, try:
-# export JAVA_OPTS="-DzkACLProvider=org.apache.solr.common.cloud.VMParamsAllAndReadonlyDigestZkACLProvider -DzkCredentialsProvider=org.apache.solr.common.cloud.VMParamsSingleSetCredentialsDigestZkCredentialsProvider -DzkDigestUsername=admin-user -DzkDigestPassword=admin-password -DzkDigestReadonlyUsername=readonly-user -DzkDigestReadonlyPassword=readonly-password"
-#
-# To create a collection, curl "localhost:8901/solr/admin/collections?action=CREATE&name=collection1&numShards=2&replicationFactor=1&maxShardsPerNode=10"
-# To add a document, curl http://localhost:8901/solr/collection1/update -H 'Content-type:application/json' -d '[{"id" : "book1"}]'
-
-numServers=$1
-numShards=$2
-
-baseJettyPort=8900
-baseStopPort=9900
-
-zkAddress=localhost:9900/solr
-
-die () {
-    echo >&2 "$@"
-    exit 1
-}
-
-[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: solrcloud-start.sh [numServers]"
-
-cd ..
-
-for (( i=1; i <= $numServers; i++ ))
-do
- echo "try to remove existing directory: server$i"
- rm -r -f server$i
-done
-
-
-rm -r -f dist
-rm -r -f build
-rm -r -f server/solr/zoo_data
-rm -f server/server.log
-
-ant -f ../build.xml clean
-ant server dist
-
-for (( i=1; i <= $numServers; i++ ))
-do
- echo "create server$i"
- cp -r -f server server$i
-done
-  
-rm -r -f serverzk
-cp -r -f server serverzk
-cp core/src/test-files/solr/solr-no-core.xml serverzk/solr/solr.xml
-rm -r -f serverzk/solr/collection1/core.properties
-cd serverzk
-stopPort=1313
-jettyPort=8900
-exec -a jettyzk java -Xmx512m $JAVA_OPTS -Djetty.port=$jettyPort -DhostPort=$jettyPort -DzkRun=localhost:9900/solr -DzkHost=$zkAddress -DzkRunOnly=true -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>serverzk.log 2>&1 &
-cd ..
-
-# upload config files
-java -classpath "server/solr-webapp/webapp/WEB-INF/lib/*:server/lib/ext/*" $JAVA_OPTS org.apache.solr.cloud.ZkCLI -zkhost $zkAddress -cmd upconfig --confdir server/solr/configsets/basic_configs/conf --confname basic_configs
-  
-cd server
-
-for (( i=1; i <= $numServers; i++ ))
-do
-  echo "starting server$i"
-  cd ../server$i
-  stopPort=`expr $baseStopPort + $i`
-  jettyPort=`expr $baseJettyPort + $i`
-  exec -a jetty java -Xmx1g $JAVA_OPTS -Djetty.port=$jettyPort -DzkHost=$zkAddress -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key jetty.base=. 1>server$i.log 2>&1 &
-done
diff --git a/solr/cloud-dev/stop.sh b/solr/cloud-dev/stop.sh
deleted file mode 100755
index 6502199..0000000
--- a/solr/cloud-dev/stop.sh
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/bin/bash
-
-numServers=$1
-baseJettyPort=8900
-baseStopPort=9900
-
-die () {
-    echo >&2 "$@"
-    exit 1
-}
-
-[ "$#" -eq 1 ] || die "1 argument required, $# provided, usage: stop.sh {numServers}"
-
-cd ../server
-
-for (( i=1; i <= $numServers; i++ ))
-do
-  stopPort=`expr $baseStopPort + $i`
-  echo "stopping server$i, stop port is $stopPort"
-  cd ../server$i
-  java -jar start.jar --module=http STOP.PORT=$stopPort STOP.KEY=key --stop
-done
-
-
-mkdir ../server-lastlogs
-
-for (( i=1; i <= $numServers; i++ ))
-do
-   cd ../server$i
-
-  jettyPort=`expr $baseJettyPort + $i`
-  echo "Make sure jetty stops and wait for it: $jettyPort"
-
-  pid=`lsof -i:$jettyPort -sTCP:LISTEN -t`
-  echo "pid:$pid"
-  #kill $pid
-  #wait $pid
-  if [ ! -z "$pid" ]
-  then
-    while [ -e /proc/$pid ]; do sleep 1; done
-  fi
-  
-  # save the last shutdown logs
-  echo "copy server$i.log to lastlogs"
-  cp -r -f server$i.log ../server-lastlogs/server-last$i.log
-done
-
-# stop zk runner
-java -jar start.jar --module=http STOP.PORT=1313 STOP.KEY=key --stop
-
-echo "wait for port to be available: $baseJettyPort"
-
-pid=`lsof -i:$baseJettyPort -sTCP:LISTEN -t`
-echo "pid:$pid"
-#kill $pid
-#wait $pid
-if [ ! -z "$pid" ]
-then
-  while [ -e /proc/$pid ]; do sleep 0.1; done
-fi
-nc -w 30 127.0.0.1 $baseJettyPort
-
-sleep 5
- 
\ No newline at end of file
diff --git a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java
index 402ade6..ceb4a4e 100644
--- a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java
+++ b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterIntegrationTest.java
@@ -23,6 +23,7 @@
 import org.junit.Before;
 import org.junit.Test;
 
+@org.apache.lucene.util.LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13786")
 @Slow
 public class SolrExporterIntegrationTest extends SolrExporterTestBase {
 
diff --git a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java
index 3f43843..a390be1 100644
--- a/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java
+++ b/solr/contrib/prometheus-exporter/src/test/org/apache/solr/prometheus/exporter/SolrExporterTestBase.java
@@ -107,7 +107,7 @@
 
           String[] parts = currentLine.split(" ");
 
-          assertEquals("Metric must have name and value", 2, parts.length);
+          assertEquals("Metric must have name and value: " + currentLine, 2, parts.length);
 
           metrics.put(parts[0], Double.valueOf(parts[1]));
         }
diff --git a/solr/core/ivy.xml b/solr/core/ivy.xml
index 8edbec1..9fba663 100644
--- a/solr/core/ivy.xml
+++ b/solr/core/ivy.xml
@@ -80,7 +80,6 @@
 
     <dependency org="org.apache.commons" name="commons-configuration2" rev="${/org.apache.commons/commons-configuration2}" conf="compile.hadoop"/>
     <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="compile.hadoop"/>
-    <dependency org="commons-beanutils" name="commons-beanutils" rev="${/commons-beanutils/commons-beanutils}" conf="compile.hadoop"/>
     <dependency org="com.github.ben-manes.caffeine" name="caffeine" rev="${/com.github.ben-manes.caffeine/caffeine}" conf="compile.hadoop"/>
     <dependency org="com.google.re2j" name="re2j" rev="${/com.google.re2j/re2j}" conf="compile"/>
     <dependency org="org.apache.commons" name="commons-lang3" rev="${/org.apache.commons/commons-lang3}" conf="compile.hadoop"/>
diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
new file mode 100644
index 0000000..b1be461
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
@@ -0,0 +1,275 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.api;
+
+
+import java.lang.reflect.Field;
+import java.lang.reflect.InvocationTargetException;
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.lang.reflect.ParameterizedType;
+import java.lang.reflect.Type;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SpecProvider;
+import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.common.util.ValidatingJsonMap;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.AuthorizationContext;
+import org.apache.solr.security.PermissionNameProvider;
+
+/** This class implements an Api just from an annotated java class.
+ * The class must have an {@link EndPoint} annotation and each command method
+ * must have a {@link Command} annotation.
+ * The methods that implement a command must take {@link SolrQueryRequest} and
+ * {@link SolrQueryResponse} as their first 2 parameters, and may optionally
+ * take a third parameter: a java class annotated with jackson annotations.
+ * The third parameter is only valid if the command uses a json payload.
+ *
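+ * A minimal sketch of such a class (names here are illustrative only):
+ * <pre>
+ *   &#64;EndPoint(method = SolrRequest.METHOD.POST,
+ *             path = "/example/path",
+ *             permission = PermissionNameProvider.Name.ALL)
+ *   public class ExampleApi {
+ *     &#64;Command(name = "set-example")
+ *     public void setExample(SolrQueryRequest req, SolrQueryResponse rsp, ExamplePayload payload) { }
+ *   }
+ * </pre>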
+ */
+
+public class AnnotatedApi extends Api implements PermissionNameProvider {
+  private EndPoint endPoint;
+  private Map<String, Cmd> commands = new HashMap<>();
+  private final Api fallback;
+
+  public AnnotatedApi(Object obj) {
+    this(obj, null);
+
+  }
+
+  public AnnotatedApi(Object obj, Api fallback) {
+    super(readSpec(obj.getClass()));
+    this.fallback = fallback;
+    Class<?> klas = obj.getClass();
+    if (!Modifier.isPublic(klas.getModifiers())) {
+      throw new RuntimeException(obj.getClass().getName() + " is not public");
+    }
+
+    endPoint = klas.getAnnotation(EndPoint.class);
+
+    for (Method m : klas.getDeclaredMethods()) {
+      Command command = m.getAnnotation(Command.class);
+      if (command == null) continue;
+
+      if (commands.containsKey(command.name())) {
+        throw new RuntimeException("Duplicate commands " + command.name());
+      }
+      commands.put(command.name(), new Cmd(command, obj, m));
+    }
+
+  }
+
+  @Override
+  public Name getPermissionName(AuthorizationContext request) {
+    return endPoint.permission();
+  }
+
+  private static SpecProvider readSpec(Class klas) {
+    EndPoint endPoint = (EndPoint) klas.getAnnotation(EndPoint.class);
+    if (endPoint == null) throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Invalid class :  "+ klas.getName());
+    EndPoint endPoint1 = (EndPoint) klas.getAnnotation(EndPoint.class);
+    return () -> {
+      Map map = new LinkedHashMap();
+      List<String> methods = new ArrayList<>();
+      for (SolrRequest.METHOD method : endPoint1.method()) {
+        methods.add(method.name());
+      }
+      map.put("methods", methods);
+      map.put("url", new ValidatingJsonMap(Collections.singletonMap("paths", Arrays.asList(endPoint1.path()))));
+      Map<String, Object> cmds = new HashMap<>();
+
+      for (Method method : klas.getMethods()) {
+        Command command = method.getAnnotation(Command.class);
+        if (command != null && !command.name().isBlank()) {
+          cmds.put(command.name(), AnnotatedApi.createSchema(method));
+        }
+      }
+      if (!cmds.isEmpty()) {
+        map.put("commands", cmds);
+      }
+      return new ValidatingJsonMap(map);
+    };
+
+
+  }
+
+
+  @Override
+  public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
+    if (commands.size() == 1) {
+      Cmd cmd = commands.get("");
+      if (cmd != null) {
+        cmd.invoke(req, rsp, null);
+        return;
+      }
+    }
+
+    List<CommandOperation> cmds = req.getCommands(true);
+    boolean allExists = true;
+    for (CommandOperation cmd : cmds) {
+      if (!commands.containsKey(cmd.name)) {
+        cmd.addError("No such command supported: " + cmd.name);
+        allExists = false;
+      }
+    }
+    if (!allExists) {
+      if (fallback != null) {
+        fallback.call(req, rsp);
+        return;
+      } else {
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error processing commands",
+            CommandOperation.captureErrors(cmds));
+      }
+    }
+
+    for (CommandOperation cmd : cmds) {
+      commands.get(cmd.name).invoke(req, rsp, cmd);
+    }
+
+    List<Map> errs = CommandOperation.captureErrors(cmds);
+    if (!errs.isEmpty()) {
+      throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error in executing commands", errs);
+    }
+
+  }
+
+  class Cmd {
+    final Command command;
+    final Method method;
+    final Object obj;
+    ObjectMapper mapper = new ObjectMapper();
+    int paramsCount;
+    Class c;
+
+
+    Cmd(Command command, Object obj, Method method) {
+      if (Modifier.isPublic(method.getModifiers())) {
+        this.command = command;
+        this.obj = obj;
+        this.method = method;
+        Class<?>[] parameterTypes = method.getParameterTypes();
+        paramsCount = parameterTypes.length;
+        if (parameterTypes[0] != SolrQueryRequest.class || parameterTypes[1] != SolrQueryResponse.class) {
+          throw new RuntimeException("Invalid params for method " + method);
+        }
+        if (parameterTypes.length == 3) {
+          c = parameterTypes[2];
+        }
+        if (parameterTypes.length > 3) {
+          throw new RuntimeException("Invalid params count for method " + method);
+
+        }
+      } else {
+        throw new RuntimeException(method.toString() + " is not a public method");
+      }
+
+    }
+
+    void invoke(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation cmd) {
+      try {
+
+        if (paramsCount == 2) {
+          method.invoke(obj, req, rsp);
+        } else {
+          Object o = cmd.getCommandData();
+          if (o instanceof Map && c != null) {
+            o = mapper.readValue(Utils.toJSONString(o), c);
+          }
+          method.invoke(obj, req, rsp, o);
+        }
+
+      } catch (SolrException se) {
+        throw se;
+      } catch (InvocationTargetException ite) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, ite.getCause());
+      } catch (Exception e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+      }
+
+    }
+  }
+
+  private static final Map<Class, String> primitives = new HashMap<>();
+
+  static {
+    primitives.put(String.class, "string");
+    primitives.put(Integer.class, "integer");
+    primitives.put(int.class, "integer");
+    primitives.put(Float.class, "number");
+    primitives.put(float.class, "number");
+    primitives.put(Double.class, "number");
+    primitives.put(double.class, "number");
+    primitives.put(Boolean.class, "boolean");
+    primitives.put(List.class, "array");
+  }
+
+
+  public static Map<String, Object> createSchema(Method m) {
+    Type[] types = m.getGenericParameterTypes();
+    if (types.length == 3) {
+      return createSchemaFromType(types[2]);
+    }
+    return null;
+  }
+
+  private static Map<String, Object> createSchemaFromType(Type t) {
+    Map<String, Object> map = new LinkedHashMap<>();
+
+    if (primitives.containsKey(t)) {
+      map.put("type", primitives.get(t));
+    } else if (t == List.class) {
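+      // raw List with no type parameter: no schema information to add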
+
+    } else if (t instanceof ParameterizedType && ((ParameterizedType) t).getRawType() == List.class) {
+      Type typ = ((ParameterizedType) t).getActualTypeArguments()[0];
+      map.put("type", "array");
+      map.put("items", createSchemaFromType(typ));
+    } else {
+      createObjectSchema((Class) t, map);
+    }
+    return map;
+  }
+
+  private static void createObjectSchema(Class klas, Map<String, Object> map) {
+    map.put("type", "object");
+    Map<String, Object> props = new HashMap<>();
+    map.put("properties", props);
+    for (Field fld : klas.getDeclaredFields()) {
+      JsonProperty p = fld.getAnnotation(JsonProperty.class);
+      if (p == null) continue;
+      props.put(p.value(), createSchemaFromType(fld.getGenericType()));
+
+
+    }
+  }
+
+
+}
diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java
index bfeb0ef..8a3f972 100644
--- a/solr/core/src/java/org/apache/solr/api/ApiBag.java
+++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java
@@ -230,28 +230,22 @@
   }
 
   public static class ReqHandlerToApi extends Api implements PermissionNameProvider {
-     PluginBag.PluginHolder<SolrRequestHandler> rh;
+    SolrRequestHandler rh;
 
     public ReqHandlerToApi(SolrRequestHandler rh, SpecProvider spec) {
       super(spec);
-      this.rh = new PluginBag.PluginHolder(new PluginInfo(SolrRequestHandler.TYPE, Collections.emptyMap()),rh );
-    }
-
-    public ReqHandlerToApi(PluginBag.PluginHolder<SolrRequestHandler> rh, SpecProvider spec) {
-      super(spec);
       this.rh = rh;
     }
 
     @Override
     public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
-      rh.get().handleRequest(req, rsp);
+      rh.handleRequest(req, rsp);
     }
 
     @Override
     public Name getPermissionName(AuthorizationContext ctx) {
-      SolrRequestHandler handler = rh.get();
-      if (handler instanceof PermissionNameProvider) {
-        return ((PermissionNameProvider) handler).getPermissionName(ctx);
+      if (rh instanceof PermissionNameProvider) {
+        return ((PermissionNameProvider) rh).getPermissionName(ctx);
       }
       return null;
     }
@@ -345,22 +339,22 @@
   }
 
   public static class LazyLoadedApi extends Api {
+
+    private final PluginBag.PluginHolder<SolrRequestHandler> holder;
     private Api delegate;
 
     protected LazyLoadedApi(SpecProvider specProvider, PluginBag.PluginHolder<SolrRequestHandler> lazyPluginHolder) {
       super(specProvider);
-      delegate =  new ReqHandlerToApi(lazyPluginHolder, spec);
+      this.holder = lazyPluginHolder;
     }
 
     @Override
     public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
+      if (!holder.isLoaded()) {
+        delegate = new ReqHandlerToApi(holder.get(), ApiBag.EMPTY_SPEC);
+      }
       delegate.call(req, rsp);
     }
-
-    @Override
-    public ValidatingJsonMap getSpec() {
-      return super.getSpec();
-    }
   }
 
 }
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/java/org/apache/solr/api/Command.java
similarity index 61%
copy from solr/core/src/test-files/runtimecode/MyDocCache.java
copy to solr/core/src/java/org/apache/solr/api/Command.java
index 406b950..d18d064 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/java/org/apache/solr/api/Command.java
@@ -15,21 +15,23 @@
  * limitations under the License.
  */
 
-package runtimecode;
+package org.apache.solr.api;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+public @interface Command {
+  /** If this is not a json command, leave it empty.
+   * Keep in mind that you cannot have duplicates:
+   * only one method per name.
+   *
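+   * For example (illustrative only): {@code @Command(name = "set-user")}
+   * binds the "set-user" json command to the annotated method.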
+   */
+  String name() default "";
 
-  static String fld_name= "my_synthetic_fld_s";
-  @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
-  }
+  String jsonSchema() default "";
+
 }
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/java/org/apache/solr/api/EndPoint.java
similarity index 62%
copy from solr/core/src/test-files/runtimecode/MyDocCache.java
copy to solr/core/src/java/org/apache/solr/api/EndPoint.java
index 406b950..6cbe500 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/java/org/apache/solr/api/EndPoint.java
@@ -15,21 +15,22 @@
  * limitations under the License.
  */
 
-package runtimecode;
+package org.apache.solr.api;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.security.PermissionNameProvider;
 
-  static String fld_name= "my_synthetic_fld_s";
-  @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
-  }
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE})
+public @interface EndPoint {
+  SolrRequest.METHOD[] method();
+
+  String[] path();
+
+  PermissionNameProvider.Name permission();
 }
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
index 81cf374..4a5b45d 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/EmbeddedSolrServer.java
@@ -16,6 +16,8 @@
  */
 package org.apache.solr.client.solrj.embedded;
 
+import static org.apache.solr.common.params.CommonParams.PATH;
+
 import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
@@ -24,8 +26,8 @@
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.Set;
+import java.util.function.Supplier;
 
-import com.google.common.base.Strings;
 import org.apache.commons.io.output.ByteArrayOutputStream;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
@@ -57,8 +59,6 @@
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.servlet.SolrRequestParsers;
 
-import static org.apache.solr.common.params.CommonParams.PATH;
-
 /**
  * SolrClient that connects directly to a CoreContainer.
  *
@@ -69,12 +69,27 @@
   protected final CoreContainer coreContainer;
   protected final String coreName;
   private final SolrRequestParsers _parser;
+  private final RequestWriterSupplier supplier;
+
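+  /** Supplies the {@link RequestWriter} used to serialize request content; JavaBin is the default. */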
+  public enum RequestWriterSupplier {
+    JavaBin(() -> new BinaryRequestWriter()), XML(() -> new RequestWriter());
+
+    private Supplier<RequestWriter> supplier;
+
+    private RequestWriterSupplier(final Supplier<RequestWriter> supplier) {
+      this.supplier = supplier;
+    }
+
+    public RequestWriter newRequestWriter() {
+      return supplier.get();
+    }
+  }
 
   /**
    * Create an EmbeddedSolrServer using a given solr home directory
    *
    * @param solrHome        the solr home directory
-   * @param defaultCoreName the core to route requests to by default
+   * @param defaultCoreName the core to route requests to by default (optional)
    */
   public EmbeddedSolrServer(Path solrHome, String defaultCoreName) {
     this(load(new CoreContainer(SolrXmlConfig.fromSolrHome(solrHome))), defaultCoreName);
@@ -84,7 +99,7 @@
    * Create an EmbeddedSolrServer using a NodeConfig
    *
    * @param nodeConfig      the configuration
-   * @param defaultCoreName the core to route requests to by default
+   * @param defaultCoreName the core to route requests to by default (optional)
    */
   public EmbeddedSolrServer(NodeConfig nodeConfig, String defaultCoreName) {
     this(load(new CoreContainer(nodeConfig)), defaultCoreName);
@@ -109,17 +124,33 @@
    * {@link #close()} is called.
    *
    * @param coreContainer the core container
-   * @param coreName      the core to route requests to by default
+   * @param coreName      the core to route requests to by default (optional)
    */
   public EmbeddedSolrServer(CoreContainer coreContainer, String coreName) {
+    this(coreContainer, coreName, RequestWriterSupplier.JavaBin);
+  }
+
+  /**
+   * Create an EmbeddedSolrServer wrapping a CoreContainer.
+   * <p>
+   * Note that EmbeddedSolrServer will shutdown the wrapped CoreContainer when {@link #close()} is called.
+   *
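+   * A usage sketch (the core name here is illustrative, not part of this patch):
+   * <pre>
+   *   new EmbeddedSolrServer(coreContainer, "mycore", RequestWriterSupplier.XML);
+   * </pre>
+   *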
+   * @param coreContainer
+   *          the core container
+   * @param coreName
+   *          the core to route requests to by default
+   * @param supplier
+   *          the supplier used to create a {@link RequestWriter}
+   */
+  public EmbeddedSolrServer(CoreContainer coreContainer, String coreName,
+      RequestWriterSupplier supplier) {
     if (coreContainer == null) {
       throw new NullPointerException("CoreContainer instance required");
     }
-    if (Strings.isNullOrEmpty(coreName))
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Core name cannot be empty");
     this.coreContainer = coreContainer;
     this.coreName = coreName;
     _parser = new SolrRequestParsers(null);
+    this.supplier = supplier;
   }
 
   // TODO-- this implementation sends the response to XML and then parses it.
@@ -150,8 +181,13 @@
       }
     }
 
-    if (coreName == null)
+    if (coreName == null) {
       coreName = this.coreName;
+      if (coreName == null) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "No core specified on request and no default core has been set.");
+      }
+    }
 
     // Check for cores action
     SolrQueryRequest req = null;
@@ -240,32 +276,41 @@
   private Set<ContentStream> getContentStreams(SolrRequest request) throws IOException {
     if (request.getMethod() == SolrRequest.METHOD.GET) return null;
     if (request instanceof ContentStreamUpdateRequest) {
-      ContentStreamUpdateRequest csur = (ContentStreamUpdateRequest) request;
-      Collection<ContentStream> cs = csur.getContentStreams();
+      final ContentStreamUpdateRequest csur = (ContentStreamUpdateRequest) request;
+      final Collection<ContentStream> cs = csur.getContentStreams();
       if (cs != null) return new HashSet<>(cs);
     }
-    RequestWriter.ContentWriter contentWriter = request.getContentWriter(CommonParams.JAVABIN_MIME);
-    final String cType = contentWriter == null ? CommonParams.JAVABIN_MIME : contentWriter.getContentType();
 
-    return Collections.singleton(new ContentStreamBase() {
+    final RequestWriter.ContentWriter contentWriter = request.getContentWriter(null);
 
-      @Override
-      public InputStream getStream() throws IOException {
-        BAOS baos = new BAOS();
-        if (contentWriter != null) {
-          contentWriter.write(baos);
-        } else {
-          new BinaryRequestWriter().write(request, baos);
+    String cType;
+    final BAOS baos = new BAOS();
+    if (contentWriter != null) {
+      contentWriter.write(baos);
+      cType = contentWriter.getContentType();
+    } else {
+      final RequestWriter rw = supplier.newRequestWriter();
+      cType = rw.getUpdateContentType();
+      rw.write(request, baos);
+    }
+
+    final byte[] buf = baos.toByteArray();
+    if (buf.length > 0) {
+      return Collections.singleton(new ContentStreamBase() {
+
+        @Override
+        public InputStream getStream() throws IOException {
+          return new ByteArrayInputStream(buf);
         }
-        return new ByteArrayInputStream(baos.toByteArray());
-      }
 
-      @Override
-      public String getContentType() {
-        return cType;
+        @Override
+        public String getContentType() {
+          return cType;
+        }
+      });
+    }
 
-      }
-    });
+    return null;
   }
 
   private JavaBinCodec createJavaBinCodec(final StreamingResponseCallback callback, final BinaryResponseWriter.Resolver resolver) {
diff --git a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
index c98bbb4..0e52c35 100644
--- a/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
+++ b/solr/core/src/java/org/apache/solr/client/solrj/embedded/JettySolrRunner.java
@@ -490,10 +490,10 @@
     Map<String, String> prevContext = MDC.getCopyOfContextMap();
     MDC.clear();
 
-    log.info("Start Jetty (original configured port={})", this.config.port);
-
     try {
       int port = reusePort && jettyPort != -1 ? jettyPort : this.config.port;
+      log.info("Start Jetty (configured port={}, binding port={})", this.config.port, port);
+
 
       // if started before, make a new server
       if (startedBefore) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
index 17a6ec3..957b321 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -133,7 +133,7 @@
 
   public void stopReplication() {
     if (replicationProcess != null) {
-      replicationProcess.shutdown();
+      replicationProcess.close();
     }
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java
index b499d8d..6d0b8aa 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/InactiveMarkersPlanAction.java
@@ -37,6 +37,9 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
+
 /**
  * This plan simply removes nodeAdded and nodeLost markers from Zookeeper if their TTL has
  * expired. These markers are used by {@link NodeAddedTrigger} and {@link NodeLostTrigger} to
@@ -105,12 +108,14 @@
           log.trace(" -- ignore {}: either missing or unsupported format", markerPath);
           return;
         }
+        boolean activeMarker = payload.getOrDefault(MARKER_STATE, MARKER_ACTIVE)
+            .equals(MARKER_ACTIVE);
         long timestamp = ((Number)payload.get("timestamp")).longValue();
         long delta = TimeUnit.NANOSECONDS.toSeconds(currentTimeNs - timestamp);
-        if (delta > cleanupTTL) {
+        if (delta > cleanupTTL || !activeMarker) {
           try {
             stateManager.removeData(markerPath, -1);
-            log.trace(" -- remove {}, delta={}, ttl={}", markerPath, delta, cleanupTTL);
+            log.trace(" -- remove {}, delta={}, ttl={}, active={}", markerPath, delta, cleanupTTL, activeMarker);
             cleanedUp.add(m);
           } catch (NoSuchElementException nse) {
             // someone already removed it - ignore
@@ -121,7 +126,7 @@
             log.error("Marker znode should be empty but it's not! Ignoring {} ({})", markerPath, ne.toString());
           }
         } else {
-          log.trace(" -- keep {}, delta={}, ttl={}", markerPath, delta, cleanupTTL);
+          log.trace(" -- keep {}, delta={}, ttl={}, active={}", markerPath, delta, cleanupTTL, activeMarker);
         }
       } catch (InterruptedException e) {
         Thread.currentThread().interrupt();
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
index 6023f43..f32669c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
@@ -24,13 +24,13 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.Locale;
 
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
index 6b87fc3..e150bf9 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeAddedTrigger.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -36,10 +37,15 @@
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
 import static org.apache.solr.common.params.AutoScalingParams.PREFERRED_OP;
 import static org.apache.solr.common.params.AutoScalingParams.REPLICA_TYPE;
 
@@ -71,6 +77,16 @@
     try {
       List<String> added = stateManager.listData(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH);
       added.forEach(n -> {
+        String markerPath = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + n;
+        try {
+          Map<String, Object> markerData = Utils.getJson(stateManager, markerPath);
+          // skip inactive markers
+          if (markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE)) {
+            return;
+          }
+        } catch (InterruptedException | IOException | KeeperException e) {
+          log.debug("-- ignoring marker " + markerPath + " state due to error", e);
+        }
         // don't add nodes that have since gone away
         if (lastLiveNodes.contains(n) && !nodeNameVsTimeAdded.containsKey(n)) {
           // since {@code #restoreState(AutoScaling.Trigger)} is called first, the timeAdded for a node may also be restored
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
index 047db90..a1b9168 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/NodeLostTrigger.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.cloud.autoscaling;
 
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
@@ -36,10 +37,15 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
+import org.apache.zookeeper.KeeperException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
 import static org.apache.solr.common.params.AutoScalingParams.PREFERRED_OP;
 
 /**
@@ -68,6 +74,16 @@
     try {
       List<String> lost = stateManager.listData(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH);
       lost.forEach(n -> {
+        String markerPath = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + n;
+        try {
+          Map<String, Object> markerData = Utils.getJson(stateManager, markerPath);
+          // skip inactive markers
+          if (markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE)) {
+            return;
+          }
+        } catch (InterruptedException | IOException | KeeperException e) {
+          log.debug("-- ignoring marker " + markerPath + " state due to error", e);
+        }
         // don't add nodes that have since come back
         if (!lastLiveNodes.contains(n) && !nodeNameVsTimeRemoved.containsKey(n)) {
           // since {@code #restoreState(AutoScaling.Trigger)} is called first, the timeRemoved for a node may also be restored
@@ -149,7 +165,9 @@
 
       Set<String> newLiveNodes = new HashSet<>(cloudManager.getClusterStateProvider().getLiveNodes());
       log.debug("Running NodeLostTrigger: {} with currently live nodes: {} and last live nodes: {}", name, newLiveNodes.size(), lastLiveNodes.size());
-
+      log.trace("Current Live Nodes for {}: {}", name, newLiveNodes);
+      log.trace("Last Live Nodes for {}: {}", name, lastLiveNodes);
+      
       // have any nodes that we were tracking been added to the cluster?
       // if so, remove them from the tracking map
       Set<String> trackingKeySet = nodeNameVsTimeRemoved.keySet();
@@ -191,6 +209,7 @@
             log.debug("NodeLostTrigger processor for lost nodes: {} is not ready, will try later", nodeNames);
           }
         } else  {
+          log.debug("NodeLostTrigger firing, but no processor - so removing lost nodes: {}", nodeNames);
           nodeNames.forEach(n -> {
             nodeNameVsTimeRemoved.remove(n);
           });
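
NodeLostTrigger gets the same treatment for SOLR_AUTOSCALING_NODE_LOST_PATH, plus extra trace and debug logging. Since both triggers now repeat an identical read-and-check block, a hypothetical helper (not part of this patch) makes the shared behaviour explicit, including the patch's choice to keep processing a marker when its payload cannot be read:

    import java.io.IOException;
    import java.util.Map;

    import org.apache.solr.client.solrj.cloud.DistribStateManager;
    import org.apache.solr.common.util.Utils;
    import org.apache.zookeeper.KeeperException;

    class MarkerReadSketch {
      static boolean isMarkerActive(DistribStateManager stateManager, String markerPath) {
        try {
          Map<String, Object> markerData = Utils.getJson(stateManager, markerPath);
          return !"inactive".equals(markerData.getOrDefault("state", "active"));
        } catch (InterruptedException | IOException | KeeperException e) {
          // mirrors the patch: on a read error the marker is still treated as active
          return true;
        }
      }
    }
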
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java
index 5758627..a73743c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/OverseerTriggerThread.java
@@ -22,12 +22,14 @@
 import java.net.ConnectException;
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 import java.util.NoSuchElementException;
 import java.util.Set;
 import java.util.concurrent.locks.Condition;
 import java.util.concurrent.locks.ReentrantLock;
 
+import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.BadVersionException;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
@@ -55,6 +57,11 @@
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  public static final String MARKER_STATE = "state";
+  public static final String MARKER_ACTIVE = "active";
+  public static final String MARKER_INACTIVE = "inactive";
+
+
   private final SolrCloudManager cloudManager;
 
   private final CloudConfig cloudConfig;
@@ -252,20 +259,31 @@
           throw new IllegalStateException("Caught AlreadyClosedException from ScheduledTriggers, but we're not closed yet!", e);
         }
       }
-      log.debug("-- cleaning old nodeLost / nodeAdded markers");
-      removeMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH);
-      removeMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH);
+      log.debug("-- deactivating old nodeLost / nodeAdded markers");
+      deactivateMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH);
+      deactivateMarkers(ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH);
       processedZnodeVersion = znodeVersion;
     }
   }
 
-  private void removeMarkers(String path) {
+  private void deactivateMarkers(String path) {
+    DistribStateManager stateManager = cloudManager.getDistribStateManager();
     try {
-      cloudManager.getDistribStateManager().removeRecursively(path, true, false);
+      List<String> markers = stateManager.listData(path);
+      for (String marker : markers) {
+        String markerPath = path + "/" + marker;
+        try {
+          Map<String, Object> markerMap = new HashMap<>(Utils.getJson(stateManager, markerPath));
+          markerMap.put(MARKER_STATE, MARKER_INACTIVE);
+          stateManager.setData(markerPath, Utils.toJSON(markerMap), -1);
+        } catch (NoSuchElementException e) {
+          // ignore - already deleted
+        }
+      }
     } catch (NoSuchElementException e) {
       // ignore
     } catch (Exception e) {
-      log.warn("Error removing old markers", e);
+      log.warn("Error deactivating old markers", e);
     }
   }
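
The producer side of this change is in OverseerTriggerThread: where the old code recursively deleted the nodeLost/nodeAdded marker znodes once the trigger configuration had been processed, the new deactivateMarkers() keeps each znode and rewrites its payload with state=inactive (setData with version -1, an unconditional overwrite), which the trigger restore logic above then skips. A small sketch of the payload it writes, using the same Utils.toJSON serialization; the empty starting map stands in for whatever the marker already contained:

    import java.nio.charset.StandardCharsets;
    import java.util.HashMap;
    import java.util.Map;

    import org.apache.solr.common.util.Utils;

    class DeactivateMarkerPayloadSketch {
      public static void main(String[] args) {
        Map<String, Object> markerMap = new HashMap<>(); // payload previously read from the marker znode
        markerMap.put("state", "inactive");              // MARKER_STATE -> MARKER_INACTIVE
        byte[] data = Utils.toJSON(markerMap);           // bytes handed to stateManager.setData(path, data, -1)
        // the serialized payload now carries "state":"inactive"
        System.out.println(new String(data, StandardCharsets.UTF_8));
      }
    }
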
 
diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
index ea2f6d7..59bd795 100644
--- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
@@ -16,20 +16,17 @@
  */
 package org.apache.solr.core;
 
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
 import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
+import java.math.BigInteger;
 import java.nio.ByteBuffer;
-import java.nio.file.Path;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -37,14 +34,10 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.regex.Pattern;
 
-import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.client.HttpClient;
 import org.apache.http.client.methods.HttpGet;
-import org.apache.lucene.util.IOUtils;
-import org.apache.solr.api.Api;
-import org.apache.solr.api.V2HttpCall;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
@@ -52,34 +45,24 @@
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
-import org.apache.solr.handler.RequestHandlerBase;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.security.AuthorizationContext;
-import org.apache.solr.security.PermissionNameProvider;
 import org.apache.solr.util.SimplePostTool;
 import org.apache.zookeeper.server.ByteBufferInputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.SolrException.ErrorCode.BAD_REQUEST;
 import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
 import static org.apache.solr.common.SolrException.ErrorCode.SERVICE_UNAVAILABLE;
 import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
-import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;
 
 /**
  * The purpose of this class is to store the Jars loaded in memory and to keep only one copy of the Jar in a single node.
  */
 public class BlobRepository {
-  private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtime.lib.size", String.valueOf(5 * 1024 * 1024)));
+  private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtme.lib.size", String.valueOf(5 * 1024 * 1024)));
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  static final Random RANDOM;
+  public static final Random RANDOM;
   static final Pattern BLOB_KEY_PATTERN_CHECKER = Pattern.compile(".*/\\d+");
 
   static {
@@ -105,14 +88,6 @@
     this.coreContainer = coreContainer;
   }
 
-  public Collection<String> getFiles() {
-    return Arrays.asList(getBlobsPath().toFile().list());
-  }
-
-  public Path getBlobsPath() {
-    return SolrResourceLoader.getBlobsDirPath(this.coreContainer.getResourceLoader().getInstancePath());
-  }
-
   // I wanted to {@link SolrCore#loadDecodeAndCacheBlob(String, Decoder)} below but precommit complains
 
   /**
@@ -141,12 +116,12 @@
     return getBlobIncRef(key.concat(decoder.getName()), () -> addBlob(key, decoder));
   }
 
-  BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha256) {
+  BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha512) {
     StringBuffer keyBuilder = new StringBuffer(key);
     if (decoder != null) keyBuilder.append(decoder.getName());
-    keyBuilder.append("/").append(sha256);
+    keyBuilder.append("/").append(sha512);
 
-    return getBlobIncRef(keyBuilder.toString(), () -> new BlobContent<>(key, fetchBlobAndVerify(key, url, sha256), decoder));
+    return getBlobIncRef(keyBuilder.toString(), () -> new BlobContent<>(key, fetchBlobAndVerify(key, url, sha512), decoder));
   }
 
   // do the actual work returning the appropriate type...
@@ -191,79 +166,33 @@
     return aBlob;
   }
 
-  static String INVALID_JAR_MSG = "Invalid jar from {0} , expected sha256 hash : {1} , actual : {2}";
+  static String INVALID_JAR_MSG = "Invalid jar from {0} , expected sha512 hash : {1} , actual : {2}";
 
-  private ByteBuffer fetchBlobAndVerify(String key, String url, String sha256) throws IOException {
-    ByteBuffer byteBuffer = null;
-    if (sha256 != null) {
-      byteBuffer = getFromLocalFs(sha256);
-    }
-    if (byteBuffer == null) byteBuffer = getAndValidate(key, url, sha256);
-    return byteBuffer;
-  }
-
-  private ByteBuffer getAndValidate(String key, String url, String sha256) throws IOException {
+  private ByteBuffer fetchBlobAndVerify(String key, String url, String sha512) {
     ByteBuffer byteBuffer = fetchFromUrl(key, url);
-    String computedDigest = sha256Digest(byteBuffer);
-    if (!computedDigest.equals(sha256)) {
-      throw new SolrException(SERVER_ERROR, StrUtils.formatString(INVALID_JAR_MSG, url, sha256, computedDigest));
-    }
-    File file = new File(getBlobsPath().toFile(), sha256);
-    try (FileOutputStream fos = new FileOutputStream(file)) {
-      fos.write(byteBuffer.array(), byteBuffer.arrayOffset(), byteBuffer.limit());
-      IOUtils.fsync(file.toPath(), false);
+    String computedDigest = sha512Digest(byteBuffer);
+    if (!computedDigest.equals(sha512)) {
+      throw new SolrException(SERVER_ERROR, StrUtils.formatString(INVALID_JAR_MSG, url, sha512, computedDigest));
+
     }
     return byteBuffer;
   }
 
-  public String putBlob(InputStream is) throws SolrException {
-    byte[] b = new byte[(int) MAX_JAR_SIZE + 1];
-    String sha256 = null;
+  public static String sha512Digest(ByteBuffer byteBuffer) {
+    MessageDigest digest = null;
     try {
-      int sz = is.read(b);
-
-      if (sz > MAX_JAR_SIZE)
-        throw new SolrException(BAD_REQUEST, "size is more than permitted , use system property runtime.lib.size to change it");
-      sha256 = sha256Digest(ByteBuffer.wrap(b, 0, sz));
-      File file = new File(getBlobsPath().toFile(), sha256);
-      try (FileOutputStream fos = new FileOutputStream(file)) {
-        fos.write(b, 0, sz);
-      }
-      IOUtils.fsync(file.toPath(), false);
-    } catch (IOException e) {
-      throw new SolrException(BAD_REQUEST, e);
+      digest = MessageDigest.getInstance("SHA-512");
+    } catch (NoSuchAlgorithmException e) {
+      //unlikely
+      throw new SolrException(SERVER_ERROR, e);
     }
-    return sha256;
-
+    digest.update(byteBuffer);
+    return String.format(
+        Locale.ROOT,
+        "%0128x",
+        new BigInteger(1, digest.digest()));
   }
 
-  private ByteBuffer getFromLocalFs(String sha256) throws IOException {
-    Path p = getBlobsPath();
-    File f = new File(p.toFile(), sha256);
-    if (!f.exists()) return null;
-    byte[] b = new byte[(int) f.length()];
-    try (FileInputStream fis = new FileInputStream(f)) {
-      fis.read(b);
-      ByteBuffer byteBuffer = ByteBuffer.wrap(b);
-      if (sha256.equals(sha256Digest(byteBuffer))) {
-        return byteBuffer;
-      } else {
-        return null;
-
-      }
-    }
-  }
-
-  public static String sha256Digest(ByteBuffer buf) {
-    try {
-      return DigestUtils.sha256Hex(new ByteBufferInputStream(ByteBuffer.wrap( buf.array(), buf.arrayOffset(), buf.limit())));
-    } catch (IOException e) {
-      throw new RuntimeException("Unable to compute sha256", e);
-    }
-  }
-
-
-
 
   /**
    * Package local for unit tests only please do not use elsewhere
@@ -285,14 +214,13 @@
       entity = response.getEntity();
       int statusCode = response.getStatusLine().getStatusCode();
       if (statusCode != 200) {
-        throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such resource available: " + key + ", url : " + url);
+        throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key);
       }
 
       try (InputStream is = entity.getContent()) {
         b = SimplePostTool.inputStreamToByteArray(is, MAX_JAR_SIZE);
       }
     } catch (Exception e) {
-      log.error("Error loading resource " + url, e);
       if (e instanceof SolrException) {
         throw (SolrException) e;
       } else {
@@ -353,68 +281,6 @@
     }
   }
 
-  BlobRead blobRead = new BlobRead();
-
-
-  class BlobRead extends RequestHandlerBase implements PermissionNameProvider {
-
-
-    @Override
-    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
-
-    }
-
-    @Override
-    public String getDescription() {
-      return "List fetch blobs";
-    }
-
-    @Override
-    public Name getPermissionName(AuthorizationContext request) {
-      return null;
-    }
-
-    @Override
-    public Collection<Api> getApis() {
-      return Collections.singleton(new Api(Utils.getSpec("node.blob.GET")) {
-        @Override
-        public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
-          String sha256 = ((V2HttpCall) req.getHttpSolrCall()).getUrlParts().get("sha256");
-          if (sha256 == null) {
-            rsp.add("blob", getFiles());
-          } else {
-            try {
-              ByteBuffer buf = getFromLocalFs(sha256);
-              if(buf == null){
-                throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No such blob");
-              } else {
-                ModifiableSolrParams solrParams = new ModifiableSolrParams();
-                solrParams.add(CommonParams.WT, FILE_STREAM);
-                req.setParams( SolrParams.wrapDefaults(solrParams, req.getParams()));
-                rsp.add(FILE_STREAM, (SolrCore.RawWriter) os -> os.write(buf.array(), buf.arrayOffset(), buf.limit()));
-              }
-
-            } catch (IOException e) {
-              throw new SolrException(SERVER_ERROR,e);
-            }
-          }
-
-        }
-      });
-    }
-
-    @Override
-    public Boolean registerV1() {
-      return Boolean.FALSE;
-    }
-
-    @Override
-    public Boolean registerV2() {
-      return Boolean.TRUE;
-    }
-  }
-
-
   public static class BlobContent<T> {
     public final String key;
     private final T content; // holds byte buffer or cached object, holding both is a waste of memory
@@ -468,7 +334,7 @@
   public static class BlobContentRef<T> {
     public final BlobContent<T> blob;
 
-    public BlobContentRef(BlobContent<T> blob) {
+    private BlobContentRef(BlobContent<T> blob) {
       this.blob = blob;
     }
   }
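
The BlobRepository changes revert this branch to SHA-512 verification: the commons-codec sha256Hex path and the local-filesystem blob cache (getFromLocalFs/putBlob and the BlobRead handler) are dropped, and fetchBlobAndVerify() always downloads the jar and compares a SHA-512 digest computed with the JDK's MessageDigest, hex-encoded through BigInteger with a fixed 128-character width so leading zero bytes survive. A standalone sketch of that encoding, assuming nothing beyond the JDK:

    import java.math.BigInteger;
    import java.nio.ByteBuffer;
    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.util.Locale;

    class Sha512DigestSketch {
      static String sha512Hex(ByteBuffer buf) throws Exception {
        MessageDigest digest = MessageDigest.getInstance("SHA-512");
        digest.update(buf);
        // %0128x left-pads with zeros: a SHA-512 digest is 64 bytes, i.e. 128 hex characters
        return String.format(Locale.ROOT, "%0128x", new BigInteger(1, digest.digest()));
      }

      public static void main(String[] args) throws Exception {
        ByteBuffer jarBytes = ByteBuffer.wrap("hello".getBytes(StandardCharsets.UTF_8));
        String hex = sha512Hex(jarBytes);
        // 128-character lowercase hex, the same value sha512sum prints for the same bytes
        System.out.println(hex.length() + " " + hex);
      }
    }
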
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index ee4ed90..0d525b3 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -47,6 +47,7 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
+import org.apache.solr.api.AnnotatedApi;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
@@ -69,7 +70,6 @@
 import org.apache.solr.common.cloud.Replica;
 import org.apache.solr.common.cloud.Replica.State;
 import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CollectionAdminParams;
 import org.apache.solr.common.util.ExecutorUtil;
 import org.apache.solr.common.util.IOUtils;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
@@ -77,6 +77,7 @@
 import org.apache.solr.core.DirectoryFactory.DirContext;
 import org.apache.solr.core.backup.repository.BackupRepository;
 import org.apache.solr.core.backup.repository.BackupRepositoryFactory;
+import org.apache.solr.filestore.PackageStoreAPI;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.handler.SnapShooter;
 import org.apache.solr.handler.admin.AutoscalingHistoryHandler;
@@ -221,7 +222,7 @@
 
   protected volatile AutoscalingHistoryHandler autoscalingHistoryHandler;
 
-  private final PackageManager clusterPropertiesListener = new PackageManager(this);
+  private PackageStoreAPI packageStoreAPI;
 
 
   // Bits for the state variable.
@@ -609,6 +610,10 @@
       }
     }
 
+    packageStoreAPI = new PackageStoreAPI(this);
+    containerHandlers.getApiBag().register(new AnnotatedApi(packageStoreAPI.readAPI), Collections.EMPTY_MAP);
+    containerHandlers.getApiBag().register(new AnnotatedApi(packageStoreAPI.writeAPI), Collections.EMPTY_MAP);
+
     metricManager = new SolrMetricManager(loader, cfg.getMetricsConfig());
 
     coreContainerWorkExecutor = MetricUtils.instrumentedExecutorService(
@@ -634,7 +639,6 @@
 
     zkSys.initZooKeeper(this, solrHome, cfg.getCloudConfig());
     if (isZooKeeperAware()) {
-      getZkController().getZkStateReader().registerClusterPropertiesListener(clusterPropertiesListener);
       pkiAuthenticationPlugin = new PKIAuthenticationPlugin(this, zkSys.getZkController().getNodeName(),
           (PublicKeyHandler) containerHandlers.get(PublicKeyHandler.PATH));
       pkiAuthenticationPlugin.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "/authentication/pki");
@@ -647,8 +651,6 @@
     reloadSecurityProperties();
     this.backupRepoFactory = new BackupRepositoryFactory(cfg.getBackupRepositoryPlugins());
 
-    containerHandlers.put("/ext", clusterPropertiesListener.extHandler);
-    containerHandlers.put("/blob-get", blobRepository.blobRead);
     createHandler(ZK_PATH, ZookeeperInfoHandler.class.getName(), ZookeeperInfoHandler.class);
     createHandler(ZK_STATUS_PATH, ZookeeperStatusHandler.class.getName(), ZookeeperStatusHandler.class);
     collectionsHandler = createHandler(COLLECTIONS_HANDLER_PATH, cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
@@ -850,7 +852,7 @@
         name = "localhost";
       }
       cloudManager = null;
-      client = new EmbeddedSolrServer(this, CollectionAdminParams.SYSTEM_COLL) {
+      client = new EmbeddedSolrServer(this, null) {
         @Override
         public void close() throws IOException {
           // do nothing - we close the container ourselves
@@ -1550,7 +1552,7 @@
       } catch (SolrCoreState.CoreIsClosedException e) {
         throw e;
       } catch (Exception e) {
-        coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e));
+        coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, (Exception) e));
         throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e);
       } finally {
         if (!success && newCore != null && newCore.getOpenCount() > 0) {
@@ -1795,14 +1797,6 @@
     return handler;
   }
 
-  public PluginBag<SolrRequestHandler> getContainerHandlers() {
-    return containerHandlers;
-  }
-
-  public PackageManager getPackageManager(){
-    return clusterPropertiesListener;
-  }
-
   public CoreAdminHandler getMultiCoreHandler() {
     return coreAdminHandler;
   }
diff --git a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
index 997e0b4..cf6bb4d 100644
--- a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
@@ -26,7 +26,6 @@
 import java.security.ProtectionDomain;
 import java.security.cert.Certificate;
 import java.util.ArrayList;
-import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -44,28 +43,20 @@
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private boolean allJarsLoaded = false;
   private final SolrResourceLoader parentLoader;
-  private List<RuntimeLib> libs = new ArrayList<>();
+  private List<PluginBag.RuntimeLib> libs = new ArrayList<>();
   private Map<String, Class> classCache = new HashMap<>();
   private List<String> errors = new ArrayList<>();
 
 
-  public MemClassLoader(List<RuntimeLib> libs, SolrResourceLoader resourceLoader) {
+  public MemClassLoader(List<PluginBag.RuntimeLib> libs, SolrResourceLoader resourceLoader) {
     this.parentLoader = resourceLoader;
     this.libs = libs;
   }
 
-  public int getZnodeVersion(){
-    int result = -1;
-    for (RuntimeLib lib : libs) {
-      if(lib.znodeVersion > result) result = lib.znodeVersion;
-    }
-    return result;
-  }
-
   synchronized void loadRemoteJars() {
     if (allJarsLoaded) return;
     int count = 0;
-    for (RuntimeLib lib : libs) {
+    for (PluginBag.RuntimeLib lib : libs) {
       if (lib.getUrl() != null) {
         try {
           lib.loadJar();
@@ -79,13 +70,10 @@
     if (count == libs.size()) allJarsLoaded = true;
   }
 
-  public Collection<String> getErrors(){
-    return errors;
-  }
   public synchronized void loadJars() {
     if (allJarsLoaded) return;
 
-    for (RuntimeLib lib : libs) {
+    for (PluginBag.RuntimeLib lib : libs) {
       try {
         lib.loadJar();
         lib.verify();
@@ -145,7 +133,7 @@
 
     String path = name.replace('.', '/').concat(".class");
     ByteBuffer buf = null;
-    for (RuntimeLib lib : libs) {
+    for (PluginBag.RuntimeLib lib : libs) {
       try {
         buf = lib.getFileContent(path);
         if (buf != null) {
@@ -162,7 +150,7 @@
 
   @Override
   public void close() throws Exception {
-    for (RuntimeLib lib : libs) {
+    for (PluginBag.RuntimeLib lib : libs) {
       try {
         lib.close();
       } catch (Exception e) {
@@ -188,7 +176,6 @@
     try {
       return findClass(cname).asSubclass(expectedType);
     } catch (Exception e) {
-      log.error("Error loading class from runtime libs ", e);
       if (e instanceof SolrException) {
         throw (SolrException) e;
       } else {
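
With PackageManager removed, MemClassLoader goes back to wrapping PluginBag.RuntimeLib entries and loses the znode-version and error-reporting accessors that PackageManager relied on. A hedged sketch of how such a loader is assembled, assuming a SolrCore named core and a list of runtimeLib PluginInfo entries named libInfos are already available; this is illustrative wiring only, not how SolrCore itself builds its loader:

    import java.util.List;

    import org.apache.solr.core.MemClassLoader;
    import org.apache.solr.core.PluginBag;
    import org.apache.solr.core.PluginInfo;
    import org.apache.solr.core.SolrCore;

    class MemClassLoaderSketch {
      static MemClassLoader buildLoader(SolrCore core, List<PluginInfo> libInfos) {
        // wrap each runtimeLib PluginInfo in a RuntimeLib handle; jars are fetched lazily
        List<PluginBag.RuntimeLib> libs = PluginBag.RuntimeLib.getLibObjects(core, libInfos);
        // classes from the runtime jars are then resolved through this loader
        return new MemClassLoader(libs, core.getResourceLoader());
      }
    }
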
diff --git a/solr/core/src/java/org/apache/solr/core/PackageManager.java b/solr/core/src/java/org/apache/solr/core/PackageManager.java
deleted file mode 100644
index 7eb00a5..0000000
--- a/solr/core/src/java/org/apache/solr/core/PackageManager.java
+++ /dev/null
@@ -1,370 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.LinkedHashMap;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import org.apache.lucene.analysis.util.ResourceLoader;
-import org.apache.solr.api.Api;
-import org.apache.solr.api.V2HttpCall;
-import org.apache.solr.common.MapWriter;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ClusterPropertiesListener;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CoreAdminParams;
-import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.handler.RequestHandlerBase;
-import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.request.SolrRequestHandler;
-import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.schema.FieldType;
-import org.apache.solr.security.AuthorizationContext;
-import org.apache.solr.security.PermissionNameProvider;
-import org.apache.solr.util.plugin.PluginInfoInitialized;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.apache.solr.common.params.CommonParams.NAME;
-import static org.apache.solr.common.params.CommonParams.PACKAGE;
-import static org.apache.solr.common.params.CommonParams.VERSION;
-import static org.apache.solr.core.RuntimeLib.SHA256;
-
-public class PackageManager implements ClusterPropertiesListener {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private final CoreContainer coreContainer;
-
-  private Map<String, Package> pkgs = new HashMap<>();
-
-  final ExtHandler extHandler;
-  private int myversion = -1;
-
-  public int getZNodeVersion(String pkg) {
-    Package p = pkgs.get(pkg);
-    return p == null ? -1 : p.lib.getZnodeVersion();
-  }
-  public RuntimeLib getLib(String name){
-    Package p = pkgs.get(name);
-    return p == null? null: p.lib;
-  }
-
-  static class Package implements MapWriter {
-    final RuntimeLib lib;
-    final MemClassLoader loader;
-    final String name;
-
-    @Override
-    public void writeMap(EntryWriter ew) throws IOException {
-      lib.writeMap(ew);
-    }
-
-    Package(RuntimeLib lib, MemClassLoader loader, int zkVersion, String name) {
-      this.lib = lib;
-      this.loader = loader;
-      this.name = name;
-    }
-
-    public String getName() {
-      return name;
-    }
-
-
-    public boolean isModified(Map map) {
-      return (!Objects.equals(lib.getSha256(), (map).get(SHA256)) ||
-          !Objects.equals(lib.getSig(), (map).get(SHA256)));
-    }
-  }
-
-  PackageManager(CoreContainer coreContainer) {
-    this.coreContainer = coreContainer;
-    extHandler = new ExtHandler(this);
-  }
-
-
-  public <T> T newInstance(String cName, Class<T> expectedType, String pkg) {
-    try {
-      return coreContainer.getResourceLoader().newInstance(cName, expectedType,
-          null, new Class[]{CoreContainer.class}, new Object[]{coreContainer});
-    } catch (SolrException e) {
-      Package p = pkgs.get(pkg);
-
-      if (p != null) {
-        try {
-          Class<? extends T> klas = p.loader.findClass(cName, expectedType);
-          try {
-            return klas.getConstructor(CoreContainer.class).newInstance(coreContainer);
-          } catch (NoSuchMethodException ex) {
-            return klas.getConstructor().newInstance();
-          }
-        } catch (Exception ex) {
-          if (!p.loader.getErrors().isEmpty()) {
-            //some libraries were not loaded due to errors. Maybe the class was in one of those libraries
-            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                "There were errors loading some libraries: " + StrUtils.join(p.loader.getErrors(), ','), ex);
-          }
-          //there were no errors in loading any libraries. The class was probably not supposed to be there in those libraries
-          // so throw the original exception
-          throw e;
-        }
-      } else {
-        throw e;
-      }
-    }
-  }
-
-  @Override
-  public boolean onChange(Map<String, Object> properties) {
-    log.info("clusterprops.json changed , version {}", coreContainer.getZkController().getZkStateReader().getClusterPropsVersion());
-    int v = coreContainer.getZkController().getZkStateReader().getClusterPropsVersion();
-    boolean modified = updatePackages(properties, v);
-    extHandler.updateReqHandlers(properties, modified);
-    for (SolrCore core : coreContainer.solrCores.getCores()) {
-      pkgs.forEach((s, pkg) -> core.packageUpdated(pkg.lib));
-    }
-    myversion = v;
-    return false;
-  }
-
-
-  private boolean updatePackages(Map<String, Object> properties, int ver) {
-    Map m = (Map) properties.getOrDefault(PACKAGE, Collections.emptyMap());
-    if (pkgs.isEmpty() && m.isEmpty()) return false;
-    boolean[] needsReload = new boolean[1];
-    if (m.size() == pkgs.size()) {
-      m.forEach((k, v) -> {
-        if (v instanceof Map) {
-          Package pkg = pkgs.get(k);
-          if (pkg == null || pkg.isModified((Map) v)) {
-            needsReload[0] = true;
-          }
-        }
-      });
-    } else {
-      needsReload[0] = true;
-    }
-    if (needsReload[0]) {
-      createNewClassLoaders(m, ver);
-    }
-    return needsReload[0];
-  }
-
-  public ResourceLoader getResourceLoader(String pkg) {
-    Package p = pkgs.get(pkg);
-    return p == null ? coreContainer.getResourceLoader() : p.loader;
-  }
-
-  void createNewClassLoaders(Map m, int ver) {
-    boolean[] loadedAll = new boolean[1];
-    loadedAll[0] = true;
-    Map<String, Package> newPkgs = new LinkedHashMap<>();
-    m.forEach((k, v) -> {
-      if (v instanceof Map) {
-        Map map = new HashMap((Map) v);
-        map.put(CoreAdminParams.NAME, String.valueOf(k));
-        String name = (String) k;
-        Package existing = pkgs.get(name);
-        if (existing != null && !existing.isModified(map)) {
-          //this package has not changed
-          newPkgs.put(name, existing);
-        }
-
-        RuntimeLib lib = new RuntimeLib(coreContainer);
-        lib.znodeVersion = ver;
-        try {
-          lib.init(new PluginInfo(RuntimeLib.TYPE, map));
-          if (lib.getUrl() == null) {
-            log.error("Unable to initialize runtimeLib : " + Utils.toJSONString(v));
-            loadedAll[0] = false;
-          }
-          lib.loadJar();
-
-          newPkgs.put(name, new Package(lib,
-              new MemClassLoader(Collections.singletonList(lib), coreContainer.getResourceLoader()),
-              ver, name));
-        } catch (Exception e) {
-          log.error("error loading a runtimeLib " + Utils.toJSONString(v), e);
-          loadedAll[0] = false;
-
-        }
-      }
-    });
-
-    if (loadedAll[0]) {
-      log.info("Libraries changed. New memclassloader created with jars {}",
-          newPkgs.values().stream().map(it -> it.lib.getUrl()).collect(Collectors.toList()));
-      this.pkgs = newPkgs;
-
-    }
-  }
-
-  static class ExtHandler extends RequestHandlerBase implements PermissionNameProvider {
-    final PackageManager packageManager;
-
-    private Map<String, Handler> customHandlers = new HashMap<>();
-
-    ExtHandler(PackageManager packageManager) {
-      this.packageManager = packageManager;
-    }
-
-
-    @Override
-    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
-      int v = req.getParams().getInt(ConfigOverlay.ZNODEVER, -1);
-      if (v >= 0) {
-        log.debug("expected version : {} , my version {}", v, packageManager.myversion);
-        ZkStateReader zkStateReader = packageManager.coreContainer.getZkController().getZkStateReader();
-        try {
-          zkStateReader.forceRefreshClusterProps(v);
-        } catch (SolrException e) {
-          log.error("Error refreshing state ", e);
-          throw e;
-        }
-      }
-      rsp.add("metadata", (MapWriter) ew -> ew.putIfNotNull(VERSION,
-          packageManager.coreContainer.getZkController().zkStateReader.getClusterPropsVersion()));
-      rsp.add(RuntimeLib.TYPE, packageManager.pkgs.values());
-      rsp.add(SolrRequestHandler.TYPE, customHandlers.values());
-
-    }
-
-    @Override
-    public Collection<Api> getApis() {
-      return Collections.singleton(new Api(Utils.getSpec("node.ext")) {
-        @Override
-        public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
-          String name = ((V2HttpCall) req.getHttpSolrCall()).getUrlParts().get("handlerName");
-          if (name == null) {
-            handleRequestBody(req, rsp);
-            return;
-          }
-          Handler handler = customHandlers.get(name);
-          if (handler == null) {
-            String err = StrUtils.formatString(" No such handler: {0}, available handlers : {1}", name, customHandlers.keySet());
-            log.error(err);
-            throw new SolrException(SolrException.ErrorCode.NOT_FOUND, err);
-          }
-          handler.handler.handleRequest(req, rsp);
-        }
-      });
-    }
-
-    private void updateReqHandlers(Map<String, Object> properties, boolean forceReload) {
-      Map m = (Map) properties.getOrDefault(SolrRequestHandler.TYPE, Collections.emptyMap());
-      if (m.isEmpty() && customHandlers.isEmpty()) return;
-      boolean hasChanged = true;
-      if (customHandlers.size() == m.size() && customHandlers.keySet().containsAll(m.keySet())) hasChanged = false;
-      if (forceReload || hasChanged) {
-        log.debug("RequestHandlers being reloaded : {}", m.keySet());
-        Map<String, Handler> newCustomHandlers = new HashMap<>();
-        m.forEach((k, v) -> {
-          if (v instanceof Map) {
-            Map metaData = (Map) v;
-            Handler existing = customHandlers.get(k);
-            String name = (String) k;
-            if (existing == null || existing.shouldReload(metaData, packageManager.pkgs)) {
-              String klas = (String) metaData.get(FieldType.CLASS_NAME);
-              if (klas != null) {
-                String pkg = (String) metaData.get(PACKAGE);
-                SolrRequestHandler inst = packageManager.newInstance(klas, SolrRequestHandler.class, pkg);
-                if (inst instanceof PluginInfoInitialized) {
-                  ((PluginInfoInitialized) inst).init(new PluginInfo(SolrRequestHandler.TYPE, metaData));
-                }
-                Package p = packageManager.pkgs.get(pkg);
-                newCustomHandlers.put(name, new Handler(inst, pkg, p == null ? -1 : p.lib.getZnodeVersion(), metaData, name));
-              } else {
-                log.error("Invalid requestHandler {}", Utils.toJSONString(v));
-              }
-
-            } else {
-              newCustomHandlers.put(name, existing);
-            }
-
-          } else {
-            log.error("Invalid data for requestHandler : {} , {}", k, v);
-          }
-        });
-
-        log.debug("Registering request handlers {} ", newCustomHandlers.keySet());
-        Map<String, Handler> old = customHandlers;
-        customHandlers = newCustomHandlers;
-        old.forEach((s, h) -> PluginBag.closeQuietly(h));
-      }
-    }
-
-    @Override
-    public String getDescription() {
-      return "Custom Handlers";
-    }
-
-
-    @Override
-    public Boolean registerV1() {
-      return Boolean.FALSE;
-    }
-
-    @Override
-    public Boolean registerV2() {
-      return Boolean.TRUE;
-    }
-
-    @Override
-    public Name getPermissionName(AuthorizationContext request) {
-      if (request.getResource().endsWith("/node/ext")) return Name.COLL_READ_PERM;
-      return Name.CUSTOM_PERM;
-    }
-
-    static class Handler implements MapWriter {
-      final SolrRequestHandler handler;
-      final String pkg;
-      final int zkversion;
-      final Map meta;
-      final String name;
-
-      @Override
-      public void writeMap(EntryWriter ew) throws IOException {
-        ew.put(NAME, name);
-        ew.put(ConfigOverlay.ZNODEVER, zkversion);
-        meta.forEach(ew.getBiConsumer());
-      }
-
-      Handler(SolrRequestHandler handler, String pkg, int version, Map meta, String name) {
-        this.handler = handler;
-        this.pkg = pkg;
-        this.zkversion = version;
-        this.meta = Utils.getDeepCopy(meta, 3);
-        this.name = name;
-      }
-
-      public boolean shouldReload(Map metaData, Map<String, Package> pkgs) {
-        Package p = pkgs.get(pkg);
-        //the metadata is same and the package has not changed since we last loaded
-        return !meta.equals(metaData) || p == null || p.lib.getZnodeVersion() > zkversion;
-      }
-    }
-  }
-
-}
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index a9ea65b..6088f52 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -16,8 +16,11 @@
  */
 package org.apache.solr.core;
 
+import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -27,21 +30,24 @@
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
 
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
 import org.apache.solr.api.ApiSupport;
-import org.apache.solr.common.MapWriter;
+import org.apache.solr.cloud.CloudUtil;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
+import org.apache.solr.util.CryptoKeys;
+import org.apache.solr.util.SimplePostTool;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
 import org.apache.solr.util.plugin.SolrCoreAware;
@@ -50,6 +56,7 @@
 
 import static java.util.Collections.singletonMap;
 import static org.apache.solr.api.ApiBag.HANDLER_NAME;
+import static org.apache.solr.common.params.CommonParams.NAME;
 
 /**
  * This manages the lifecycle of a set of plugins of the same type.
@@ -117,36 +124,24 @@
     return result;
   }
 
-  private static <T> T createInitInstance(PluginInfo pluginInfo, SolrConfig.SolrPluginInfo pluginMeta,
-                                          SolrCore core, ResourceLoader resourceLoader,
-                                          boolean isRuntimeLib) {
-    T localInst = null;
-    try {
-      localInst = (T) SolrCore.createInstance(pluginInfo.className, pluginMeta.clazz, pluginMeta.getCleanTag(), core, resourceLoader);
-    } catch (SolrException e) {
-      if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) {
-        throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()),
-            e.getMessage() + ". runtime library loading is not enabled, start Solr with -Denable.runtime.lib=true",
-            e.getCause());
-      }
-      throw e;
+  public PluginHolder<T> createPlugin(PluginInfo info) {
+    if ("true".equals(String.valueOf(info.attributes.get("runtimeLib")))) {
+      log.debug(" {} : '{}'  created with runtimeLib=true ", meta.getCleanTag(), info.name);
+      LazyPluginHolder<T> holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ?
+          core.getMemClassLoader() :
+          core.getResourceLoader(), true);
 
-
+      return meta.clazz == UpdateRequestProcessorFactory.class ?
+          (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder(holder) :
+          holder;
+    } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) {
+      log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
+      return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
+    } else {
+      T inst = core.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader());
+      initInstance(inst, info);
+      return new PluginHolder<>(info, inst);
     }
-    initInstance(localInst, pluginInfo);
-    if (localInst instanceof SolrCoreAware) {
-      SolrResourceLoader.assertAwareCompatibility(SolrCoreAware.class, localInst);
-      ((SolrCoreAware) localInst).inform(core);
-    }
-    if (localInst instanceof ResourceLoaderAware) {
-      SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, localInst);
-      try {
-        ((ResourceLoaderAware) localInst).inform(core.getResourceLoader());
-      } catch (IOException e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error initializing component", e);
-      }
-    }
-    return localInst;
   }
 
   /** make a plugin available in an alternate name. This is an internal API and not for public use
@@ -200,7 +195,7 @@
     return old == null ? null : old.get();
   }
 
-  public PluginHolder<T> put(String name, PluginHolder<T> plugin) {
+  PluginHolder<T> put(String name, PluginHolder<T> plugin) {
     Boolean registerApi = null;
     Boolean disableHandler = null;
     if (plugin.pluginInfo != null) {
@@ -329,60 +324,13 @@
     }
   }
 
-  public static void closeQuietly(Object inst)  {
-    try {
-      if (inst != null && inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
-    } catch (Exception e) {
-      log.error("Error closing "+ inst , e);
-    }
-  }
-
-  public PluginHolder<T> createPlugin(PluginInfo info) {
-    String pkg = info.attributes.get(CommonParams.PACKAGE);
-    if (pkg != null) {
-      log.debug(" {} : '{}'  created with package={} ", meta.getCleanTag(), info.name, pkg);
-      PluginHolder<T> holder = new PackagePluginHolder<T>(info, core, meta);
-      return meta.clazz == UpdateRequestProcessorFactory.class ?
-          (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder((PluginHolder<UpdateRequestProcessorFactory>) holder) :
-          holder;
-
-    } else if (info.isRuntimePlugin()) {
-      log.debug(" {} : '{}'  created with runtimeLib=true ", meta.getCleanTag(), info.name);
-      LazyPluginHolder<T> holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ?
-          core.getMemClassLoader() :
-          core.getResourceLoader(), true);
-
-      return meta.clazz == UpdateRequestProcessorFactory.class ?
-          (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder((PluginHolder<UpdateRequestProcessorFactory>) holder) :
-          holder;
-    } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) {
-      log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
-      return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
-    } else {
-      T inst = SolrCore.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader());
-      initInstance(inst, info);
-      return new PluginHolder<>(info, inst);
-    }
-  }
-
-  public Api v2lookup(String path, String method, Map<String, String> parts) {
-    if (apiBag == null) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place");
-    }
-    return apiBag.lookup(path, method, parts);
-  }
-
-  public ApiBag getApiBag() {
-    return apiBag;
-  }
-
   /**
    * An indirect reference to a plugin. It just wraps a plugin instance.
    * subclasses may choose to lazily load the plugin
    */
   public static class PluginHolder<T> implements AutoCloseable {
+    private T inst;
     protected final PluginInfo pluginInfo;
-    T inst;
     boolean registerAPI = false;
 
     public PluginHolder(PluginInfo info) {
@@ -410,7 +358,7 @@
       // can close() be called concurrently with other methods?
       if (isLoaded()) {
         T myInst = get();
-        closeQuietly(myInst);
+        if (myInst != null && myInst instanceof AutoCloseable) ((AutoCloseable) myInst).close();
       }
     }
 
@@ -478,62 +426,209 @@
         MemClassLoader loader = (MemClassLoader) resourceLoader;
         loader.loadJars();
       }
-      lazyInst = createInitInstance(pluginInfo,pluginMeta,core,resourceLoader, isRuntimeLib);
+      Class<T> clazz = (Class<T>) pluginMeta.clazz;
+      T localInst = null;
+      try {
+        localInst = core.createInstance(pluginInfo.className, clazz, pluginMeta.getCleanTag(), null, resourceLoader);
+      } catch (SolrException e) {
+        if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) {
+          throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()),
+              e.getMessage() + ". runtime library loading is not enabled, start Solr with -Denable.runtime.lib=true",
+              e.getCause());
+        }
+        throw e;
+
+
+      }
+      initInstance(localInst, pluginInfo);
+      if (localInst instanceof SolrCoreAware) {
+        SolrResourceLoader.assertAwareCompatibility(SolrCoreAware.class, localInst);
+        ((SolrCoreAware) localInst).inform(core);
+      }
+      if (localInst instanceof ResourceLoaderAware) {
+        SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, localInst);
+        try {
+          ((ResourceLoaderAware) localInst).inform(core.getResourceLoader());
+        } catch (IOException e) {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error initializing component", e);
+        }
+      }
+      lazyInst = localInst;  // only assign the volatile until after the plugin is completely ready to use
       return true;
     }
 
 
   }
 
-  public class PackagePluginHolder<T> extends PluginHolder<T> {
-    private final SolrCore core;
-    private final SolrConfig.SolrPluginInfo pluginMeta;
-    private final PackageManager packageManager;
-    private final String pkg;
-    private RuntimeLib runtimeLib;
+  /**
+   * This represents a Runtime Jar. A jar requires two details , name and version
+   */
+  public static class RuntimeLib implements PluginInfoInitialized, AutoCloseable {
+    private String name, version, sig, sha512, url;
+    private BlobRepository.BlobContentRef<ByteBuffer> jarContent;
+    private final CoreContainer coreContainer;
+    private boolean verified = false;
 
-    public PackagePluginHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginInfo pluginMeta) {
-      super(info);
-      this.core = core;
-      this.pluginMeta = pluginMeta;
-      this.pkg = info.attributes.get(CommonParams.PACKAGE);
-      this.core.addPackageListener(new SolrCore.PkgListener() {
-        @Override
-        public String packageName() {
-          return pkg;
+    @Override
+    public void init(PluginInfo info) {
+      name = info.attributes.get(NAME);
+      url = info.attributes.get("url");
+      sig = info.attributes.get("sig");
+      if(url == null) {
+        Object v = info.attributes.get("version");
+        if (name == null || v == null) {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version");
         }
-
-        @Override
-        public PluginInfo pluginInfo() {
-          return info;
+        version = String.valueOf(v);
+      } else {
+        sha512 = info.attributes.get("sha512");
+        if(sha512 == null){
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha512' attribute");
         }
+        ByteBuffer buf = null;
+        buf = coreContainer.getBlobRepository().fetchFromUrl(name, url);
 
-        @Override
-        public MapWriter lib() {
-          return runtimeLib;
+        String digest = BlobRepository.sha512Digest(buf);
+        if(!sha512.equals(digest))  {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha512, digest)  );
         }
+        log.info("dynamic library verified {}, sha512: {}", url, sha512);
 
-        @Override
-        public void changed(RuntimeLib lib) {
-          int myVersion = runtimeLib == null? -1 : runtimeLib.znodeVersion;
-          if(lib.getZnodeVersion() > myVersion) reload();
-        }
-      });
-      this.packageManager = core.getCoreContainer().getPackageManager();
-      reload();
-    }
-
-
-    private void reload() {
-      if(inst == null) log.info("reloading plugin {} ", pluginInfo.name);
-      inst = createInitInstance(pluginInfo, pluginMeta,
-          core, packageManager.getResourceLoader(this.pkg), true);
-      this.runtimeLib = packageManager.getLib(pkg);
+      }
 
     }
 
+    public RuntimeLib(SolrCore core) {
+      coreContainer = core.getCoreContainer();
+    }
 
+    public String getUrl(){
+      return url;
+    }
+
+    void loadJar() {
+      if (jarContent != null) return;
+      synchronized (this) {
+        if (jarContent != null) return;
+
+        jarContent = url == null?
+            coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version):
+            coreContainer.getBlobRepository().getBlobIncRef(name, null,url,sha512);
+
+      }
+    }
+
+    public static boolean isEnabled() {
+      return Boolean.getBoolean("enable.runtime.lib");
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public String getVersion() {
+      return version;
+    }
+
+    public String getSig() {
+      return sig;
+
+    }
+
+    public ByteBuffer getFileContent(String entryName) throws IOException {
+      if (jarContent == null)
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name  );
+      return getFileContent(jarContent.blob, entryName);
+
+    }
+
+    public ByteBuffer getFileContent(BlobRepository.BlobContent<ByteBuffer> blobContent,  String entryName) throws IOException {
+      ByteBuffer buff = blobContent.get();
+      ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit());
+      ZipInputStream zis = new ZipInputStream(zipContents);
+      try {
+        ZipEntry entry;
+        while ((entry = zis.getNextEntry()) != null) {
+          if (entryName == null || entryName.equals(entry.getName())) {
+            SimplePostTool.BAOS out = new SimplePostTool.BAOS();
+            byte[] buffer = new byte[2048];
+            int size;
+            while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
+              out.write(buffer, 0, size);
+            }
+            out.close();
+            return out.getByteBuffer();
+          }
+        }
+      } finally {
+        zis.closeEntry();
+      }
+      return null;
+    }
+
+
+    @Override
+    public void close() throws Exception {
+      if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent);
+    }
+
+    public static List<RuntimeLib> getLibObjects(SolrCore core, List<PluginInfo> libs) {
+      List<RuntimeLib> l = new ArrayList<>(libs.size());
+      for (PluginInfo lib : libs) {
+        RuntimeLib rtl = new RuntimeLib(core);
+        try {
+          rtl.init(lib);
+        } catch (Exception e) {
+          log.error("error loading runtime library", e);
+        }
+        l.add(rtl);
+      }
+      return l;
+    }
+
+    public void verify() throws Exception {
+      if (verified) return;
+      if (jarContent == null) {
+        log.error("Calling verify before loading the jar");
+        return;
+      }
+
+      if (!coreContainer.isZooKeeperAware())
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud");
+      Map<String, byte[]> keys = CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe");
+      if (keys.isEmpty()) {
+        if (sig == null) {
+          verified = true;
+          return;
+        } else {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib  " + name);
+        }
+      } else if (sig == null) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name));
+      }
+
+      try {
+        String matchedKey = new CryptoKeys(keys).verify(sig, jarContent.blob.get());
+        if (matchedKey == null)
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
+        log.info("Jar {} signed with {} successfully verified", name, matchedKey);
+      } catch (Exception e) {
+        if (e instanceof SolrException) throw e;
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
+      }
+    }
   }
 
 
+  public Api v2lookup(String path, String method, Map<String, String> parts) {
+    if (apiBag == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place");
+    }
+    return apiBag.lookup(path, method, parts);
+  }
+
+  public ApiBag getApiBag() {
+    return apiBag;
+  }
+
 }
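
The restored PluginBag.RuntimeLib accepts two shapes of configuration: a blob-store reference (name plus version) or a direct url, and the url form must also carry a sha512 attribute that init() checks against the downloaded bytes before the jar is used. A sketch of the two attribute maps init() expects; every value below is made up, and the sha512 string is a placeholder rather than a real digest:

    import java.util.Map;

    class RuntimeLibAttributesSketch {
      public static void main(String[] args) {
        // blob-store form: the jar was uploaded to the blob store beforehand as "mylib", version 2
        Map<String, Object> blobForm = Map.of(
            "name", "mylib",
            "version", "2");

        // url form: the jar is fetched remotely, so a SHA-512 of its bytes is mandatory
        Map<String, Object> urlForm = Map.of(
            "name", "mylib",
            "url", "https://example.org/repo/mylib-2.0.jar",
            "sha512", "0000...placeholder, 128 hex chars...0000");

        // either map could back the PluginInfo whose attributes RuntimeLib.init() reads
        System.out.println(blobForm);
        System.out.println(urlForm);
      }
    }
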
diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
index e25bd92..1bc85ae 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
@@ -16,23 +16,14 @@
  */
 package org.apache.solr.core;
 
-import java.lang.invoke.MethodHandles;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
-
 import org.apache.solr.common.MapSerializable;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.util.DOMUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
+import java.util.*;
+
 import static java.util.Arrays.asList;
 import static java.util.Collections.unmodifiableList;
 import static java.util.Collections.unmodifiableMap;
@@ -40,26 +31,23 @@
 import static org.apache.solr.schema.FieldType.CLASS_NAME;
 
 /**
- * An Object which represents a Plugin of any type
+ * An Object which represents a Plugin of any type 
+ *
  */
 public class PluginInfo implements MapSerializable {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
   public final String name, className, type;
   public final NamedList initArgs;
   public final Map<String, String> attributes;
   public final List<PluginInfo> children;
   private boolean isFromSolrConfig;
 
-  public List<String> pathInConfig;
-
   public PluginInfo(String type, Map<String, String> attrs, NamedList initArgs, List<PluginInfo> children) {
     this.type = type;
     this.name = attrs.get(NAME);
     this.className = attrs.get(CLASS_NAME);
     this.initArgs = initArgs;
     attributes = unmodifiableMap(attrs);
-    this.children = children == null ? Collections.emptyList() : unmodifiableList(children);
+    this.children = children == null ? Collections.<PluginInfo>emptyList(): unmodifiableList(children);
     isFromSolrConfig = false;
   }
 
@@ -74,7 +62,7 @@
     isFromSolrConfig = true;
   }
 
-  public PluginInfo(String type, Map<String, Object> map) {
+  public PluginInfo(String type, Map<String,Object> map) {
     LinkedHashMap m = new LinkedHashMap<>(map);
     initArgs = new NamedList();
     for (Map.Entry<String, Object> entry : map.entrySet()) {
@@ -99,7 +87,7 @@
     this.name = (String) m.get(NAME);
     this.className = (String) m.get(CLASS_NAME);
     attributes = unmodifiableMap(m);
-    this.children = Collections.emptyList();
+    this.children =  Collections.<PluginInfo>emptyList();
     isFromSolrConfig = true;
   }
 
@@ -114,7 +102,7 @@
       PluginInfo pluginInfo = new PluginInfo(nd, null, false, false);
       if (pluginInfo.isEnabled()) children.add(pluginInfo);
     }
-    return children.isEmpty() ? Collections.emptyList() : unmodifiableList(children);
+    return children.isEmpty() ? Collections.<PluginInfo>emptyList() : unmodifiableList(children);
   }
 
   @Override
@@ -129,37 +117,37 @@
     return sb.toString();
   }
 
-  public boolean isEnabled() {
+  public boolean isEnabled(){
     String enable = attributes.get("enable");
-    return enable == null || Boolean.parseBoolean(enable);
+    return enable == null || Boolean.parseBoolean(enable); 
   }
 
   public boolean isDefault() {
     return Boolean.parseBoolean(attributes.get("default"));
   }
 
-  public PluginInfo getChild(String type) {
+  public PluginInfo getChild(String type){
     List<PluginInfo> l = getChildren(type);
-    return l.isEmpty() ? null : l.get(0);
+    return  l.isEmpty() ? null:l.get(0);
   }
 
   public Map<String, Object> toMap(Map<String, Object> map) {
     map.putAll(attributes);
     Map m = map;
-    if (initArgs != null) m.putAll(initArgs.asMap(3));
-    if (children != null) {
+    if(initArgs!=null ) m.putAll(initArgs.asMap(3));
+    if(children != null){
       for (PluginInfo child : children) {
         Object old = m.get(child.name);
-        if (old == null) {
+        if(old == null){
           m.put(child.name, child.toMap(new LinkedHashMap<>()));
         } else if (old instanceof List) {
           List list = (List) old;
           list.add(child.toMap(new LinkedHashMap<>()));
-        } else {
+        }  else {
           ArrayList l = new ArrayList();
           l.add(old);
           l.add(child.toMap(new LinkedHashMap<>()));
-          m.put(child.name, l);
+          m.put(child.name,l);
         }
       }
 
@@ -167,47 +155,36 @@
     return m;
   }
 
-  /**
-   * Filter children by type
-   *
+  /**Filter children by type
    * @param type The type name. must not be null
   * @return The matching children
    */
-  public List<PluginInfo> getChildren(String type) {
-    if (children.isEmpty()) return children;
+  public List<PluginInfo> getChildren(String type){
+    if(children.isEmpty()) return children;
     List<PluginInfo> result = new ArrayList<>();
-    for (PluginInfo child : children) if (type.equals(child.type)) result.add(child);
+    for (PluginInfo child : children) if(type.equals(child.type)) result.add(child);
     return result;
   }
-
-  public static final PluginInfo EMPTY_INFO = new PluginInfo("", Collections.emptyMap(), new NamedList(), Collections.emptyList());
+  public static final PluginInfo EMPTY_INFO = new PluginInfo("",Collections.<String,String>emptyMap(), new NamedList(),Collections.<PluginInfo>emptyList());
 
   private static final HashSet<String> NL_TAGS = new HashSet<>
-      (asList("lst", "arr",
-          "bool",
-          "str",
-          "int", "long",
-          "float", "double"));
+    (asList("lst", "arr",
+        "bool",
+        "str",
+        "int", "long",
+        "float", "double"));
   public static final String DEFAULTS = "defaults";
   public static final String APPENDS = "appends";
   public static final String INVARIANTS = "invariants";
 
-  public boolean isFromSolrConfig() {
+  public boolean isFromSolrConfig(){
     return isFromSolrConfig;
 
   }
-
   public PluginInfo copy() {
     PluginInfo result = new PluginInfo(type, attributes,
         initArgs != null ? initArgs.clone() : null, children);
     result.isFromSolrConfig = isFromSolrConfig;
-    result.pathInConfig = pathInConfig;
     return result;
   }
-
-  public boolean isRuntimePlugin() {
-    return "true".equals(String.valueOf(attributes.get(RuntimeLib.TYPE)))
-        || (attributes.get(CommonParams.PACKAGE) != null);
-  }
-
 }
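
PluginInfo.toMap() above folds attributes, init args, and child plugins into a single map, promoting repeated child names to a list. A standalone sketch of that merge rule using plain JDK collections follows; the class and method names are illustrative.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch of the child-merge rule in PluginInfo.toMap(): the first child with a
    // given name is stored as-is, later children with the same name promote the value to a List.
    final class ChildMergeSketch {
      @SuppressWarnings("unchecked")
      static void putChild(Map<String, Object> m, String name, Object childMap) {
        Object old = m.get(name);
        if (old == null) {
          m.put(name, childMap);
        } else if (old instanceof List) {
          ((List<Object>) old).add(childMap);
        } else {
          List<Object> l = new ArrayList<>();
          l.add(old);
          l.add(childMap);
          m.put(name, l);
        }
      }

      public static void main(String[] args) {
        Map<String, Object> m = new LinkedHashMap<>();
        putChild(m, "listener", Map.of("event", "newSearcher"));
        putChild(m, "listener", Map.of("event", "firstSearcher"));
        System.out.println(m); // {listener=[{event=newSearcher}, {event=firstSearcher}]}
      }
    }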
diff --git a/solr/core/src/java/org/apache/solr/core/RuntimeLib.java b/solr/core/src/java/org/apache/solr/core/RuntimeLib.java
deleted file mode 100644
index 1e1f5f7..0000000
--- a/solr/core/src/java/org/apache/solr/core/RuntimeLib.java
+++ /dev/null
@@ -1,227 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import java.io.ByteArrayInputStream;
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
-
-import org.apache.solr.cloud.CloudUtil;
-import org.apache.solr.common.MapWriter;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.util.CryptoKeys;
-import org.apache.solr.util.SimplePostTool;
-import org.apache.solr.util.plugin.PluginInfoInitialized;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static org.apache.solr.common.params.CommonParams.NAME;
-
-/**
- * This represents a Runtime Jar. A jar requires two details , name and version
- */
-public class RuntimeLib implements PluginInfoInitialized, AutoCloseable, MapWriter {
-  public static final String TYPE = "runtimeLib";
-  public static final String SHA256 = "sha256";
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private final CoreContainer coreContainer;
-  private String name, version, sig, sha256, url;
-  private BlobRepository.BlobContentRef<ByteBuffer> jarContent;
-  private boolean verified = false;
-  int znodeVersion = -1;
-
-  @Override
-  public void writeMap(EntryWriter ew) throws IOException {
-    ew.putIfNotNull(NAME, name);
-    ew.putIfNotNull("url", url);
-    ew.putIfNotNull(version, version);
-    ew.putIfNotNull("sha256", sha256);
-    ew.putIfNotNull("sig", sig);
-    if (znodeVersion > -1) {
-      ew.put(ConfigOverlay.ZNODEVER, znodeVersion);
-    }
-  }
-  public int getZnodeVersion(){
-    return znodeVersion;
-  }
-
-  public RuntimeLib(CoreContainer coreContainer) {
-    this.coreContainer = coreContainer;
-  }
-
-  public static boolean isEnabled() {
-    return "true".equals(System.getProperty("enable.runtime.lib"));
-  }
-
-  public static List<RuntimeLib> getLibObjects(SolrCore core, List<PluginInfo> libs) {
-    List<RuntimeLib> l = new ArrayList<>(libs.size());
-    for (PluginInfo lib : libs) {
-      RuntimeLib rtl = new RuntimeLib(core.getCoreContainer());
-      try {
-        rtl.init(lib);
-      } catch (Exception e) {
-        log.error("error loading runtime library", e);
-      }
-      l.add(rtl);
-    }
-    return l;
-  }
-
-  @Override
-  public void init(PluginInfo info) {
-    name = info.attributes.get(NAME);
-    url = info.attributes.get("url");
-    sig = info.attributes.get("sig");
-    if (url == null) {
-      Object v = info.attributes.get("version");
-      if (name == null || v == null) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version");
-      }
-      version = String.valueOf(v);
-    } else {
-      sha256 = info.attributes.get(SHA256);
-      if (sha256 == null) {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha256' attribute");
-      }
-      ByteBuffer buf = coreContainer.getBlobRepository().fetchFromUrl(name, url);
-
-      String digest = BlobRepository.sha256Digest(buf);
-      if (!sha256.equals(digest)) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha256, digest));
-      }
-      verifyJarSignature(buf);
-
-      log.debug("dynamic library verified {}, sha256: {}", url, sha256);
-
-    }
-
-  }
-
-  public String getUrl() {
-    return url;
-  }
-
-  void loadJar() {
-    if (jarContent != null) return;
-    synchronized (this) {
-      if (jarContent != null) return;
-
-      jarContent = url == null ?
-          coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version) :
-          coreContainer.getBlobRepository().getBlobIncRef(name, null, url, sha256);
-
-    }
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  public String getVersion() {
-    return version;
-  }
-
-  public String getSig() {
-    return sig;
-
-  }
-
-  public String getSha256() {
-    return sha256;
-  }
-
-  public ByteBuffer getFileContent(String entryName) throws IOException {
-    if (jarContent == null)
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name);
-    return getFileContent(jarContent.blob, entryName);
-
-  }
-
-  public ByteBuffer getFileContent(BlobRepository.BlobContent<ByteBuffer> blobContent, String entryName) throws IOException {
-    ByteBuffer buff = blobContent.get();
-    ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit());
-    ZipInputStream zis = new ZipInputStream(zipContents);
-    try {
-      ZipEntry entry;
-      while ((entry = zis.getNextEntry()) != null) {
-        if (entryName == null || entryName.equals(entry.getName())) {
-          SimplePostTool.BAOS out = new SimplePostTool.BAOS();
-          byte[] buffer = new byte[2048];
-          int size;
-          while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
-            out.write(buffer, 0, size);
-          }
-          out.close();
-          return out.getByteBuffer();
-        }
-      }
-    } finally {
-      zis.closeEntry();
-    }
-    return null;
-  }
-
-  @Override
-  public void close() throws Exception {
-    if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent);
-  }
-
-  public void verify() throws Exception {
-    if (verified) return;
-    if (jarContent == null) {
-      log.error("Calling verify before loading the jar");
-      return;
-    }
-
-    if (!coreContainer.isZooKeeperAware())
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud");
-    verifyJarSignature(jarContent.blob.get());
-  }
-
-  void verifyJarSignature(ByteBuffer buf) {
-    Map<String, byte[]> keys = CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe");
-    if (keys.isEmpty()) {
-      if (sig == null) {
-        verified = true;
-        return;
-      } else {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib  " + name);
-      }
-    } else if (sig == null) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name));
-    }
-
-    try {
-      String matchedKey = new CryptoKeys(keys).verify(sig, buf);
-      if (matchedKey == null)
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
-      log.info("Jar {} signed with {} successfully verified", name, matchedKey);
-    } catch (Exception e) {
-      log.error("Signature verifying error ", e);
-      if (e instanceof SolrException) throw (SolrException) e;
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
-    }
-  }
-}
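
For reference, the getFileContent method of the RuntimeLib class removed above located a single named entry inside an in-memory jar. The same can be done with plain java.util.zip, roughly as in this sketch; the helper name and byte[] handling are illustrative.

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.util.zip.ZipEntry;
    import java.util.zip.ZipInputStream;

    // Illustrative sketch: pull a single named entry out of a jar held in memory.
    final class JarEntrySketch {
      static byte[] readEntry(byte[] jarBytes, String entryName) throws IOException {
        try (ZipInputStream zis = new ZipInputStream(new ByteArrayInputStream(jarBytes))) {
          ZipEntry entry;
          while ((entry = zis.getNextEntry()) != null) {
            if (entryName.equals(entry.getName())) {
              ByteArrayOutputStream out = new ByteArrayOutputStream();
              byte[] buffer = new byte[2048];
              int size;
              while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
                out.write(buffer, 0, size);
              }
              return out.toByteArray();
            }
          }
        }
        return null; // entry not present
      }
    }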
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index f8189a6..9ddcaab 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -29,7 +29,6 @@
 import java.nio.file.Paths;
 import java.text.ParseException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -56,12 +55,12 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.IOUtils;
-import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.QueryResponseWriter;
 import org.apache.solr.response.transform.TransformerFactory;
 import org.apache.solr.rest.RestManager;
+import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.IndexSchemaFactory;
 import org.apache.solr.search.CacheConfig;
 import org.apache.solr.search.FastLRUCache;
@@ -207,7 +206,7 @@
     getOverlay();//just in case it is not initialized
     getRequestParams();
     initLibs();
-    luceneMatchVersion = SolrConfig.parseLuceneVersionString(getVal("luceneMatchVersion", true));
+    luceneMatchVersion = SolrConfig.parseLuceneVersionString(getVal(IndexSchema.LUCENE_MATCH_VERSION_PARAM, true));
     log.info("Using Lucene MatchVersion: {}", luceneMatchVersion);
 
     String indexConfigPrefix;
@@ -272,8 +271,7 @@
       args.put("size", "10000");
       args.put("initialSize", "10");
       args.put("showItems", "-1");
-      args.put("class", FastLRUCache.class.getName());
-      conf = new CacheConfig(args,"query/fieldValueCache");
+      conf = new CacheConfig(FastLRUCache.class, args, null);
     }
     fieldValueCacheConfig = conf;
     useColdSearcher = getBool("query/useColdSearcher", false);
@@ -296,11 +294,11 @@
     slowQueryThresholdMillis = getInt("query/slowQueryThresholdMillis", -1);
     for (SolrPluginInfo plugin : plugins) loadPluginInfo(plugin);
 
-    Map<String, CacheConfig> userCacheConfigs = CacheConfig.getConfigs(this, "query/cache");
+    Map<String, CacheConfig> userCacheConfigs = CacheConfig.getMultipleConfigs(this, "query/cache");
     List<PluginInfo> caches = getPluginInfos(SolrCache.class.getName());
     if (!caches.isEmpty()) {
       for (PluginInfo c : caches) {
-        userCacheConfigs.put(c.name, new CacheConfig(c.attributes, StrUtils.join(c.pathInConfig, '/')));
+        userCacheConfigs.put(c.name, CacheConfig.getConfig(this, "cache", c.attributes, null));
       }
     }
     this.userCacheConfigs = Collections.unmodifiableMap(userCacheConfigs);
@@ -374,17 +372,17 @@
       .add(new SolrPluginInfo(TransformerFactory.class, "transformer", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
       .add(new SolrPluginInfo(SearchComponent.class, "searchComponent", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
       .add(new SolrPluginInfo(UpdateRequestProcessorFactory.class, "updateProcessor", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
-      .add(new SolrPluginInfo(SolrCache.class, SolrCache.TYPE, REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
-      // TODO: WTF is up with queryConverter???
-      // it apparently *only* works as a singleton? - SOLR-4304
-      // and even then -- only if there is a single SpellCheckComponent
-      // because of queryConverter.setIndexAnalyzer
+      .add(new SolrPluginInfo(SolrCache.class, "cache", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
+          // TODO: WTF is up with queryConverter???
+          // it apparently *only* works as a singleton? - SOLR-4304
+          // and even then -- only if there is a single SpellCheckComponent
+          // because of queryConverter.setIndexAnalyzer
       .add(new SolrPluginInfo(QueryConverter.class, "queryConverter", REQUIRE_NAME, REQUIRE_CLASS))
-      .add(new SolrPluginInfo(RuntimeLib.class, RuntimeLib.TYPE, REQUIRE_NAME, MULTI_OK))
-      // this is hackish, since it picks up all SolrEventListeners,
-      // regardless of when/how/why they are used (or even if they are
-      // declared outside of the appropriate context) but there's no nice
-      // way around that in the PluginInfo framework
+      .add(new SolrPluginInfo(PluginBag.RuntimeLib.class, "runtimeLib", REQUIRE_NAME, MULTI_OK))
+          // this is hackish, since it picks up all SolrEventListeners,
+          // regardless of when/how/why they are used (or even if they are
+          // declared outside of the appropriate context) but there's no nice
+          // way around that in the PluginInfo framework
       .add(new SolrPluginInfo(InitParams.class, InitParams.TYPE, MULTI_OK, REQUIRE_NAME_IN_OVERLAY))
       .add(new SolrPluginInfo(SolrEventListener.class, "//listener", REQUIRE_CLASS, MULTI_OK, REQUIRE_NAME_IN_OVERLAY))
 
@@ -534,9 +532,6 @@
     NodeList nodes = (NodeList) evaluate(tag, XPathConstants.NODESET);
     for (int i = 0; i < nodes.getLength(); i++) {
       PluginInfo pluginInfo = new PluginInfo(nodes.item(i), "[solrconfig.xml] " + tag, requireName, requireClass);
-      if (requireName) {
-        pluginInfo.pathInConfig = Arrays.asList(tag, pluginInfo.name);
-      }
       if (pluginInfo.isEnabled()) result.add(pluginInfo);
     }
     return result;
@@ -611,7 +606,7 @@
           "cacheControl", cacheControlHeader);
     }
 
-    public enum LastModFrom {
+    public static enum LastModFrom {
       OPENTIME, DIRLASTMOD, BOGUS;
 
       /**
@@ -763,24 +758,20 @@
       Map<String, Map> infos = overlay.getNamedPlugins(info.getCleanTag());
       if (!infos.isEmpty()) {
         LinkedHashMap<String, PluginInfo> map = new LinkedHashMap<>();
-        if (result != null) {
-          for (PluginInfo pluginInfo : result) {
-            //just create a UUID for the time being so that map key is not null
-            String name = pluginInfo.name == null ?
-                UUID.randomUUID().toString().toLowerCase(Locale.ROOT) :
-                pluginInfo.name;
-            map.put(name, pluginInfo);
-          }
+        if (result != null) for (PluginInfo pluginInfo : result) {
+          //just create a UUID for the time being so that map key is not null
+          String name = pluginInfo.name == null ?
+              UUID.randomUUID().toString().toLowerCase(Locale.ROOT) :
+              pluginInfo.name;
+          map.put(name, pluginInfo);
         }
         for (Map.Entry<String, Map> e : infos.entrySet()) {
-          PluginInfo value = new PluginInfo(info.getCleanTag(), e.getValue());
-          value.pathInConfig = Arrays.asList(info.getCleanTag(),e.getKey());
-          map.put(e.getKey(), value);
+          map.put(e.getKey(), new PluginInfo(info.getCleanTag(), e.getValue()));
         }
         result = new ArrayList<>(map.values());
       }
     }
-    return result == null ? Collections.emptyList() : result;
+    return result == null ? Collections.<PluginInfo>emptyList() : result;
   }
 
   public PluginInfo getPluginInfo(String type) {
@@ -897,7 +888,7 @@
   @Override
   public Map<String, Object> toMap(Map<String, Object> result) {
     if (getZnodeVersion() > -1) result.put(ZNODEVER, getZnodeVersion());
-    result.put("luceneMatchVersion", luceneMatchVersion);
+    result.put(IndexSchema.LUCENE_MATCH_VERSION_PARAM, luceneMatchVersion);
     result.put("updateHandler", getUpdateHandlerInfo());
     Map m = new LinkedHashMap();
     result.put("query", m);
@@ -955,7 +946,7 @@
 
   private void addCacheConfig(Map queryMap, CacheConfig... cache) {
     if (cache == null) return;
-    for (CacheConfig config : cache) if (config != null) queryMap.put(config.getName(), config);
+    for (CacheConfig config : cache) if (config != null) queryMap.put(config.getNodeName(), config);
 
   }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index ef5fbb7..3e2fb1e 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -79,7 +79,6 @@
 import org.apache.solr.cloud.CloudDescriptor;
 import org.apache.solr.cloud.RecoveryStrategy;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
-import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClusterState;
@@ -194,8 +193,6 @@
 
   private boolean isReloaded = false;
 
-  private StatsCache statsCache;
-
   private final SolrConfig solrConfig;
   private final SolrResourceLoader resourceLoader;
   private volatile IndexSchema schema;
@@ -239,17 +236,11 @@
   public volatile boolean searchEnabled = true;
   public volatile boolean indexEnabled = true;
   public volatile boolean readOnly = false;
-  private List<PkgListener> packageListeners = new ArrayList<>();
-
 
   public Set<String> getMetricNames() {
     return metricNames;
   }
 
-  public List<PkgListener> getPackageListeners(){
-    return Collections.unmodifiableList(packageListeners);
-  }
-
   public Date getStartTimeStamp() {
     return startTime;
   }
@@ -360,26 +351,6 @@
     }
   }
 
-  void packageUpdated(RuntimeLib lib) {
-    for (PkgListener listener : packageListeners) {
-      if(lib.getName().equals(listener.packageName())) listener.changed(lib);
-    }
-  }
-  public void addPackageListener(PkgListener listener){
-    packageListeners.add(listener);
-  }
-
-  public interface PkgListener {
-
-    String packageName();
-
-    PluginInfo pluginInfo();
-
-    void changed(RuntimeLib lib);
-
-    MapWriter lib();
-  }
-
 
   /**
    * Returns the indexdir as given in index.properties. If index.properties exists in dataDir and
@@ -865,7 +836,7 @@
       for (Constructor<?> con : cons) {
         Class<?>[] types = con.getParameterTypes();
         if (types.length == 2 && types[0] == SolrCore.class && types[1] == UpdateHandler.class) {
-          return (UpdateHandler) con.newInstance(this, updateHandler);
+          return UpdateHandler.class.cast(con.newInstance(this, updateHandler));
         }
       }
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error Instantiating " + msg + ", " + className + " could not find proper constructor for " + UpdateHandler.class.getName());
@@ -885,12 +856,7 @@
 
   public <T extends Object> T createInitInstance(PluginInfo info, Class<T> cast, String msg, String defClassName) {
     if (info == null) return null;
-    String pkg = info.attributes.get(CommonParams.PACKAGE);
-    ResourceLoader resourceLoader = pkg != null?
-        coreContainer.getPackageManager().getResourceLoader(pkg):
-        getResourceLoader();
-
-    T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, resourceLoader);
+    T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, getResourceLoader());
     if (o instanceof PluginInfoInitialized) {
       ((PluginInfoInitialized) o).init(info);
     } else if (o instanceof NamedListInitializedPlugin) {
@@ -998,7 +964,7 @@
       this.codec = initCodec(solrConfig, this.schema);
 
       memClassLoader = new MemClassLoader(
-          RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(RuntimeLib.class.getName())),
+          PluginBag.RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(PluginBag.RuntimeLib.class.getName())),
           getResourceLoader());
       initIndex(prev != null, reload);
 
@@ -1014,8 +980,6 @@
       reqHandlers = new RequestHandlers(this);
       reqHandlers.initHandlersFromConfig(solrConfig);
 
-      statsCache = initStatsCache();
-
       // cause the executor to stall so firstSearcher events won't fire
       // until after inform() has been called for all components.
       // searchExecutor must be single-threaded for this to work
@@ -1449,7 +1413,10 @@
     return factory.getCodec();
   }
 
-  private StatsCache initStatsCache() {
+  /**
+   * Create an instance of {@link StatsCache} using configured parameters.
+   */
+  public StatsCache createStatsCache() {
     final StatsCache cache;
     PluginInfo pluginInfo = solrConfig.getPluginInfo(StatsCache.class.getName());
     if (pluginInfo != null && pluginInfo.className != null && pluginInfo.className.length() > 0) {
@@ -1464,13 +1431,6 @@
   }
 
   /**
-   * Get the StatsCache.
-   */
-  public StatsCache getStatsCache() {
-    return statsCache;
-  }
-
-  /**
    * Load the request processors
    */
   private Map<String, UpdateRequestProcessorChain> loadUpdateProcessorChains() {
@@ -2437,6 +2397,7 @@
 
       if (!success) {
         newSearcherOtherErrorsCounter.inc();
         synchronized (searcherLock) {
           onDeckSearchers--;
 
@@ -3139,7 +3100,8 @@
     try {
       Stat stat = zkClient.exists(zkPath, null, true);
       if (stat == null) {
-        return currentVersion > -1;
+        if (currentVersion > -1) return true;
+        return false;
       }
       if (stat.getVersion() > currentVersion) {
         log.debug("{} is stale will need an update from {} to {}", zkPath, currentVersion, stat.getVersion());
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index b3dc5e4..4132918 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -52,6 +52,7 @@
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
+import com.google.common.annotations.VisibleForTesting;
 import org.apache.lucene.analysis.WordlistLoader;
 import org.apache.lucene.analysis.util.CharFilterFactory;
 import org.apache.lucene.analysis.util.ResourceLoader;
@@ -479,6 +480,11 @@
    */
   private static final Map<String, String> classNameCache = new ConcurrentHashMap<>();
 
+  @VisibleForTesting
+  static void clearCache() {
+    classNameCache.clear();
+  }
+
   // Using this pattern, legacy analysis components from previous Solr versions are identified and delegated to SPI loader:
   private static final Pattern legacyAnalysisPattern = 
       Pattern.compile("((\\Q"+base+".analysis.\\E)|(\\Q"+project+".\\E))([\\p{L}_$][\\p{L}\\p{N}_$]+?)(TokenFilter|Filter|Tokenizer|CharFilter)Factory");
@@ -506,11 +512,11 @@
       if(c != null) {
         try {
           return Class.forName(c, true, classLoader).asSubclass(expectedType);
-        } catch (ClassNotFoundException e) {
-          //this is unlikely
-          log.error("Unable to load cached class-name :  "+ c +" for shortname : "+cname + e);
+        } catch (ClassNotFoundException | ClassCastException e) {
+          // this can happen if the legacyAnalysisPattern below caches the wrong thing
+          log.warn("Unable to load cached class, attempting lookup. name={} shortname={} reason={}", c, cname, e);
+          classNameCache.remove(cname);
         }
-
       }
     }
     
@@ -576,8 +582,8 @@
       }
     }
   }
-
-  static final String[] empty = new String[0];
+  
+  static final String empty[] = new String[0];
   
   @Override
   public <T> T newInstance(String name, Class<T> expectedType) {
@@ -808,7 +814,6 @@
    * manipulated using select Solr features (e.g. streaming expressions).
    */
   public static final String USER_FILES_DIRECTORY = "userfiles";
-  public static final String BLOBS_DIRECTORY = "blobs";
   public static void ensureUserFilesDataDir(Path solrHome) {
     final Path userFilesPath = getUserFilesPath(solrHome);
     final File userFilesDirectory = new File(userFilesPath.toString());
@@ -824,28 +829,10 @@
     }
   }
 
-  public static void ensureBlobsDir(Path solrHome) {
-    final Path blobsDir = getBlobsDirPath(solrHome);
-    final File blobsFilesDirectory = new File(blobsDir.toString());
-    if (! blobsFilesDirectory.exists()) {
-      try {
-        final boolean created = blobsFilesDirectory.mkdir();
-        if (! created) {
-          log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", BLOBS_DIRECTORY, solrHome);
-        }
-      } catch (Exception e) {
-          log.warn("Unable to create [" + BLOBS_DIRECTORY + "] directory in SOLR_HOME [" + solrHome + "].  Features requiring this directory may fail.", e);
-      }
-    }
-  }
-
-  public static Path getBlobsDirPath(Path solrHome) {
-    return Paths.get(solrHome.toAbsolutePath().toString(), BLOBS_DIRECTORY).toAbsolutePath();
-  }
-
   public static Path getUserFilesPath(Path solrHome) {
     return Paths.get(solrHome.toAbsolutePath().toString(), USER_FILES_DIRECTORY).toAbsolutePath();
   }
+
   // Logs a message only once per startup
   private static void logOnceInfo(String key, String msg) {
     if (!loggedOnce.contains(key)) {
@@ -942,7 +929,7 @@
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
         }
       }
-      try (OutputStream out = new FileOutputStream(confFile)) {
+      try (OutputStream out = new FileOutputStream(confFile);) {
         out.write(content);
       }
       log.info("Written confile " + resourceName);
diff --git a/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
new file mode 100644
index 0000000..910f29b
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/filestore/DistribPackageStore.java
@@ -0,0 +1,495 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.filestore;
+
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Consumer;
+import java.util.function.Predicate;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.lucene.util.IOUtils;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.BlobRepository;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.filestore.PackageStoreAPI.MetaData;
+import org.apache.zookeeper.server.ByteBufferInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.SolrException.ErrorCode.BAD_REQUEST;
+import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
+
+
+public class DistribPackageStore implements PackageStore {
+  static final long MAX_PKG_SIZE = Long.parseLong(System.getProperty("max.file.store.size", String.valueOf(100 * 1024 * 1024)));
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private final CoreContainer coreContainer;
+  private Map<String, FileInfo> tmpFiles = new ConcurrentHashMap<>();
+  public DistribPackageStore(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+    ensurePackageStoreDir(coreContainer.getResourceLoader().getInstancePath());
+
+  }
+
+  private String myNode() {
+    return coreContainer.getZkController().getNodeName();
+  }
+
+
+  /**
+   * Get a list of live nodes, randomly shuffled
+   * @lucene.internal
+   */
+  public ArrayList<String> shuffledNodes() {
+    Set<String> liveNodes = coreContainer.getZkController().getZkStateReader().getClusterState().getLiveNodes();
+    ArrayList<String> l = new ArrayList(liveNodes);
+    l.remove(myNode());
+    Collections.shuffle(l, BlobRepository.RANDOM);
+    return l;
+  }
+
+
+  @Override
+  public Path getRealpath(String path) {
+    if (File.separatorChar == '\\') {
+      path = path.replaceAll("/", File.separator);
+    }
+    if (path.charAt(0) != File.separatorChar) {
+      path = File.separator + path;
+    }
+    return new File(this.coreContainer.getResourceLoader().getInstancePath() +
+        "/" + PackageStoreAPI.PACKAGESTORE_DIRECTORY + path).toPath();
+  }
+
+  class FileInfo {
+    final String path;
+    String metaPath;
+    ByteBuffer fileData, metaData;
+
+
+    FileInfo(String path) {
+      this.path = path;
+    }
+
+    public String getMetaPath() {
+      if (metaPath == null) {
+        int idx = path.lastIndexOf('/');
+        metaPath = path.substring(0, idx + 1) + "." + path.substring(idx + 1) + ".json";
+      }
+      return metaPath;
+    }
+
+
+    private void persistToFile(ByteBuffer data, ByteBuffer meta) throws IOException {
+      synchronized (DistribPackageStore.this) {
+        this.metaData = meta;
+        this.fileData = data;
+        Path realpath = getRealpath(path);
+        File file = realpath.toFile();
+        File parent = file.getParentFile();
+        if (!parent.exists()) {
+          parent.mkdirs();
+        }
+        Map m = (Map) Utils.fromJSON(meta.array());
+        if (m == null || m.isEmpty()) {
+          throw new SolrException(SERVER_ERROR, "invalid metadata , discarding : " + path);
+        }
+
+
+        File metadataFile = getRealpath(getMetaPath()).toFile();
+
+        try (FileOutputStream fos = new FileOutputStream(metadataFile)) {
+          fos.write(meta.array(), 0, meta.limit());
+        }
+        IOUtils.fsync(metadataFile.toPath(), false);
+
+        try (FileOutputStream fos = new FileOutputStream(file)) {
+          fos.write(data.array(), 0, data.limit());
+        }
+        log.info("persisted a file {} and metadata. sizes {} {}", path, data.limit(), meta.limit());
+        IOUtils.fsync(file.toPath(), false);
+      }
+    }
+
+
+    public boolean exists(boolean validateContent, boolean fetchMissing) throws IOException {
+      File file = getRealpath(path).toFile();
+      if (!file.exists()) {
+        if (fetchMissing) {
+          return fetchFromAnyNode();
+        } else {
+          return false;
+        }
+      }
+
+      if (validateContent) {
+        MetaData metaData = readMetaData();
+        if (metaData == null) return false;
+        try (InputStream is = new FileInputStream(getRealpath(path).toFile())) {
+          if (!Objects.equals(DigestUtils.sha512Hex(is), metaData.sha512)) {
+            deleteFile();
+          } else {
+            return true;
+          }
+        } catch (Exception e) {
+          throw new SolrException(SERVER_ERROR, "unable to parse metadata json file");
+        }
+      } else {
+        return true;
+      }
+
+      return false;
+    }
+
+    private void deleteFile() {
+      try {
+        IOUtils.deleteFilesIfExist(getRealpath(path), getRealpath(getMetaPath()));
+      } catch (IOException e) {
+        log.error("Unable to delete files: "+path);
+      }
+
+    }
+
+    private boolean fetchFileFromNodeAndPersist(String fromNode) {
+      log.info("fetching a file {} from {} ", path, fromNode);
+      String url = coreContainer.getZkController().getZkStateReader().getBaseUrlForNodeName(fromNode);
+      if (url == null) throw new SolrException(BAD_REQUEST, "No such node");
+      String baseUrl = url.replace("/solr", "/api");
+
+      ByteBuffer metadata = null;
+      Map m = null;
+      try {
+        metadata = Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
+            baseUrl + "/node/files" + getMetaPath(),
+            Utils.newBytesConsumer((int) MAX_PKG_SIZE));
+        m = (Map) Utils.fromJSON(metadata.array());
+      } catch (SolrException e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error fetching metadata", e);
+      }
+
+      try {
+        ByteBuffer filedata = Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(),
+            baseUrl + "/node/files" + path,
+            Utils.newBytesConsumer((int) MAX_PKG_SIZE));
+        String sha512 = DigestUtils.sha512Hex(new ByteBufferInputStream(filedata));
+        String expected = (String) m.get("sha512");
+        if (!sha512.equals(expected)) {
+          throw new SolrException(SERVER_ERROR, "sha512 mismatch downloading : " + path + " from node : " + fromNode);
+        }
+        persistToFile(filedata, metadata);
+        return true;
+      } catch (SolrException e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error fetching data", e);
+      } catch (IOException ioe) {
+        throw new SolrException(SERVER_ERROR, "Error persisting file", ioe);
+      }
+
+    }
+
+    boolean fetchFromAnyNode() {
+
+      ArrayList<String> l = shuffledNodes();
+      ZkStateReader stateReader = coreContainer.getZkController().getZkStateReader();
+      for (String liveNode : l) {
+        try {
+          String baseurl = stateReader.getBaseUrlForNodeName(liveNode);
+          String url = baseurl.replace("/solr", "/api");
+          String reqUrl = url + "/node/files" + path +
+              "?meta=true&wt=javabin&omitHeader=true";
+          boolean nodeHasBlob = false;
+          Object nl = Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), reqUrl, Utils.JAVABINCONSUMER);
+          if (Utils.getObjectByPath(nl, false, Arrays.asList("files", getMetaPath())) != null) {
+            nodeHasBlob = true;
+          }
+
+          if (nodeHasBlob) {
+            boolean success = fetchFileFromNodeAndPersist(liveNode);
+            if (success) return true;
+          }
+        } catch (Exception e) {
+          //it's OK for some nodes to fail
+        }
+      }
+
+      return false;
+    }
+
+    String getSimpleName() {
+      int idx = path.lastIndexOf("/");
+      if (idx == -1) return path;
+      return path.substring(idx + 1);
+    }
+
+    public Path realPath() {
+      return getRealpath(path);
+    }
+
+    MetaData readMetaData() throws IOException {
+      File file = getRealpath(getMetaPath()).toFile();
+      if (file.exists()) {
+        try (InputStream fis = new FileInputStream(file)) {
+          return new MetaData((Map) Utils.fromJSON(fis));
+        }
+      }
+      return null;
+
+    }
+
+
+
+
+    public FileDetails getDetails() {
+      FileType type = getType(path);
+
+      return new FileDetails() {
+        @Override
+        public MetaData getMetaData() {
+          try {
+            return readMetaData();
+          } catch (Exception e){
+            throw new RuntimeException(e);
+          }
+        }
+
+        @Override
+        public Date getTimeStamp() {
+          return new Date(realPath().toFile().lastModified());
+        }
+
+        @Override
+        public boolean isDir() {
+          return type == FileType.DIRECTORY;
+        }
+
+        @Override
+        public void writeMap(EntryWriter ew) throws IOException {
+          MetaData metaData = readMetaData();
+          ew.put(CommonParams.NAME, getSimpleName());
+          if (type == FileType.DIRECTORY) {
+            ew.put("dir", true);
+            return;
+          }
+          ew.put("timestamp", getTimeStamp());
+          metaData.writeMap(ew);
+
+        }
+      };
+
+
+    }
+
+    public void readData(Consumer<FileEntry> consumer) throws IOException {
+      MetaData meta = readMetaData();
+      try (InputStream is = new FileInputStream(realPath().toFile())) {
+        consumer.accept(new FileEntry(null, meta,path ){
+          @Override
+          public InputStream getInputStream() {
+            return is;
+          }
+        });
+      }
+    }
+  }
+
+
+  @Override
+  public void put(FileEntry entry) throws IOException {
+    FileInfo info = new FileInfo(entry.path);
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+    Utils.writeJson(entry.getMetaData(), baos, true);
+    byte[] bytes = baos.toByteArray();
+    info.persistToFile(entry.buf, ByteBuffer.wrap(bytes, 0, bytes.length));
+    tmpFiles.put(entry.getPath(), info);
+    List<String> nodes = shuffledNodes();
+    int i = 0;
+    int FETCHFROM_SRC = 50;
+    String myNodeName = myNode();
+    try {
+      for (String node : nodes) {
+        String baseUrl = coreContainer.getZkController().getZkStateReader().getBaseUrlForNodeName(node);
+        String url = baseUrl.replace("/solr", "/api") + "/node/files" + entry.getPath() + "?getFrom=";
+        if (i < FETCHFROM_SRC) {
+          // this is to protect very large clusters from overwhelming a single node
+          // the first FETCHFROM_SRC nodes will be asked to fetch from this node.
+          // it is still in memory on this node, so it can be served quickly
+          url += myNodeName;
+        } else {
+          if (i == FETCHFROM_SRC) {
+            // This is just an optimization
+            // at this point a bunch of nodes are already downloading from me
+            // I'll wait for them to finish before asking other nodes to download from each other
+            try {
+              Thread.sleep(2 * 1000);
+            } catch (Exception e) {
+            }
+          }
+          // trying to avoid the thundering herd problem when there are a very large number of nodes
+          // others should try to fetch it from any node where it is available. By now,
+          // almost FETCHFROM_SRC other nodes may have it
+          url += "*";
+        }
+        try {
+          //fire and forget
+          Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), url, null);
+        } catch (Exception e) {
+          log.info("Node: " + node +
+              " failed to respond for blob notification", e);
+          //ignore the exception
+          // some nodes may be down or not responding
+        }
+        i++;
+      }
+    } finally {
+      new Thread(() -> {
+        try {
+          // keep the jar in memory for 10 secs, so that
+          // every node can download it from memory without hitting the file system
+          Thread.sleep(10 * 1000);
+        } catch (Exception e) {
+          //don't care
+        } finally {
+          tmpFiles.remove(entry.getPath());
+        }
+      }).start();
+
+
+    }
+
+  }
+
+  @Override
+  public synchronized boolean fetch(String path, String from) {
+    if (path == null || path.isEmpty()) return false;
+    FileInfo f = new FileInfo(path);
+    try {
+      if(f.exists(true, false)){
+        return true;
+      }
+    } catch (IOException e) {
+      log.error("Error fetching file ", e);
+      return false;
+
+    }
+
+    if (from == null || "*".equals(from)) {
+      f.fetchFromAnyNode();
+
+    } else {
+      f.fetchFileFromNodeAndPersist(from);
+    }
+
+    return false;
+  }
+
+  @Override
+  public synchronized void get(String path, Consumer<FileEntry> consumer) throws IOException {
+    File file = getRealpath(path).toFile();
+    String simpleName = file.getName();
+    if (isMetaDataFile(simpleName)) {
+      try (InputStream is = new FileInputStream(file)) {
+        consumer.accept(new FileEntry(null, null, path) {
+          //no metadata for metadata file
+          @Override
+          public InputStream getInputStream() {
+            return is;
+          }
+        });
+      }
+      return;
+    }
+
+    new FileInfo(path).readData(consumer);
+  }
+
+
+  @Override
+  public synchronized List list(String path, Predicate<String> predicate) {
+    File file = getRealpath(path).toFile();
+    List<FileDetails> fileDetails = new ArrayList<>();
+    FileType type = getType(path);
+    if (type == FileType.DIRECTORY) {
+      file.list((dir, name) -> {
+        if (predicate == null || predicate.test(name)) {
+          if (!isMetaDataFile(name)) {
+            fileDetails.add(new FileInfo(path + "/" + name).getDetails());
+          }
+        }
+        return false;
+      });
+
+    } else if (type == FileType.FILE) {
+
+      fileDetails.add(new FileInfo(path).getDetails());
+    }
+
+    return fileDetails;
+  }
+
+
+  @Override
+  public synchronized FileType getType(String path) {
+    File file = getRealpath(path).toFile();
+    if (!file.exists()) return FileType.NOFILE;
+    if (file.isDirectory()) return FileType.DIRECTORY;
+    return isMetaDataFile(file.getName()) ? FileType.METADATA : FileType.FILE;
+  }
+
+  private boolean isMetaDataFile(String file) {
+    return file.charAt(0) == '.' && file.endsWith(".json");
+  }
+
+  private void ensurePackageStoreDir(Path solrHome) {
+    final File packageStoreDir = getPackageStoreDirPath(solrHome).toFile();
+    if (!packageStoreDir.exists()) {
+      try {
+        final boolean created = packageStoreDir.mkdirs();
+        if (!created) {
+          log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", packageStoreDir, solrHome);
+        }
+      } catch (Exception e) {
+        log.warn("Unable to create [" + packageStoreDir + "] directory in SOLR_HOME [" + solrHome + "].  Features requiring this directory may fail.", e);
+      }
+    }
+  }
+
+  public static Path getPackageStoreDirPath(Path solrHome) {
+    return Paths.get(solrHome.toAbsolutePath().toString(), PackageStoreAPI.PACKAGESTORE_DIRECTORY).toAbsolutePath();
+  }
+}
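
DistribPackageStore stores, next to every file, a hidden ".<name>.json" metadata document that records the payload's sha512. A minimal sketch of the naming rule and the integrity check, using the same commons-codec DigestUtils as the class above; the helper names are illustrative.

    import java.io.IOException;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Path;
    import java.util.Objects;

    import org.apache.commons.codec.digest.DigestUtils;

    // Illustrative helpers mirroring the conventions above: the metadata path for
    // "/mypkg/runtimelibs.jar" is "/mypkg/.runtimelibs.jar.json", and a stored file
    // is trusted only if its sha512 matches the one recorded in that metadata.
    final class FileStoreSketch {
      static String metaPath(String path) {
        int idx = path.lastIndexOf('/');
        return path.substring(0, idx + 1) + "." + path.substring(idx + 1) + ".json";
      }

      static boolean matchesSha512(Path file, String expectedSha512) throws IOException {
        try (InputStream is = Files.newInputStream(file)) {
          return Objects.equals(DigestUtils.sha512Hex(is), expectedSha512);
        }
      }
    }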
diff --git a/solr/core/src/java/org/apache/solr/filestore/PackageStore.java b/solr/core/src/java/org/apache/solr/filestore/PackageStore.java
new file mode 100644
index 0000000..b9be691
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/filestore/PackageStore.java
@@ -0,0 +1,122 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.filestore;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.ByteBuffer;
+import java.nio.file.Path;
+import java.util.Date;
+import java.util.List;
+import java.util.function.Consumer;
+import java.util.function.Predicate;
+
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.filestore.PackageStoreAPI.MetaData;
+import org.apache.zookeeper.server.ByteBufferInputStream;
+
+/**
+ * The interface to be implemented by any package store provider
+ * @lucene.experimental
+ */
+public interface PackageStore {
+
+  /**
+   * Store a file into the filestore. This should ensure that it is replicated
+   * across all nodes in the cluster
+   */
+  void put(FileEntry fileEntry) throws IOException;
+
+  /**
+   * read file content from a given path
+   */
+  void get(String path, Consumer<FileEntry> filecontent) throws IOException;
+
+  /**
+   * Fetch a resource from another node
+   * @lucene.internal
+   */
+  boolean fetch(String path, String from);
+
+  List<FileDetails> list(String path, Predicate<String> predicate);
+
+  /**
+   * get the real path on filesystem
+   */
+  Path getRealpath(String path);
+
+  /**
+   * The type of the resource
+   */
+  FileType getType(String path);
+
+  public class FileEntry {
+    final ByteBuffer buf;
+    final MetaData meta;
+    final String path;
+
+    FileEntry(ByteBuffer buf, MetaData meta, String path) {
+      this.buf = buf;
+      this.meta = meta;
+      this.path = path;
+    }
+
+    public String getPath() {
+      return path;
+    }
+
+
+    public InputStream getInputStream() {
+      if (buf != null) return new ByteBufferInputStream(buf);
+      return null;
+
+    }
+
+    /**
+     * For very large files, only a stream may be available,
+     * in which case this method returns null.
+     */
+    public ByteBuffer getBuffer() {
+      return buf;
+
+    }
+
+    public MetaData getMetaData() {
+      return meta;
+    }
+
+
+  }
+
+  enum FileType {
+    FILE, DIRECTORY, NOFILE, METADATA
+  }
+
+  interface FileDetails extends MapWriter {
+
+    MetaData getMetaData();
+
+    Date getTimeStamp();
+
+    boolean isDir();
+
+
+  }
+
+
+}
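
A short usage sketch for the PackageStore interface above: get() hands the caller a FileEntry whose stream is only guaranteed to be valid inside the consumer, so any bytes that must outlive the call have to be copied there. The helper below is illustrative; commons-io is already used elsewhere in this patch.

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.UncheckedIOException;

    import org.apache.commons.io.IOUtils;
    import org.apache.solr.filestore.PackageStore;

    // Illustrative usage: copy a stored file's bytes out through the Consumer-based read API.
    final class PackageStoreUsageSketch {
      static byte[] readAll(PackageStore store, String path) throws IOException {
        byte[][] holder = new byte[1][];
        store.get(path, entry -> {
          try {
            InputStream is = entry.getInputStream();   // valid only inside this callback
            holder[0] = IOUtils.toByteArray(is);
          } catch (IOException e) {
            throw new UncheckedIOException(e);
          }
        });
        return holder[0];
      }
    }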
diff --git a/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
new file mode 100644
index 0000000..71ee9d8
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/filestore/PackageStoreAPI.java
@@ -0,0 +1,273 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.filestore;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.solr.api.Command;
+import org.apache.solr.api.EndPoint;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.cloud.CloudUtil;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrCore;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.PermissionNameProvider;
+import org.apache.solr.util.CryptoKeys;
+import org.apache.solr.util.SimplePostTool;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.KeeperException;
+import org.apache.zookeeper.server.ByteBufferInputStream;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;
+
+
+public class PackageStoreAPI {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  public static final String PACKAGESTORE_DIRECTORY = "filestore";
+
+
+  private final CoreContainer coreContainer;
+  PackageStore packageStore;
+  public final FSRead readAPI = new FSRead();
+  public final FSWrite writeAPI = new FSWrite();
+
+  public PackageStoreAPI(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+    packageStore = new DistribPackageStore(coreContainer);
+  }
+
+  public PackageStore getPackageStore() {
+    return packageStore;
+  }
+
+  @EndPoint(
+      path = "/cluster/files/*",
+      method = SolrRequest.METHOD.PUT,
+      permission = PermissionNameProvider.Name.FILESTORE_WRITE_PERM)
+  public class FSWrite {
+
+    static final String TMP_ZK_NODE = "/packageStoreWriteInProgress";
+
+    @Command
+    public void upload(SolrQueryRequest req, SolrQueryResponse rsp) {
+      try {
+        coreContainer.getZkController().getZkClient().create(TMP_ZK_NODE, "true".getBytes(UTF_8),
+            CreateMode.EPHEMERAL, true);
+
+        Iterable<ContentStream> streams = req.getContentStreams();
+        if (streams == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no payload");
+        String path = req.getPathTemplateValues().get("*");
+        if (path == null) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No path");
+        }
+        validateName(path);
+        ContentStream stream = streams.iterator().next();
+        try {
+          ByteBuffer buf = SimplePostTool.inputStreamToByteArray(stream.getStream());
+          String sha512 = DigestUtils.sha512Hex(new ByteBufferInputStream(buf));
+          List<String> signatures = readSignatures(req, buf);
+          Map<String, Object> vals = new HashMap<>();
+          vals.put(MetaData.SHA512, sha512);
+          if (signatures != null) {
+            vals.put("sig", signatures);
+          }
+          packageStore.put(new PackageStore.FileEntry(buf, new MetaData(vals), path));
+          rsp.add(CommonParams.FILE, path);
+        } catch (IOException e) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+        }
+      } catch (InterruptedException e) {
+        log.error("Unexpected error", e);
+      } catch (KeeperException.NodeExistsException e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "A write is already in process , try later");
+      } catch (KeeperException e) {
+        log.error("Unexpected error", e);
+      } finally {
+        try {
+          coreContainer.getZkController().getZkClient().delete(TMP_ZK_NODE, -1, true);
+        } catch (Exception e) {
+          log.error("Unexpected error  ", e);
+        }
+      }
+    }
+
+    private List<String> readSignatures(SolrQueryRequest req, ByteBuffer buf)
+        throws SolrException {
+      String[] signatures = req.getParams().getParams("sig");
+      if (signatures == null || signatures.length == 0) return null;
+      List<String> sigs = Arrays.asList(signatures);
+      validate(sigs, buf);
+      return sigs;
+    }
+
+    public void validate(List<String> sigs,
+                         ByteBuffer buf) throws SolrException {
+      Map<String, byte[]> keys = CloudUtil.getTrustedKeys(
+          coreContainer.getZkController().getZkClient(), "exe");
+      if (keys == null || keys.isEmpty()) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "ZK does not have any keys");
+      }
+      CryptoKeys cryptoKeys = null;
+      try {
+        cryptoKeys = new CryptoKeys(keys);
+      } catch (Exception e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+            "Error parsing public keyts in ZooKeeper");
+      }
+      for (String sig : sigs) {
+        if (cryptoKeys.verify(sig, buf) == null) {
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Signature does not match any public key : " + sig);
+        }
+
+      }
+    }
+
+  }
+
+  @EndPoint(
+      path = "/node/files/*",
+      method = SolrRequest.METHOD.GET,
+      permission = PermissionNameProvider.Name.FILESTORE_READ_PERM)
+  public class FSRead {
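+    /**
+     * Serves a file or directory listing from the local package store. If the
+     * "getFrom" parameter is present, the file is instead fetched asynchronously
+     * from the given node and nothing is returned in this response.
+     */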
+    @Command
+    public void read(SolrQueryRequest req, SolrQueryResponse rsp) {
+      String path = req.getPathTemplateValues().get("*");
+      String pathCopy = path;
+      String getFrom = req.getParams().get("getFrom");
+      if (getFrom != null) {
+        coreContainer.getUpdateShardHandler().getUpdateExecutor().submit(() -> {
+          log.debug("Downloading file {}", pathCopy);
+          try {
+            packageStore.fetch(pathCopy, getFrom);
+          } catch (Exception e) {
+            log.error("Failed to download file: " + pathCopy, e);
+          }
+          log.info("downloaded file: {}", pathCopy);
+        });
+        return;
+
+      }
+      if (path == null) {
+        path = "";
+      }
+
+      PackageStore.FileType typ = packageStore.getType(path);
+      if (typ == PackageStore.FileType.NOFILE) {
+        rsp.add("files", Collections.singletonMap(path, null));
+        return;
+      }
+      if (typ == PackageStore.FileType.DIRECTORY) {
+        rsp.add("files", Collections.singletonMap(path, packageStore.list(path, null)));
+        return;
+      }
+      if (req.getParams().getBool("meta", false)) {
+        if (typ == PackageStore.FileType.FILE) {
+          int idx = path.lastIndexOf('/');
+          String fileName = path.substring(idx + 1);
+          String parentPath = path.substring(0, path.lastIndexOf('/'));
+          List l = packageStore.list(parentPath, s -> s.equals(fileName));
+          rsp.add("files", Collections.singletonMap(path, l.isEmpty() ? null : l.get(0)));
+          return;
+        }
+      } else {
+        writeRawFile(req, rsp, path);
+      }
+    }
+
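+    /** Streams the raw file bytes back to the client using the file-stream response writer. */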
+    private void writeRawFile(SolrQueryRequest req, SolrQueryResponse rsp, String path) {
+      ModifiableSolrParams solrParams = new ModifiableSolrParams();
+      solrParams.add(CommonParams.WT, FILE_STREAM);
+      req.setParams(SolrParams.wrapDefaults(solrParams, req.getParams()));
+      rsp.add(FILE_STREAM, (SolrCore.RawWriter) os -> {
+        packageStore.get(path, (it) -> {
+          try {
+            org.apache.commons.io.IOUtils.copy(it.getInputStream(), os);
+          } catch (IOException e) {
+            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error reading file" + path);
+          }
+        });
+
+      });
+    }
+
+  }
+
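+  /**
+   * Metadata stored alongside each file: its SHA-512 digest, optional signatures and
+   * any other attributes supplied at upload time.
+   */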
+  static class MetaData implements MapWriter {
+    public static final String SHA512 = "sha512";
+    String sha512;
+    List<String> signatures;
+    Map<String, Object> otherAttribs;
+
+    public MetaData(Map m) {
+      m = Utils.getDeepCopy(m, 3);
+      this.sha512 = (String) m.remove(SHA512);
+      this.signatures = (List<String>) m.remove("sig");
+      this.otherAttribs = m;
+    }
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      ew.putIfNotNull("sha512", sha512);
+      ew.putIfNotNull("sig", signatures);
+      if (!otherAttribs.isEmpty()) {
+        otherAttribs.forEach(ew.getBiConsumer());
+      }
+    }
+  }
+
+  static final String INVALIDCHARS = " /\\#&*\n\t%@~`=+^$><?{}[]|:;!";
+
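+  /** Rejects null paths, path segments starting with a period, and names containing characters from INVALIDCHARS. */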
+  public static void validateName(String path) {
+    if (path == null) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "empty path");
+    }
+    List<String> parts = StrUtils.splitSmart(path, '/', true);
+    for (String part : parts) {
+      if (part.charAt(0) == '.') {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "cannot start with period");
+      }
+      for (int i = 0; i < part.length(); i++) {
+        for (int j = 0; j < INVALIDCHARS.length(); j++) {
+          if (part.charAt(i) == INVALIDCHARS.charAt(j))
+            throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unsupported char in file name: " + part);
+        }
+      }
+    }
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index 5098a0d..dc1d1b1 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -1387,7 +1387,7 @@
     });
   }
 
-  public void shutdown() {
+  public void close() {
     if (executorService != null) executorService.shutdown();
     if (pollingIndexFetcher != null) {
       pollingIndexFetcher.destroy();
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 212c30c..eca391b 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -22,14 +22,11 @@
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
+import com.codahale.metrics.MetricRegistry;
+import com.google.common.collect.ImmutableList;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
-import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
-import com.google.common.collect.ImmutableList;
-import org.apache.solr.api.Api;
-import org.apache.solr.api.ApiBag;
-import org.apache.solr.api.ApiSupport;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
@@ -46,6 +43,9 @@
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.util.SolrPluginUtils;
+import org.apache.solr.api.Api;
+import org.apache.solr.api.ApiBag;
+import org.apache.solr.api.ApiSupport;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 789526e..11c6404 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -36,7 +36,6 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
-import java.util.function.Consumer;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -48,7 +47,6 @@
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
-import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
@@ -64,9 +62,9 @@
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.ConfigOverlay;
+import org.apache.solr.core.PluginBag;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.RequestParams;
-import org.apache.solr.core.RuntimeLib;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
@@ -152,262 +150,11 @@
 
   public static boolean getImmutable(SolrCore core) {
     NamedList configSetProperties = core.getConfigSetProperties();
-    if (configSetProperties == null) return false;
+    if(configSetProperties == null) return false;
     Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG);
-    return immutable != null && Boolean.parseBoolean(immutable.toString());
+    return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false;
   }
 
-  public static String validateName(String s) {
-    for (int i = 0; i < s.length(); i++) {
-      char c = s.charAt(i);
-      if ((c >= 'A' && c <= 'Z') ||
-          (c >= 'a' && c <= 'z') ||
-          (c >= '0' && c <= '9') ||
-          c == '_' ||
-          c == '-' ||
-          c == '.'
-      ) continue;
-      else {
-        return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s);
-      }
-    }
-    return null;
-  }
-
-  /**
-   * Block up to a specified maximum time until we see agreement on the schema
-   * version in ZooKeeper across all replicas for a collection.
-   */
-  public static void waitForAllReplicasState(String collection,
-                                             ZkController zkController,
-                                             String prop,
-                                             int expectedVersion,
-                                             int maxWaitSecs) {
-    final RTimer timer = new RTimer();
-    // get a list of active replica cores to query for the schema zk version (skipping this core of course)
-    List<PerReplicaCallable> concurrentTasks = new ArrayList<>();
-
-    for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) {
-      PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs);
-      concurrentTasks.add(e);
-    }
-    if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
-
-    log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}",
-        maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection));
-
-    // use an executor service to invoke schema zk version requests in parallel with a max wait time
-    execInparallel(concurrentTasks, parallelExecutor -> {
-      try {
-        List<String> failedList = executeAll(expectedVersion, maxWaitSecs, concurrentTasks, parallelExecutor);
-        // if any tasks haven't completed within the specified timeout, it's an error
-        if (failedList != null)
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-              formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}",
-                  failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
-      } catch (InterruptedException e) {
-        log.warn(formatString(
-            "Core  was interrupted . trying to set the property {0} to version {1} to propagate to {2} replicas for collection {3}",
-            prop, expectedVersion, concurrentTasks.size(), collection));
-        Thread.currentThread().interrupt();
-      }
-    });
-
-    log.info("Took {}ms to set the property {} to be of version {} for collection {}",
-        timer.getTime(), prop, expectedVersion, collection);
-  }
-
-  public static void execInparallel(List<? extends PerReplicaCallable> concurrentTasks, Consumer<ExecutorService> fun) {
-    int poolSize = Math.min(concurrentTasks.size(), 10);
-    ExecutorService parallelExecutor =
-        ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor"));
-    try {
-
-      fun.accept(parallelExecutor);
-
-    } finally {
-      ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor);
-    }
-  }
-
-  @Override
-  public SolrRequestHandler getSubHandler(String path) {
-    if (subPaths.contains(path)) return this;
-    if (path.startsWith("/params/")) return this;
-    List<String> p = StrUtils.splitSmart(path, '/', true);
-    if (p.size() > 1) {
-      if (subPaths.contains("/" + p.get(0))) return this;
-    }
-    return null;
-  }
-
-
-  private static Set<String> subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler",
-      "/query", "/jmx", "/requestDispatcher", "/znodeVersion"));
-
-  static {
-    for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins)
-      subPaths.add("/" + solrPluginInfo.getCleanTag());
-
-  }
-
-  //////////////////////// SolrInfoMBeans methods //////////////////////
-
-
-  @Override
-  public String getDescription() {
-    return "Edit solrconfig.xml";
-  }
-
-  @Override
-  public Category getCategory() {
-    return Category.ADMIN;
-  }
-
-
-  public static final String SET_PROPERTY = "set-property";
-  public static final String UNSET_PROPERTY = "unset-property";
-  public static final String SET_USER_PROPERTY = "set-user-property";
-  public static final String UNSET_USER_PROPERTY = "unset-user-property";
-  public static final String SET = "set";
-  public static final String UPDATE = "update";
-  public static final String CREATE = "create";
-  private static Set<String> cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add");
-
-  public static List<String> executeAll(int expectedVersion, int maxWaitSecs, List<? extends PerReplicaCallable> concurrentTasks, ExecutorService parallelExecutor) throws InterruptedException {
-    List<Future<Boolean>> results =
-        parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS);
-
-    // determine whether all replicas have the update
-    List<String> failedList = null; // lazily init'd
-    for (int f = 0; f < results.size(); f++) {
-      Boolean success = false;
-      Future<Boolean> next = results.get(f);
-      if (next.isDone() && !next.isCancelled()) {
-        // looks to have finished, but need to check if it succeeded
-        try {
-          success = next.get();
-        } catch (ExecutionException e) {
-          // shouldn't happen since we checked isCancelled
-        }
-      }
-
-      if (!success) {
-        String coreUrl = concurrentTasks.get(f).coreUrl;
-        log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion);
-        if (failedList == null) failedList = new ArrayList<>();
-        failedList.add(coreUrl);
-      }
-    }
-    return failedList;
-  }
-
-  public static class PerReplicaCallable extends SolrRequest implements Callable<Boolean> {
-    protected String coreUrl;
-    String prop;
-    protected int expectedZkVersion;
-    protected Number remoteVersion = null;
-    int maxWait;
-
-    public PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) {
-      super(METHOD.GET, "/config/" + ZNODEVER);
-      this.coreUrl = coreUrl;
-      this.expectedZkVersion = expectedZkVersion;
-      this.prop = prop;
-      this.maxWait = maxWait;
-    }
-
-    @Override
-    public SolrParams getParams() {
-      return new ModifiableSolrParams()
-          .set(prop, expectedZkVersion)
-          .set(CommonParams.WT, CommonParams.JAVABIN);
-    }
-
-    @Override
-    public Boolean call() throws Exception {
-      final RTimer timer = new RTimer();
-      int attempts = 0;
-      try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
-        // eventually, this loop will get killed by the ExecutorService's timeout
-        while (true) {
-          try {
-            long timeElapsed = (long) timer.getTime() / 1000;
-            if (timeElapsed >= maxWait) {
-              return false;
-            }
-            log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
-            Thread.sleep(100);
-            MapWriter resp = solr.httpUriRequest(this).future.get();
-            if (verifyResponse(resp, attempts)) break;
-            attempts++;
-          } catch (Exception e) {
-            if (e instanceof InterruptedException) {
-              break; // stop looping
-            } else {
-              log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e);
-            }
-          }
-        }
-      }
-      return true;
-    }
-
-    protected boolean verifyResponse(MapWriter mw, int attempts) {
-      NamedList resp = (NamedList) mw;
-      if (resp != null) {
-        Map m = (Map) resp.get(ZNODEVER);
-        if (m != null) {
-          remoteVersion = (Number) m.get(prop);
-          if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) return true;
-          log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2}   after {3} attempts", expectedZkVersion, coreUrl, prop, attempts));
-
-        }
-      }
-      return false;
-    }
-
-
-    @Override
-    protected SolrResponse createResponse(SolrClient client) {
-      return null;
-    }
-  }
-
-  public static List<String> getActiveReplicaCoreUrls(ZkController zkController,
-                                                      String collection) {
-    List<String> activeReplicaCoreUrls = new ArrayList<>();
-    ClusterState clusterState = zkController.getZkStateReader().getClusterState();
-    Set<String> liveNodes = clusterState.getLiveNodes();
-    final DocCollection docCollection = clusterState.getCollectionOrNull(collection);
-    if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) {
-      final Collection<Slice> activeSlices = docCollection.getActiveSlices();
-      for (Slice next : activeSlices) {
-        Map<String, Replica> replicasMap = next.getReplicasMap();
-        if (replicasMap != null) {
-          for (Map.Entry<String, Replica> entry : replicasMap.entrySet()) {
-            Replica replica = entry.getValue();
-            if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
-              activeReplicaCoreUrls.add(replica.getCoreUrl());
-            }
-          }
-        }
-      }
-    }
-    return activeReplicaCoreUrls;
-  }
-
-  @Override
-  public Name getPermissionName(AuthorizationContext ctx) {
-    switch (ctx.getHttpMethod()) {
-      case "GET":
-        return Name.CONFIG_READ_PERM;
-      case "POST":
-        return Name.CONFIG_EDIT_PERM;
-      default:
-        return null;
-    }
-  }
 
   private class Command {
     private final SolrQueryRequest req;
@@ -510,54 +257,25 @@
 
     private Map<String, Object> getConfigDetails(String componentType, SolrQueryRequest req) {
       String componentName = componentType == null ? null : req.getParams().get("componentName");
-      if(componentName == null && parts.size() > 2){
-        componentName = parts.get(2);
-        if(SolrRequestHandler.TYPE.equals(componentType)){
-          componentName = "/"+componentName;
-        }
-      }
-
       boolean showParams = req.getParams().getBool("expandParams", false);
       Map<String, Object> map = this.req.getCore().getSolrConfig().toMap(new LinkedHashMap<>());
-      if (SolrRequestHandler.TYPE.equals(componentType) || componentType == null) {
-        Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE);
-        if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>());
-        List<PluginInfo> plugins = this.req.getCore().getImplicitHandlers();
-        for (PluginInfo plugin : plugins) {
-          if (SolrRequestHandler.TYPE.equals(plugin.type)) {
-            if (!reqHandlers.containsKey(plugin.name)) {
-              reqHandlers.put(plugin.name, plugin);
-            }
+      if (componentType != null && !SolrRequestHandler.TYPE.equals(componentType)) return map;
+      Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE);
+      if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>());
+      List<PluginInfo> plugins = this.req.getCore().getImplicitHandlers();
+      for (PluginInfo plugin : plugins) {
+        if (SolrRequestHandler.TYPE.equals(plugin.type)) {
+          if (!reqHandlers.containsKey(plugin.name)) {
+            reqHandlers.put(plugin.name, plugin);
           }
         }
-        if (showParams) {
-          for (Object o : reqHandlers.entrySet()) {
-            Map.Entry e = (Map.Entry) o;
-            if (componentName == null || e.getKey().equals(componentName)) {
-              Map<String, Object> m = expandUseParams(req, e.getValue());
-              e.setValue(m);
-            }
-          }
-        }
-
       }
-
-      if (req.getParams().getBool("meta", false)) {
-        for (SolrCore.PkgListener pkgListener : req.getCore().getPackageListeners()) {
-          PluginInfo meta = pkgListener.pluginInfo();
-          if (meta.pathInConfig != null) {
-            Object obj = Utils.getObjectByPath(map, false, meta.pathInConfig);
-            if (obj instanceof Map) {
-              Map m = (Map) obj;
-              m.put("_packageinfo_", pkgListener.lib());
-            } else if(obj instanceof MapWriter){
-              MapWriter mw = (MapWriter) obj;
-              Utils.setObjectByPath(map, meta.pathInConfig, (MapWriter) ew -> {
-                mw.writeMap(ew);
-                ew.put("_packageinfo_", pkgListener.lib());
-              }, false);
-            }
-          }
+      if (!showParams) return map;
+      for (Object o : reqHandlers.entrySet()) {
+        Map.Entry e = (Map.Entry) o;
+        if (componentName == null || e.getKey().equals(componentName)) {
+          Map<String, Object> m = expandUseParams(req, e.getValue());
+          e.setValue(m);
         }
       }
 
@@ -633,8 +351,6 @@
           }
         }
       } catch (Exception e) {
-
-        log.error("error executing commands " + Utils.toJSONString(ops), e);
         resp.setException(e);
         resp.add(CommandOperation.ERR_MSGS, singletonList(SchemaManager.getErrorStr(e)));
       }
@@ -709,7 +425,7 @@
 
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
-        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing params", errs);
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing params", errs);
       }
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
@@ -772,7 +488,7 @@
       }
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
-        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing commands", errs);
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs);
       }
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
@@ -810,20 +526,20 @@
       op.getMap(PluginInfo.INVARIANTS, null);
       op.getMap(PluginInfo.APPENDS, null);
       if (op.hasError()) return overlay;
-      if (info.clazz == RuntimeLib.class) {
-        if (!RuntimeLib.isEnabled()) {
+      if(info.clazz == PluginBag.RuntimeLib.class) {
+        if(!PluginBag.RuntimeLib.isEnabled()){
           op.addError("Solr not started with -Denable.runtime.lib=true");
           return overlay;
         }
         try {
-          new RuntimeLib(req.getCore().getCoreContainer()).init(new PluginInfo(info.tag, op.getDataMap()));
+          new PluginBag.RuntimeLib(req.getCore()).init(new PluginInfo(info.tag, op.getDataMap()));
         } catch (Exception e) {
           op.addError(e.getMessage());
           log.error("can't load this plugin ", e);
           return overlay;
         }
       }
-      if (!verifyClass(op, clz, info)) return overlay;
+      if (!verifyClass(op, clz, info.clazz)) return overlay;
       if (pluginExists(info, overlay, name)) {
         if (isCeate) {
           op.addError(formatString(" ''{0}'' already exists . Do an ''{1}'' , if you want to change it ", name, "update-" + info.getTagCleanLower()));
@@ -843,23 +559,16 @@
 
     private boolean pluginExists(SolrConfig.SolrPluginInfo info, ConfigOverlay overlay, String name) {
       List<PluginInfo> l = req.getCore().getSolrConfig().getPluginInfos(info.clazz.getName());
-      for (PluginInfo pluginInfo : l) if (name.equals(pluginInfo.name)) return true;
+      for (PluginInfo pluginInfo : l) if(name.equals( pluginInfo.name)) return true;
       return overlay.getNamedPlugins(info.getCleanTag()).containsKey(name);
     }
 
-    private boolean verifyClass(CommandOperation op, String clz, SolrConfig.SolrPluginInfo pluginMeta) {
+    private boolean verifyClass(CommandOperation op, String clz, Class expected) {
       if (clz == null) return true;
-      PluginInfo info = new PluginInfo(pluginMeta.getCleanTag(), op.getDataMap());
-
-      if (info.isRuntimePlugin() && !RuntimeLib.isEnabled()) {
-        op.addError("node not started with enable.runtime.lib=true");
-        return false;
-      }
-
-      if (!"true".equals(String.valueOf(op.getStr(RuntimeLib.TYPE, null)))) {
+      if (!"true".equals(String.valueOf(op.getStr("runtimeLib", null)))) {
         //this is not dynamically loaded so we can verify the class right away
         try {
-          req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), pluginMeta.clazz, clz, "");
+          req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, "");
         } catch (Exception e) {
           op.addError(e.getMessage());
           return false;
@@ -957,6 +666,235 @@
 
   }
 
+  public static String validateName(String s) {
+    for (int i = 0; i < s.length(); i++) {
+      char c = s.charAt(i);
+      if ((c >= 'A' && c <= 'Z') ||
+          (c >= 'a' && c <= 'z') ||
+          (c >= '0' && c <= '9') ||
+          c == '_' ||
+          c == '-' ||
+          c == '.'
+          ) continue;
+      else {
+        return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s);
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public SolrRequestHandler getSubHandler(String path) {
+    if (subPaths.contains(path)) return this;
+    if (path.startsWith("/params/")) return this;
+    return null;
+  }
+
+
+  private static Set<String> subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler",
+      "/query", "/jmx", "/requestDispatcher", "/znodeVersion"));
+
+  static {
+    for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins)
+      subPaths.add("/" + solrPluginInfo.getCleanTag());
+
+  }
+
+  //////////////////////// SolrInfoMBeans methods //////////////////////
+
+
+  @Override
+  public String getDescription() {
+    return "Edit solrconfig.xml";
+  }
+
+  @Override
+  public Category getCategory() {
+    return Category.ADMIN;
+  }
+
+
+  public static final String SET_PROPERTY = "set-property";
+  public static final String UNSET_PROPERTY = "unset-property";
+  public static final String SET_USER_PROPERTY = "set-user-property";
+  public static final String UNSET_USER_PROPERTY = "unset-user-property";
+  public static final String SET = "set";
+  public static final String UPDATE = "update";
+  public static final String CREATE = "create";
+  private static Set<String> cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add");
+
+  /**
+   * Block up to a specified maximum time until we see agreement on the schema
+   * version in ZooKeeper across all replicas for a collection.
+   */
+  private static void waitForAllReplicasState(String collection,
+                                              ZkController zkController,
+                                              String prop,
+                                              int expectedVersion,
+                                              int maxWaitSecs) {
+    final RTimer timer = new RTimer();
+    // get a list of active replica cores to query for the schema zk version (skipping this core of course)
+    List<PerReplicaCallable> concurrentTasks = new ArrayList<>();
+
+    for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) {
+      PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs);
+      concurrentTasks.add(e);
+    }
+    if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
+
+    log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}",
+        maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection));
+
+    // use an executor service to invoke schema zk version requests in parallel with a max wait time
+    int poolSize = Math.min(concurrentTasks.size(), 10);
+    ExecutorService parallelExecutor =
+        ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor"));
+    try {
+      List<Future<Boolean>> results =
+          parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS);
+
+      // determine whether all replicas have the update
+      List<String> failedList = null; // lazily init'd
+      for (int f = 0; f < results.size(); f++) {
+        Boolean success = false;
+        Future<Boolean> next = results.get(f);
+        if (next.isDone() && !next.isCancelled()) {
+          // looks to have finished, but need to check if it succeeded
+          try {
+            success = next.get();
+          } catch (ExecutionException e) {
+            // shouldn't happen since we checked isCancelled
+          }
+        }
+
+        if (!success) {
+          String coreUrl = concurrentTasks.get(f).coreUrl;
+          log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion);
+          if (failedList == null) failedList = new ArrayList<>();
+          failedList.add(coreUrl);
+        }
+      }
+
+      // if any tasks haven't completed within the specified timeout, it's an error
+      if (failedList != null)
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+            formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}",
+                failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
+
+    } catch (InterruptedException ie) {
+      log.warn(formatString(
+          "Core  was interrupted . trying to set the property {1} to version {2} to propagate to {3} replicas for collection {4}",
+          prop, expectedVersion, concurrentTasks.size(), collection));
+      Thread.currentThread().interrupt();
+    } finally {
+      ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor);
+    }
+
+    log.info("Took {}ms to set the property {} to be of version {} for collection {}",
+        timer.getTime(), prop, expectedVersion, collection);
+  }
+
+  public static List<String> getActiveReplicaCoreUrls(ZkController zkController,
+                                                      String collection) {
+    List<String> activeReplicaCoreUrls = new ArrayList<>();
+    ClusterState clusterState = zkController.getZkStateReader().getClusterState();
+    Set<String> liveNodes = clusterState.getLiveNodes();
+    final DocCollection docCollection = clusterState.getCollectionOrNull(collection);
+    if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) {
+      final Collection<Slice> activeSlices = docCollection.getActiveSlices();
+      for (Slice next : activeSlices) {
+        Map<String, Replica> replicasMap = next.getReplicasMap();
+        if (replicasMap != null) {
+          for (Map.Entry<String, Replica> entry : replicasMap.entrySet()) {
+            Replica replica = entry.getValue();
+            if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
+              activeReplicaCoreUrls.add(replica.getCoreUrl());
+            }
+          }
+        }
+      }
+    }
+    return activeReplicaCoreUrls;
+  }
+
+  @Override
+  public Name getPermissionName(AuthorizationContext ctx) {
+    switch (ctx.getHttpMethod()) {
+      case "GET":
+        return Name.CONFIG_READ_PERM;
+      case "POST":
+        return Name.CONFIG_EDIT_PERM;
+      default:
+        return null;
+    }
+  }
+
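+  /**
+   * Polls a replica's /config/znodeVersion endpoint until it reports at least the
+   * expected ZooKeeper version for the given property, or until maxWait elapses.
+   */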
+  private static class PerReplicaCallable extends SolrRequest implements Callable<Boolean> {
+    String coreUrl;
+    String prop;
+    int expectedZkVersion;
+    Number remoteVersion = null;
+    int maxWait;
+
+    PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) {
+      super(METHOD.GET, "/config/" + ZNODEVER);
+      this.coreUrl = coreUrl;
+      this.expectedZkVersion = expectedZkVersion;
+      this.prop = prop;
+      this.maxWait = maxWait;
+    }
+
+    @Override
+    public SolrParams getParams() {
+      return new ModifiableSolrParams()
+          .set(prop, expectedZkVersion)
+          .set(CommonParams.WT, CommonParams.JAVABIN);
+    }
+
+    @Override
+    public Boolean call() throws Exception {
+      final RTimer timer = new RTimer();
+      int attempts = 0;
+      try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
+        // eventually, this loop will get killed by the ExecutorService's timeout
+        while (true) {
+          try {
+            long timeElapsed = (long) timer.getTime() / 1000;
+            if (timeElapsed >= maxWait) {
+              return false;
+            }
+            log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
+            Thread.sleep(100);
+            NamedList<Object> resp = solr.httpUriRequest(this).future.get();
+            if (resp != null) {
+              Map m = (Map) resp.get(ZNODEVER);
+              if (m != null) {
+                remoteVersion = (Number) m.get(prop);
+                if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) break;
+              }
+            }
+
+            attempts++;
+            log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2}   after {3} attempts", expectedZkVersion, coreUrl, prop, attempts));
+          } catch (Exception e) {
+            if (e instanceof InterruptedException) {
+              break; // stop looping
+            } else {
+              log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e);
+            }
+          }
+        }
+      }
+      return true;
+    }
+
+
+    @Override
+    protected SolrResponse createResponse(SolrClient client) {
+      return null;
+    }
+  }
+
   @Override
   public Collection<Api> getApis() {
     return ApiBag.wrapRequestHandlers(this,
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
index 2259a0e..d7d179a 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
@@ -17,71 +17,40 @@
 
 package org.apache.solr.handler.admin;
 
-import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.EnumMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
 
-import org.apache.solr.api.ApiBag;
-import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.client.solrj.request.CollectionApiMapping;
 import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta;
 import org.apache.solr.client.solrj.request.CollectionApiMapping.Meta;
 import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint;
-import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.Callable;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.cloud.ClusterProperties;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.CommandOperation;
-import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.ConfigOverlay;
-import org.apache.solr.core.CoreContainer;
-import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.RuntimeLib;
-import org.apache.solr.handler.SolrConfigHandler;
 import org.apache.solr.handler.admin.CollectionsHandler.CollectionOperation;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.util.RTimer;
-import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static java.util.Arrays.asList;
-import static java.util.Collections.singletonList;
-import static org.apache.solr.common.util.CommandOperation.captureErrors;
-import static org.apache.solr.common.util.StrUtils.formatString;
-import static org.apache.solr.core.RuntimeLib.SHA256;
-
 public class CollectionHandlerApi extends BaseHandlerApiSupport {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   final CollectionsHandler handler;
   static Collection<ApiCommand> apiCommands = createCollMapping();
 
-  public CollectionHandlerApi(CollectionsHandler handler) {
-    this.handler = handler;
-  }
-
   private static Collection<ApiCommand> createCollMapping() {
-    Map<Meta, ApiCommand> apiMapping = new EnumMap<>(Meta.class);
+    Map<Meta, ApiCommand> result = new EnumMap<>(Meta.class);
 
     for (Meta meta : Meta.values()) {
       for (CollectionOperation op : CollectionOperation.values()) {
         if (op.action == meta.action) {
-          apiMapping.put(meta, new ApiCommand() {
+          result.put(meta, new ApiCommand() {
             @Override
             public CommandMeta meta() {
               return meta;
@@ -96,209 +65,30 @@
       }
     }
     //The following APIs have only V2 implementations
-    addApi(apiMapping, Meta.GET_NODES, CollectionHandlerApi::getNodes);
-    addApi(apiMapping, Meta.SET_CLUSTER_PROPERTY_OBJ, CollectionHandlerApi::setClusterObj);
-    addApi(apiMapping, Meta.ADD_PACKAGE, wrap(CollectionHandlerApi::addUpdatePackage));
-    addApi(apiMapping, Meta.UPDATE_PACKAGE, wrap(CollectionHandlerApi::addUpdatePackage));
-    addApi(apiMapping, Meta.DELETE_RUNTIME_LIB, wrap(CollectionHandlerApi::deletePackage));
-    addApi(apiMapping, Meta.ADD_REQ_HANDLER, wrap(CollectionHandlerApi::addRequestHandler));
-    addApi(apiMapping, Meta.DELETE_REQ_HANDLER, wrap(CollectionHandlerApi::deleteReqHandler));
+    addApi(result, Meta.GET_NODES, params -> params.rsp.add("nodes", ((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes()));
+    addApi(result, Meta.SET_CLUSTER_PROPERTY_OBJ, params -> {
+      List<CommandOperation> commands = params.req.getCommands(true);
+      if (commands == null || commands.isEmpty()) throw new RuntimeException("Empty commands");
+      ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
+
+      try {
+        clusterProperties.setClusterProperties(commands.get(0).getDataMap());
+      } catch (Exception e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in API", e);
+      }
+    });
 
     for (Meta meta : Meta.values()) {
-      if (apiMapping.get(meta) == null) {
+      if (result.get(meta) == null) {
         log.error("ERROR_INIT. No corresponding API implementation for : " + meta.commandName);
       }
     }
 
-    return apiMapping.values();
+    return result.values();
   }
 
-  static Command wrap(Command cmd) {
-    return info -> {
-      CoreContainer cc = ((CollectionHandlerApi) info.apiHandler).handler.coreContainer;
-      boolean modified = cmd.call(info);
-      if (modified) {
-        Stat stat = new Stat();
-        Map<String, Object> clusterProperties = new ClusterProperties(cc.getZkController().getZkClient()).getClusterProperties(stat);
-        try {
-          cc.getPackageManager().onChange(clusterProperties);
-        } catch (SolrException e) {
-          log.error("error executing command : " + info.op.jsonStr(), e);
-          throw e;
-        } catch (Exception e) {
-          log.error("error executing command : " + info.op.jsonStr(), e);
-          throw new SolrException(ErrorCode.SERVER_ERROR, "error executing command : ", e);
-        }
-        log.info("current version of clusterprops.json is {} , trying to get every node to update ", stat.getVersion());
-        log.debug("The current clusterprops.json:  {}", clusterProperties);
-        ((CollectionHandlerApi) info.apiHandler).waitForStateSync(stat.getVersion(), cc);
-
-      }
-      if (info.op != null && info.op.hasError()) {
-        log.error("Error in running command {} , current clusterprops.json : {}", Utils.toJSONString(info.op), Utils.toJSONString(new ClusterProperties(cc.getZkController().getZkClient()).getClusterProperties()));
-      }
-      return modified;
-
-    };
-  }
-
-  private static boolean getNodes(ApiInfo params) {
-    params.rsp.add("nodes", ((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes());
-    return false;
-  }
-
-  private static boolean deleteReqHandler(ApiInfo params) throws Exception {
-    String name = params.op.getStr("");
-    ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
-    Map<String, Object> map = clusterProperties.getClusterProperties();
-    if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) == null) {
-      params.op.addError("NO such requestHandler with name :");
-      return false;
-    }
-    Map m = new LinkedHashMap();
-    Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), null, true);
-    clusterProperties.setClusterProperties(m);
-    return true;
-  }
-
-  private static boolean addRequestHandler(ApiInfo params) throws Exception {
-    Map data = params.op.getDataMap();
-    String name = (String) data.get("name");
-    CoreContainer coreContainer = ((CollectionHandlerApi) params.apiHandler).handler.coreContainer;
-    ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
-    Map<String, Object> map = clusterProperties.getClusterProperties();
-    if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) != null) {
-      params.op.addError("A requestHandler already exists with the said name");
-      return false;
-    }
-    Map m = new LinkedHashMap();
-    Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), data, true);
-    clusterProperties.setClusterProperties(m);
-    return true;
-  }
-
-  private static boolean deletePackage(ApiInfo params) throws Exception {
-    if (!RuntimeLib.isEnabled()) {
-      params.op.addError("node not started with enable.runtime.lib=true");
-      return false;
-    }
-    String name = params.op.getStr(CommandOperation.ROOT_OBJ);
-    ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
-    Map<String, Object> props = clusterProperties.getClusterProperties();
-    List<String> pathToLib = asList(CommonParams.PACKAGE, name);
-    Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib);
-    if (existing == null) {
-      params.op.addError("No such runtimeLib : " + name);
-      return false;
-    }
-    Map delta = new LinkedHashMap();
-    Utils.setObjectByPath(delta, pathToLib, null, true);
-    clusterProperties.setClusterProperties(delta);
-    return true;
-  }
-
-  private static boolean addUpdatePackage(ApiInfo params) throws Exception {
-    if (!RuntimeLib.isEnabled()) {
-      params.op.addError("node not started with enable.runtime.lib=true");
-      return false;
-    }
-
-    CollectionHandlerApi handler = (CollectionHandlerApi) params.apiHandler;
-    RuntimeLib lib = new RuntimeLib(handler.handler.coreContainer);
-    CommandOperation op = params.op;
-    String name = op.getStr("name");
-    ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
-    Map<String, Object> props = clusterProperties.getClusterProperties();
-    List<String> pathToLib = asList(CommonParams.PACKAGE, name);
-    Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib);
-    if (Meta.ADD_PACKAGE.commandName.equals(op.name)) {
-      if (existing != null) {
-        op.addError(StrUtils.formatString("The jar with a name ''{0}'' already exists ", name));
-        return false;
-      }
-    } else {
-      if (existing == null) {
-        op.addError(StrUtils.formatString("The jar with a name ''{0}'' does not exist", name));
-        return false;
-      }
-      if (Objects.equals(existing.get(SHA256), op.getDataMap().get(SHA256))) {
-        op.addError("Trying to update a jar with the same sha256");
-        return false;
-      }
-    }
-    try {
-      lib.init(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()));
-    } catch (SolrException e) {
-      log.error("Error loading runtimelib ", e);
-      op.addError(e.getMessage());
-      return false;
-    }
-
-    Map delta = new LinkedHashMap();
-    Utils.setObjectByPath(delta, pathToLib, op.getDataMap(), true);
-    clusterProperties.setClusterProperties(delta);
-    return true;
-
-  }
-
-  private static boolean setClusterObj(ApiInfo params) {
-    ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
-    try {
-      clusterProperties.setClusterProperties(params.op.getDataMap());
-    } catch (Exception e) {
-      throw new SolrException(ErrorCode.SERVER_ERROR, "Error in API", e);
-    }
-    return false;
-  }
-
-  private void waitForStateSync(int expectedVersion, CoreContainer coreContainer) {
-    final RTimer timer = new RTimer();
-    int waitTimeSecs = 30;
-    // get a list of active replica cores to query for the schema zk version (skipping this core of course)
-    List<PerNodeCallable> concurrentTasks = new ArrayList<>();
-
-    ZkStateReader zkStateReader = coreContainer.getZkController().getZkStateReader();
-    for (String nodeName : zkStateReader.getClusterState().getLiveNodes()) {
-      PerNodeCallable e = new PerNodeCallable(zkStateReader.getBaseUrlForNodeName(nodeName), expectedVersion, waitTimeSecs);
-      concurrentTasks.add(e);
-    }
-    if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
-
-    log.info("Waiting up to {} secs for {} nodes to update clusterprops to be of version {} ",
-        waitTimeSecs, concurrentTasks.size(), expectedVersion);
-    SolrConfigHandler.execInparallel(concurrentTasks, parallelExecutor -> {
-      try {
-        List<String> failedList = SolrConfigHandler.executeAll(expectedVersion, waitTimeSecs, concurrentTasks, parallelExecutor);
-
-        // if any tasks haven't completed within the specified timeout, it's an error
-        if (failedList != null)
-          throw new SolrException(ErrorCode.SERVER_ERROR,
-              formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}",
-                  failedList.size(), concurrentTasks.size() + 1, expectedVersion, 30, failedList));
-      } catch (InterruptedException e) {
-        log.warn(formatString(
-            "Request was interrupted . trying to set the clusterprops to version {0} to propagate to {1} nodes ",
-            expectedVersion, concurrentTasks.size()));
-        Thread.currentThread().interrupt();
-
-      }
-    });
-
-    log.info("Took {}ms to update the clusterprops to be of version {}  on {} nodes",
-        timer.getTime(), expectedVersion, concurrentTasks.size());
-
-  }
-
-  interface Command {
-
-
-    boolean call(ApiInfo info) throws Exception;
-
-  }
-
-  private static void addApi(Map<Meta, ApiCommand> mapping, Meta metaInfo, Command fun) {
-    mapping.put(metaInfo, new ApiCommand() {
-
+  private static void addApi(Map<Meta, ApiCommand> result, Meta metaInfo, Callable<ApiParams> fun) {
+    result.put(metaInfo, new ApiCommand() {
       @Override
       public CommandMeta meta() {
         return metaInfo;
@@ -306,25 +96,25 @@
 
       @Override
       public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
-        CommandOperation op = null;
-        if (metaInfo.method == SolrRequest.METHOD.POST) {
-          List<CommandOperation> commands = req.getCommands(true);
-          if (commands == null || commands.size() != 1)
-            throw new SolrException(ErrorCode.BAD_REQUEST, "should have exactly one command");
-          op = commands.get(0);
-        }
-
-        fun.call(new ApiInfo(req, rsp, apiHandler, op));
-        if (op != null && op.hasError()) {
-          throw new ApiBag.ExceptionWithErrObject(ErrorCode.BAD_REQUEST, "error processing commands", captureErrors(singletonList(op)));
-        }
+        fun.call(new ApiParams(req, rsp, apiHandler));
       }
     });
   }
 
-  @Override
-  protected List<V2EndPoint> getEndPoints() {
-    return asList(CollectionApiMapping.EndPoint.values());
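+  /** Bundles the request, response and handler that are passed to each v2-only API callback. */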
+  static class ApiParams {
+    final SolrQueryRequest req;
+    final SolrQueryResponse rsp;
+    final BaseHandlerApiSupport apiHandler;
+
+    ApiParams(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) {
+      this.req = req;
+      this.rsp = rsp;
+      this.apiHandler = apiHandler;
+    }
+  }
+
+  public CollectionHandlerApi(CollectionsHandler handler) {
+    this.handler = handler;
   }
 
   @Override
@@ -332,46 +122,9 @@
     return apiCommands;
   }
 
-  public static class PerNodeCallable extends SolrConfigHandler.PerReplicaCallable {
-
-    static final List<String> path = Arrays.asList("metadata", CommonParams.VERSION);
-
-    PerNodeCallable(String baseUrl, int expectedversion, int waitTime) {
-      super(baseUrl, ConfigOverlay.ZNODEVER, expectedversion, waitTime);
-    }
-
-    @Override
-    protected boolean verifyResponse(MapWriter mw, int attempts) {
-      remoteVersion = (Number) mw._get(path, -1);
-      if (remoteVersion.intValue() >= expectedZkVersion) return true;
-      log.info(formatString("Could not get expectedVersion {0} from {1} , remote val= {2}   after {3} attempts", expectedZkVersion, coreUrl, remoteVersion, attempts));
-
-      return false;
-    }
-
-    public String getPath() {
-      return "/____v2/node/ext";
-    }
-  }
-
-  static class ApiInfo {
-    final SolrQueryRequest req;
-    final SolrQueryResponse rsp;
-    final BaseHandlerApiSupport apiHandler;
-    final CommandOperation op;
-
-    ApiInfo(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler, CommandOperation op) {
-      this.req = req;
-      this.rsp = rsp;
-      this.apiHandler = apiHandler;
-      this.op = op;
-    }
-  }
-
-  public static void postBlob(String baseUrl, ByteBuffer buf) throws IOException {
-    try(HttpSolrClient client = new HttpSolrClient.Builder(baseUrl+"/____v2/node/blob" ).build()){
-
-    }
+  @Override
+  protected List<V2EndPoint> getEndPoints() {
+    return Arrays.asList(CollectionApiMapping.EndPoint.values());
   }
 
 }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 445c0c5..5843a94 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -149,10 +149,10 @@
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonAdminParams.IN_PLACE_MOVE;
 import static org.apache.solr.common.params.CommonAdminParams.NUM_SUB_SHARDS;
-import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX;
 import static org.apache.solr.common.params.CommonAdminParams.SPLIT_FUZZ;
 import static org.apache.solr.common.params.CommonAdminParams.SPLIT_METHOD;
 import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
+import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CommonParams.TIMING;
 import static org.apache.solr.common.params.CommonParams.VALUE_LONG;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
index c9768a4..7d96494 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/DebugComponent.java
@@ -37,7 +37,9 @@
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.DocList;
 import org.apache.solr.search.QueryParsing;
+import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.search.facet.FacetDebugInfo;
+import org.apache.solr.search.stats.StatsCache;
 import org.apache.solr.util.SolrPluginUtils;
 
 import static org.apache.solr.common.params.CommonParams.FQ;
@@ -74,7 +76,7 @@
       map.put(ResponseBuilder.STAGE_DONE, "DONE");
       stages = Collections.unmodifiableMap(map);
   }
-  
+
   @Override
   public void prepare(ResponseBuilder rb) throws IOException
   {
@@ -89,6 +91,9 @@
   public void process(ResponseBuilder rb) throws IOException
   {
     if( rb.isDebug() ) {
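+      // expose the stats cache as the stats source so debug/explain uses the same
+      // (possibly distributed) term statistics as the main query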
+      SolrQueryRequest req = rb.req;
+      StatsCache statsCache = req.getSearcher().getStatsCache();
+      req.getContext().put(SolrIndexSearcher.STATS_SOURCE, statsCache.get(req));
       DocList results = null;
       //some internal grouping requests won't have results value set
       if(rb.getResults() != null) {
@@ -173,6 +178,11 @@
     // Turn on debug to get explain only when retrieving fields
     if ((sreq.purpose & ShardRequest.PURPOSE_GET_FIELDS) != 0) {
       sreq.purpose |= ShardRequest.PURPOSE_GET_DEBUG;
+      // always distribute the latest version of global stats
+      sreq.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS;
+      StatsCache statsCache = rb.req.getSearcher().getStatsCache();
+      statsCache.sendGlobalStats(rb, sreq);
+
       if (rb.isDebugAll()) {
         sreq.params.set(CommonParams.DEBUG_QUERY, "true");
       } else {
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
index 98c5b47..9a34014 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryComponent.java
@@ -330,11 +330,11 @@
       return;
     }
 
-    StatsCache statsCache = req.getCore().getStatsCache();
+    SolrIndexSearcher searcher = req.getSearcher();
+    StatsCache statsCache = searcher.getStatsCache();
     
     int purpose = params.getInt(ShardParams.SHARDS_PURPOSE, ShardRequest.PURPOSE_GET_TOP_IDS);
     if ((purpose & ShardRequest.PURPOSE_GET_TERM_STATS) != 0) {
-      SolrIndexSearcher searcher = req.getSearcher();
       statsCache.returnLocalStats(rb, searcher);
       return;
     }
@@ -686,7 +686,7 @@
   }
 
   protected void createDistributedStats(ResponseBuilder rb) {
-    StatsCache cache = rb.req.getCore().getStatsCache();
+    StatsCache cache = rb.req.getSearcher().getStatsCache();
     if ( (rb.getFieldFlags() & SolrIndexSearcher.GET_SCORES)!=0 || rb.getSortSpec().includesScore()) {
       ShardRequest sreq = cache.retrieveStatsRequest(rb);
       if (sreq != null) {
@@ -696,7 +696,7 @@
   }
 
   protected void updateStats(ResponseBuilder rb, ShardRequest sreq) {
-    StatsCache cache = rb.req.getCore().getStatsCache();
+    StatsCache cache = rb.req.getSearcher().getStatsCache();
     cache.mergeToGlobalStats(rb.req, sreq.responses);
   }
 
@@ -776,8 +776,9 @@
 
     // TODO: should this really sendGlobalDfs if just includeScore?
 
-    if (shardQueryIncludeScore) {
-      StatsCache statsCache = rb.req.getCore().getStatsCache();
+    if (shardQueryIncludeScore || rb.isDebug()) {
+      StatsCache statsCache = rb.req.getSearcher().getStatsCache();
+      sreq.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS;
       statsCache.sendGlobalStats(rb, sreq);
     }
 
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
index 203d8f7..4f4f232 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
@@ -79,8 +79,8 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.XmlConfigFile;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.transform.ElevatedMarkerFactory;
 import org.apache.solr.response.transform.ExcludedMarkerFactory;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
index 61b1013..40af722 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/ResponseBuilder.java
@@ -166,8 +166,6 @@
     }
   }
 
-  public GlobalCollectionStat globalCollectionStat;
-
   public Map<Object, ShardDoc> resultIds;
   // Maps uniqueKeyValue to ShardDoc, which may be used to
   // determine order of the doc or uniqueKey in the final
@@ -417,18 +415,6 @@
     this.timer = timer;
   }
 
-
-  public static class GlobalCollectionStat {
-    public final long numDocs;
-
-    public final Map<String, Long> dfMap;
-
-    public GlobalCollectionStat(int numDocs, Map<String, Long> dfMap) {
-      this.numDocs = numDocs;
-      this.dfMap = dfMap;
-    }
-  }
-
   /**
    * Creates a SolrIndexSearcher.QueryCommand from this
    * ResponseBuilder.  TimeAllowed is left unset.
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index 3ede10d..2d6fdb1 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -90,7 +90,7 @@
 
   protected SolrMetricManager metricManager;
   protected String registryName;
-
+  
   /**
    * Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester}
    */
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index f029e60..7d2877d 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -690,13 +690,13 @@
     }
   }
 
-    /**
-     * This is a wrapper for {@link Gauge} metrics, which are usually implemented as
-     * lambdas that often keep a reference to their parent instance. In order to make sure that
-     * all such metrics are removed when their parent instance is removed / closed the
-     * metric is associated with an instance tag, which can be used then to remove
-     * wrappers with the matching tag using {@link #unregisterGauges(String, String)}.
-     */
+  /**
+   * This is a wrapper for {@link Gauge} metrics, which are usually implemented as
+   * lambdas that often keep a reference to their parent instance. To make sure that
+   * all such metrics are removed when their parent instance is removed or closed, the
+   * metric is associated with an instance tag, which can then be used to remove
+   * wrappers with the matching tag using {@link #unregisterGauges(String, String)}.
+   */
   public static class GaugeWrapper<T> implements Gauge<T> {
     private final Gauge<T> gauge;
     private final String tag;
@@ -736,7 +736,7 @@
         removed.incrementAndGet();
         return true;
       } else {
-      return false;
+        return false;
       }
     });
     return removed.get();
@@ -774,7 +774,6 @@
       sb.append(name);
       return sb.toString();
     }
-
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
index deb2b18..265d7e4 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
@@ -17,19 +17,19 @@
 package org.apache.solr.metrics;
 
 /**
- * Used by objects that expose metrics through {@link SolrCoreMetricManager}.
+ * Used by objects that expose metrics through {@link SolrMetricManager}.
  */
 public interface SolrMetricProducer {
 
   /**
    * Initializes metrics specific to this producer
-   * @param manager  an instance of {@link SolrMetricManager}
+   * @param manager an instance of {@link SolrMetricManager}
    * @param registry registry name where metrics are registered
-   * @param tag      a symbolic tag that represents this instance of the producer,
-   *                 or a group of related instances that have the same life-cycle. This tag is
-   *                 used when managing life-cycle of some metrics and is set when
-   *                 {@link #initializeMetrics(SolrMetricManager, String, String, String)} is called.
-   * @param scope    scope of the metrics (eg. handler name) to separate metrics of
+   * @param tag a symbolic tag that represents this instance of the producer,
+   * or a group of related instances that have the same life-cycle. This tag is
+   * used when managing life-cycle of some metrics and is set when
+   * {@link #initializeMetrics(SolrMetricManager, String, String, String)} is called.
+   * @param scope scope of the metrics (eg. handler name) to separate metrics of
    */
   void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope);
 }
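
For context on the initializeMetrics contract above, here is a minimal sketch of a producer that registers a single counter through the manager. This is an illustration, not code from this patch: the class name, package, and metric name are assumptions, and it relies only on manager.registry(...) plus the standard Dropwizard MetricRegistry API.

    package org.example.metrics;

    import com.codahale.metrics.Counter;
    import com.codahale.metrics.MetricRegistry;
    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.metrics.SolrMetricProducer;

    /** Hypothetical producer that counts requests under the handler's scope. */
    public class ExampleRequestCounter implements SolrMetricProducer {
      private Counter requests;

      @Override
      public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
        // The registry name and scope are supplied by the owning core/handler. This simple
        // sketch does not use the tag; tag-based cleanup applies to gauges registered
        // via manager.registerGauge(...), as in the GaugeWrapper changes above.
        MetricRegistry reg = manager.registry(registry);
        requests = reg.counter(scope + ".exampleRequests");
      }

      public void markRequest() {
        if (requests != null) {
          requests.inc();
        }
      }
    }
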
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
index 468ba60..54b4530 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/SolrJmxReporter.java
@@ -70,7 +70,7 @@
   protected synchronized void doInit() {
     if (serviceUrl != null && agentId != null) {
       mBeanServer = JmxUtil.findFirstMBeanServer();
-      log.warn("No more than one of serviceUrl({}) and agentId({}) should be configured, using first MBeanServer instead of configuration.",
+      log.warn("No more than one of serviceUrl({}) and agentId({}) should be configured, using first MBeanServer {} instead of configuration.",
           serviceUrl, agentId, mBeanServer);
     } else if (serviceUrl != null) {
       // reuse existing services
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
index 189d14d..56f295f 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/jmx/JmxMetricsReporter.java
@@ -28,7 +28,6 @@
 import javax.management.QueryExp;
 import java.io.Closeable;
 import java.lang.invoke.MethodHandles;
-import java.lang.management.ManagementFactory;
 import java.util.HashMap;
 import java.util.Locale;
 import java.util.Map;
@@ -157,9 +156,6 @@
     }
 
     public JmxMetricsReporter build() {
-      if (mBeanServer == null) {
-        mBeanServer = ManagementFactory.getPlatformMBeanServer();
-      }
       if (tag == null) {
         tag = Integer.toHexString(this.hashCode());
       }
diff --git a/solr/core/src/java/org/apache/solr/request/IntervalFacets.java b/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
index 6e492e7..188c07a 100644
--- a/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
+++ b/solr/core/src/java/org/apache/solr/request/IntervalFacets.java
@@ -558,7 +558,7 @@
       } else if (intervalStr.charAt(lastNdx) == ']') {
         endOpen = false;
       } else {
-        throw new SyntaxError("Invalid end character " + intervalStr.charAt(0) + " in facet interval " + intervalStr);
+        throw new SyntaxError("Invalid end character " + intervalStr.charAt(lastNdx) + " in facet interval " + intervalStr);
       }
 
       StringBuilder startStr = new StringBuilder(lastNdx);
diff --git a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java
index fc67781..b9c73bc 100644
--- a/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java
+++ b/solr/core/src/java/org/apache/solr/request/json/ObjectUtil.java
@@ -22,6 +22,8 @@
 import java.util.List;
 import java.util.Map;
 
+import org.apache.solr.common.SolrException;
+
 public class ObjectUtil {
 
   public static class ConflictHandler {
@@ -103,10 +105,14 @@
         // OK, now we need to merge values
         handler.handleConflict(outer, path, key, val, existingVal);
       }
-    } else {
+    } else if (val instanceof Map) {
       // merging at top level...
       Map<String,Object> newMap = (Map<String,Object>)val;
       handler.mergeMap(outer, newMap, path);
+    } else {
+      // todo: find a way to return query param in error message
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Expected JSON Object but got " + val.getClass().getSimpleName() + "=" + val);
     }
   }
 
diff --git a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
index 6370bef..e1ddfcf 100644
--- a/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
+++ b/solr/core/src/java/org/apache/solr/request/json/RequestUtil.java
@@ -270,6 +270,8 @@
           ObjectUtil.mergeObjects(json, path, o, handler);
         }
       }
+    } catch (JSONParser.ParseException e) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
     } catch (IOException e) {
       // impossible
     }
diff --git a/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java b/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java
index 1af6634..82a5301 100644
--- a/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java
+++ b/solr/core/src/java/org/apache/solr/response/SchemaXmlWriter.java
@@ -219,6 +219,8 @@
         if ( ! "solr.TokenizerChain".equals(analyzerProperties.getVal(i))) {
           writeAttr(name, analyzerProperties.getVal(i).toString());
         }
+      } else if (name.equals(IndexSchema.LUCENE_MATCH_VERSION_PARAM)) {
+        writeAttr(name, analyzerProperties.getVal(i).toString());
       }
     }
     boolean isEmptyTag
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
index f437296..4abeedd 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
@@ -16,11 +16,12 @@
  */
 package org.apache.solr.rest.schema;
 
+import java.util.List;
+import java.util.Map;
+
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
-import java.util.List;
-import java.util.Map;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
diff --git a/solr/core/src/java/org/apache/solr/schema/BoolField.java b/solr/core/src/java/org/apache/solr/schema/BoolField.java
index 8cad743..5fb2d85 100644
--- a/solr/core/src/java/org/apache/solr/schema/BoolField.java
+++ b/solr/core/src/java/org/apache/solr/schema/BoolField.java
@@ -45,6 +45,7 @@
 import org.apache.solr.search.QParser;
 import org.apache.solr.search.function.OrdFieldSource;
 import org.apache.solr.uninverting.UninvertingReader.Type;
+
 /**
  *
  */
@@ -260,8 +261,8 @@
           return -1;
         }
       }
+
       @Override
-      
       public boolean boolVal(int doc) throws IOException {
         return getOrdForDoc(doc) == trueOrd;
       }
@@ -298,9 +299,10 @@
   }
 
   private static final int hcode = OrdFieldSource.class.hashCode();
+
   @Override
   public int hashCode() {
     return hcode + field.hashCode();
-  };
+  }
 
 }
diff --git a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java
index c8da0bb..781e199 100644
--- a/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java
+++ b/solr/core/src/java/org/apache/solr/schema/FieldTypePluginLoader.java
@@ -234,10 +234,9 @@
           schema.getDefaultLuceneMatchVersion() :
           SolrConfig.parseLuceneVersionString(matchVersionStr);
         if (luceneMatchVersion == null) {
-          throw new SolrException
-            ( SolrException.ErrorCode.SERVER_ERROR,
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
               "Configuration Error: Analyzer '" + clazz.getName() +
-              "' needs a 'luceneMatchVersion' parameter");
+                  "' needs a '" + IndexSchema.LUCENE_MATCH_VERSION_PARAM + "' parameter");
         }
         analyzer.setVersion(luceneMatchVersion);
         return analyzer;
diff --git a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
index 980be8d..02168f1 100644
--- a/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/IndexSchema.java
@@ -138,7 +138,7 @@
 
   protected List<SchemaField> fieldsWithDefaultValue = new ArrayList<>();
   protected Collection<SchemaField> requiredFields = new HashSet<>();
-  protected volatile DynamicField[] dynamicFields;
+  protected DynamicField[] dynamicFields = new DynamicField[] {};
   public DynamicField[] getDynamicFields() { return dynamicFields; }
 
   protected Map<String, SchemaField> dynamicFieldCache = new ConcurrentHashMap<>();
@@ -151,7 +151,7 @@
   protected Map<String, List<CopyField>> copyFieldsMap = new HashMap<>();
   public Map<String,List<CopyField>> getCopyFieldsMap() { return Collections.unmodifiableMap(copyFieldsMap); }
 
-  protected DynamicCopy[] dynamicCopyFields;
+  protected DynamicCopy[] dynamicCopyFields = new DynamicCopy[] {};
   public DynamicCopy[] getDynamicCopyFields() { return dynamicCopyFields; }
 
   private Map<FieldType, PayloadDecoder> decoders = new HashMap<>();  // cache to avoid scanning token filters repeatedly, unnecessarily
@@ -962,18 +962,12 @@
   private void incrementCopyFieldTargetCount(SchemaField dest) {
     copyFieldTargetCounts.put(dest, copyFieldTargetCounts.containsKey(dest) ? copyFieldTargetCounts.get(dest) + 1 : 1);
   }
-  
-  private void registerDynamicCopyField( DynamicCopy dcopy ) {
-    if( dynamicCopyFields == null ) {
-      dynamicCopyFields = new DynamicCopy[] {dcopy};
-    }
-    else {
-      DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length+1];
-      System.arraycopy(dynamicCopyFields,0,temp,0,dynamicCopyFields.length);
-      temp[temp.length -1] = dcopy;
-      dynamicCopyFields = temp;
-    }
-    log.trace("Dynamic Copy Field:" + dcopy);
+
+  private void registerDynamicCopyField(DynamicCopy dcopy) {
+    DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length + 1];
+    System.arraycopy(dynamicCopyFields, 0, temp, 0, dynamicCopyFields.length);
+    temp[temp.length - 1] = dcopy;
+    dynamicCopyFields = temp;
   }
 
   static SimilarityFactory readSimilarity(SolrResourceLoader loader, Node node) {
@@ -1337,11 +1331,9 @@
         }
       }
     }
-    if (null != dynamicCopyFields) {
-      for (DynamicCopy dynamicCopy : dynamicCopyFields) {
-        if (dynamicCopy.getDestFieldName().equals(destField)) {
-          fieldNames.add(dynamicCopy.getRegex());
-        }
+    for (DynamicCopy dynamicCopy : dynamicCopyFields) {
+      if (dynamicCopy.getDestFieldName().equals(destField)) {
+        fieldNames.add(dynamicCopy.getRegex());
       }
     }
     return fieldNames;
@@ -1356,11 +1348,9 @@
   // This is useful when we need the maxSize param of each CopyField
   public List<CopyField> getCopyFieldsList(final String sourceField){
     final List<CopyField> result = new ArrayList<>();
-    if (null != dynamicCopyFields) {
-      for (DynamicCopy dynamicCopy : dynamicCopyFields) {
-        if (dynamicCopy.matches(sourceField)) {
-          result.add(new CopyField(getField(sourceField), dynamicCopy.getTargetField(sourceField), dynamicCopy.maxChars));
-        }
+    for (DynamicCopy dynamicCopy : dynamicCopyFields) {
+      if (dynamicCopy.matches(sourceField)) {
+        result.add(new CopyField(getField(sourceField), dynamicCopy.getTargetField(sourceField), dynamicCopy.maxChars));
       }
     }
     List<CopyField> fixedCopyFields = copyFieldsMap.get(sourceField);
@@ -1556,48 +1546,46 @@
         }
       }
     }
-    if (null != dynamicCopyFields) {
-      for (IndexSchema.DynamicCopy dynamicCopy : dynamicCopyFields) {
-        final String source = dynamicCopy.getRegex();
-        final String destination = dynamicCopy.getDestFieldName();
-        if ((null == requestedSourceFields || requestedSourceFields.contains(source))
-            && (null == requestedDestinationFields || requestedDestinationFields.contains(destination))) {
-          SimpleOrderedMap<Object> dynamicCopyProps = new SimpleOrderedMap<>();
+    for (IndexSchema.DynamicCopy dynamicCopy : dynamicCopyFields) {
+      final String source = dynamicCopy.getRegex();
+      final String destination = dynamicCopy.getDestFieldName();
+      if ((null == requestedSourceFields || requestedSourceFields.contains(source))
+          && (null == requestedDestinationFields || requestedDestinationFields.contains(destination))) {
+        SimpleOrderedMap<Object> dynamicCopyProps = new SimpleOrderedMap<>();
 
-          dynamicCopyProps.add(SOURCE, dynamicCopy.getRegex());
-          if (showDetails) {
-            IndexSchema.DynamicField sourceDynamicBase = dynamicCopy.getSourceDynamicBase();
-            if (null != sourceDynamicBase) {
-              dynamicCopyProps.add(SOURCE_DYNAMIC_BASE, sourceDynamicBase.getRegex());
-            } else if (source.contains("*")) {
-              List<String> sourceExplicitFields = new ArrayList<>();
-              Pattern pattern = Pattern.compile(source.replace("*", ".*"));   // glob->regex
-              for (String field : fields.keySet()) {
-                if (pattern.matcher(field).matches()) {
-                  sourceExplicitFields.add(field);
-                }
-              }
-              if (sourceExplicitFields.size() > 0) {
-                Collections.sort(sourceExplicitFields);
-                dynamicCopyProps.add(SOURCE_EXPLICIT_FIELDS, sourceExplicitFields);
+        dynamicCopyProps.add(SOURCE, dynamicCopy.getRegex());
+        if (showDetails) {
+          IndexSchema.DynamicField sourceDynamicBase = dynamicCopy.getSourceDynamicBase();
+          if (null != sourceDynamicBase) {
+            dynamicCopyProps.add(SOURCE_DYNAMIC_BASE, sourceDynamicBase.getRegex());
+          } else if (source.contains("*")) {
+            List<String> sourceExplicitFields = new ArrayList<>();
+            Pattern pattern = Pattern.compile(source.replace("*", ".*")); // glob->regex
+            for (String field : fields.keySet()) {
+              if (pattern.matcher(field).matches()) {
+                sourceExplicitFields.add(field);
               }
             }
-          }
-
-          dynamicCopyProps.add(DESTINATION, dynamicCopy.getDestFieldName());
-          if (showDetails) {
-            IndexSchema.DynamicField destDynamicBase = dynamicCopy.getDestDynamicBase();
-            if (null != destDynamicBase) {
-              dynamicCopyProps.add(DESTINATION_DYNAMIC_BASE, destDynamicBase.getRegex());
+            if (sourceExplicitFields.size() > 0) {
+              Collections.sort(sourceExplicitFields);
+              dynamicCopyProps.add(SOURCE_EXPLICIT_FIELDS, sourceExplicitFields);
             }
           }
-
-          if (0 != dynamicCopy.getMaxChars()) {
-            dynamicCopyProps.add(MAX_CHARS, dynamicCopy.getMaxChars());
-          }
-
-          copyFieldProperties.add(dynamicCopyProps);
         }
+
+        dynamicCopyProps.add(DESTINATION, dynamicCopy.getDestFieldName());
+        if (showDetails) {
+          IndexSchema.DynamicField destDynamicBase = dynamicCopy.getDestDynamicBase();
+          if (null != destDynamicBase) {
+            dynamicCopyProps.add(DESTINATION_DYNAMIC_BASE, destDynamicBase.getRegex());
+          }
+        }
+
+        if (0 != dynamicCopy.getMaxChars()) {
+          dynamicCopyProps.add(MAX_CHARS, dynamicCopy.getMaxChars());
+        }
+
+        copyFieldProperties.add(dynamicCopyProps);
       }
     }
     return copyFieldProperties;
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
index c7fbf27..57b0c90 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
@@ -81,7 +81,7 @@
 /** Solr-managed schema - non-user-editable, but can be mutable via internal and external REST API requests. */
 public final class ManagedIndexSchema extends IndexSchema {
 
-  private boolean isMutable = false;
+  private final boolean isMutable;
 
   @Override public boolean isMutable() { return isMutable; }
 
@@ -654,7 +654,7 @@
           System.arraycopy(newSchema.dynamicFields, dfPos + 1, temp, dfPos, newSchema.dynamicFields.length - dfPos - 1);
           newSchema.dynamicFields = temp;
         } else {
-          newSchema.dynamicFields = new DynamicField[0];
+          newSchema.dynamicFields = new DynamicField[] {};
         }
       }
       // After removing all dynamic fields, rebuild affected dynamic copy fields.
@@ -840,26 +840,24 @@
     boolean found = false;
 
     if (null == destSchemaField || null == sourceSchemaField) { // Must be dynamic copy field
-      if (dynamicCopyFields != null) {
-        for (int i = 0 ; i < dynamicCopyFields.length ; ++i) {
-          DynamicCopy dynamicCopy = dynamicCopyFields[i];
-          if (source.equals(dynamicCopy.getRegex()) && dest.equals(dynamicCopy.getDestFieldName())) {
-            found = true;
-            SchemaField destinationPrototype = dynamicCopy.getDestination().getPrototype();
-            if (copyFieldTargetCounts.containsKey(destinationPrototype)) {
-              decrementCopyFieldTargetCount(destinationPrototype);
-            }
-            if (dynamicCopyFields.length > 1) {
-              DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length - 1];
-              System.arraycopy(dynamicCopyFields, 0, temp, 0, i);
-              // skip over the dynamic copy field to be deleted
-              System.arraycopy(dynamicCopyFields, i + 1, temp, i, dynamicCopyFields.length - i - 1);
-              dynamicCopyFields = temp;
-            } else {
-              dynamicCopyFields = null;
-            }
-            break;
+      for (int i = 0; i < dynamicCopyFields.length; ++i) {
+        DynamicCopy dynamicCopy = dynamicCopyFields[i];
+        if (source.equals(dynamicCopy.getRegex()) && dest.equals(dynamicCopy.getDestFieldName())) {
+          found = true;
+          SchemaField destinationPrototype = dynamicCopy.getDestination().getPrototype();
+          if (copyFieldTargetCounts.containsKey(destinationPrototype)) {
+            decrementCopyFieldTargetCount(destinationPrototype);
           }
+          if (dynamicCopyFields.length > 1) {
+            DynamicCopy[] temp = new DynamicCopy[dynamicCopyFields.length - 1];
+            System.arraycopy(dynamicCopyFields, 0, temp, 0, i);
+            // skip over the dynamic copy field to be deleted
+            System.arraycopy(dynamicCopyFields, i + 1, temp, i, dynamicCopyFields.length - i - 1);
+            dynamicCopyFields = temp;
+          } else {
+            dynamicCopyFields = new DynamicCopy[] {};
+          }
+          break;
         }
       }
     } else { // non-dynamic copy field directive
diff --git a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
index afc3b04..31a7206 100644
--- a/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
+++ b/solr/core/src/java/org/apache/solr/schema/SchemaManager.java
@@ -138,6 +138,7 @@
             //only for non cloud stuff
             managedIndexSchema.persistManagedSchema(false);
             core.setLatestSchema(managedIndexSchema);
+            core.getCoreContainer().reload(core.getName());
           } catch (SolrException e) {
             log.warn(errorMsg);
             errors = singletonList(errorMsg + e.getMessage());
diff --git a/solr/core/src/java/org/apache/solr/search/CacheConfig.java b/solr/core/src/java/org/apache/solr/search/CacheConfig.java
index 753762a..16a9d57 100644
--- a/solr/core/src/java/org/apache/solr/search/CacheConfig.java
+++ b/solr/core/src/java/org/apache/solr/search/CacheConfig.java
@@ -14,150 +14,148 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
 package org.apache.solr.search;
 
 import javax.xml.xpath.XPathConstants;
-import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.lucene.analysis.util.ResourceLoader;
-import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
-import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.ConfigOverlay;
-import org.apache.solr.core.MemClassLoader;
-import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.RuntimeLib;
+import org.apache.solr.common.MapSerializable;
 import org.apache.solr.core.SolrConfig;
-import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.util.DOMUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
 
-public class CacheConfig implements MapWriter {
-  final PluginInfo args;
-  private CacheRegenerator defRegen;
-  private final String name;
-  private String cacheImpl, regenImpl;
-  Object[] persistence = new Object[1];
+/**
+ * Contains the knowledge of how cache config is
+ * stored in the solrconfig.xml file, and implements a
+ * factory to create caches.
+ *
+ *
+ */
+public class CacheConfig implements MapSerializable {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  
+  private String nodeName;
 
+  private Class<? extends SolrCache> clazz;
+  private Map<String,String> args;
+  private CacheRegenerator regenerator;
 
-  public CacheConfig(Map<String, String> args, String path) {
-    this.args = new PluginInfo(SolrCache.TYPE, (Map) copyValsAsString(args));
-    this.name = args.get(NAME);
-    this.cacheImpl = args.getOrDefault("class", "solr.LRUCache");
-    this.regenImpl = args.get("regenerator");
-    this.args.pathInConfig = StrUtils.splitSmart(path, '/', true);
+  private String cacheImpl;
+
+  private Object[] persistence = new Object[1];
+
+  private String regenImpl;
+
+  public CacheConfig() {}
+
+  public CacheConfig(Class<? extends SolrCache> clazz, Map<String,String> args, CacheRegenerator regenerator) {
+    this.clazz = clazz;
+    this.args = args;
+    this.regenerator = regenerator;
   }
 
-  static Map<String, String> copyValsAsString(Map m) {
-    Map<String, String> copy = new LinkedHashMap(m.size());
-    m.forEach((k, v) -> copy.put(String.valueOf(k), String.valueOf(v)));
-    return copy;
+  public CacheRegenerator getRegenerator() {
+    return regenerator;
   }
 
-  public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) {
-    Node node = solrConfig.getNode(xpath, false);
-    if (node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) {
-      Map<String, String> m = solrConfig.getOverlay().getEditableSubProperties(xpath);
-      if (m == null) return null;
-      List<String> pieces = StrUtils.splitSmart(xpath, '/');
-      String name = pieces.get(pieces.size() - 1);
-      m = Utils.getDeepCopy(m, 2);
-      m.put(NAME, name);
-      return new CacheConfig(m, xpath);
-    } else {
-      Map<String, String> attrs = DOMUtil.toMap(node.getAttributes());
-      attrs.put(NAME, node.getNodeName());
-      return new CacheConfig(applyOverlay(xpath, solrConfig.getOverlay(), attrs), xpath);
-
-    }
-
-
+  public void setRegenerator(CacheRegenerator regenerator) {
+    this.regenerator = regenerator;
   }
 
-  private static Map applyOverlay(String xpath, ConfigOverlay overlay, Map args) {
-    Map<String, String> map = xpath == null ? null : overlay.getEditableSubProperties(xpath);
-    if (map != null) {
-      HashMap<String, String> mapCopy = new HashMap<>(args);
-      for (Map.Entry<String, String> e : map.entrySet()) {
-        mapCopy.put(e.getKey(), String.valueOf(e.getValue()));
-      }
-      return mapCopy;
-    }
-    return args;
-  }
-
-  public static Map<String, CacheConfig> getConfigs(SolrConfig solrConfig, String configPath) {
+  public static Map<String, CacheConfig> getMultipleConfigs(SolrConfig solrConfig, String configPath) {
     NodeList nodes = (NodeList) solrConfig.evaluate(configPath, XPathConstants.NODESET);
     if (nodes == null || nodes.getLength() == 0) return new LinkedHashMap<>();
     Map<String, CacheConfig> result = new HashMap<>(nodes.getLength());
     for (int i = 0; i < nodes.getLength(); i++) {
-      Map<String, String> args = DOMUtil.toMap(nodes.item(i).getAttributes());
-      result.put(args.get(NAME), new CacheConfig(args, configPath+"/"+args.get(NAME)));
+      CacheConfig config = getConfig(solrConfig, nodes.item(i).getNodeName(), DOMUtil.toMap(nodes.item(i).getAttributes()), configPath);
+      result.put(config.args.get(NAME), config);
     }
     return result;
   }
 
-  public String getName() {
-    return name;
+
+  public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) {
+    Node node = solrConfig.getNode(xpath, false);
+    if(node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) {
+      Map<String, String> m = solrConfig.getOverlay().getEditableSubProperties(xpath);
+      if(m==null) return null;
+      List<String> parts = StrUtils.splitSmart(xpath, '/');
+      return getConfig(solrConfig,parts.get(parts.size()-1) , Collections.EMPTY_MAP,xpath);
+    }
+    return getConfig(solrConfig, node.getNodeName(),DOMUtil.toMap(node.getAttributes()), xpath);
   }
 
 
-  public <K, V> SolrCacheHolder<K, V> newInstance(SolrCore core) {
-    return new SolrCacheHolder(new CacheInfo(this, core));
+  public static CacheConfig getConfig(SolrConfig solrConfig, String nodeName, Map<String,String> attrs, String xpath) {
+    CacheConfig config = new CacheConfig();
+    config.nodeName = nodeName;
+    Map attrsCopy = new LinkedHashMap<>(attrs.size());
+    for (Map.Entry<String, String> e : attrs.entrySet()) {
+      attrsCopy.put(e.getKey(), String.valueOf(e.getValue()));
+    }
+    attrs = attrsCopy;
+    config.args = attrs;
+
+    Map<String, String> map = xpath == null ? null : solrConfig.getOverlay().getEditableSubProperties(xpath);
+    if (map != null) {
+      HashMap<String, String> mapCopy = new HashMap<>(config.args);
+      for (Map.Entry<String, String> e : map.entrySet()) {
+        mapCopy.put(e.getKey(), String.valueOf(e.getValue()));
+      }
+      config.args = mapCopy;
+    }
+    String nameAttr = config.args.get(NAME);  // OPTIONAL
+    if (nameAttr == null) {
+      config.args.put(NAME, config.nodeName);
+    }
+
+    SolrResourceLoader loader = solrConfig.getResourceLoader();
+    config.cacheImpl = config.args.get("class");
+    if (config.cacheImpl == null) config.cacheImpl = "solr.LRUCache";
+    config.regenImpl = config.args.get("regenerator");
+    config.clazz = loader.findClass(config.cacheImpl, SolrCache.class);
+    if (config.regenImpl != null) {
+      config.regenerator = loader.newInstance(config.regenImpl, CacheRegenerator.class);
+    }
+    
+    return config;
   }
 
-  static class CacheInfo {
-    final CacheConfig cfg;
-    SolrCore core;
-    SolrCache cache = null;
-    String pkg;
-    RuntimeLib runtimeLib;
-    CacheRegenerator regen = null;
-
-
-    CacheInfo(CacheConfig cfg, SolrCore core) {
-      this.core = core;
-      this.cfg = cfg;
-      pkg = cfg.args.attributes.get(CommonParams.PACKAGE);
-      ResourceLoader loader = pkg == null ? core.getResourceLoader() :
-          core.getCoreContainer().getPackageManager().getResourceLoader(pkg);
-
-      try {
-        cache = loader.findClass(cfg.cacheImpl, SolrCache.class).getConstructor().newInstance();
-        regen = null;
-        if (cfg.regenImpl != null) {
-          regen = loader.findClass(cfg.regenImpl, CacheRegenerator.class).getConstructor().newInstance();
-        }
-      } catch (Exception e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error loading cache " + cfg.jsonStr(), e);
-      }
-      if (regen == null && cfg.defRegen != null) regen = cfg.defRegen;
-      cfg.persistence[0] = cache.init(cfg.args.attributes, cfg.persistence[0], regen);
-      if (pkg!=null && loader instanceof MemClassLoader) {
-        MemClassLoader memClassLoader = (MemClassLoader) loader;
-        runtimeLib = core.getCoreContainer().getPackageManager().getLib(pkg);
-      }
-
+  public SolrCache newInstance() {
+    try {
+      SolrCache cache = clazz.getConstructor().newInstance();
+      persistence[0] = cache.init(args, persistence[0], regenerator);
+      return cache;
+    } catch (Exception e) {
+      SolrException.log(log,"Error instantiating cache",e);
+      // we can carry on without a cache... but should we?
+      // in some cases (like an OOM) we probably should try to continue.
+      return null;
     }
   }
 
-
-  public void setDefaultRegenerator(CacheRegenerator regen) {
-    this.defRegen = regen;
-  }
-
   @Override
-  public void writeMap(EntryWriter ew) throws IOException {
-    args.attributes.forEach(ew.getBiConsumer());
+  public Map<String, Object> toMap(Map<String, Object> map) {
+    Map result = Collections.unmodifiableMap(args);
+    return result;
   }
+
+  public String getNodeName() {
+    return nodeName;
+  }
+
+
 }
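
A rough sketch of how the restored CacheConfig API above is typically consumed when wiring a searcher cache. The xpath "query/filterCache" is the conventional location of the filter cache element in solrconfig.xml, and the class and method names here are illustrative assumptions, not part of this patch:

    package org.example.search;

    import org.apache.solr.core.SolrConfig;
    import org.apache.solr.search.CacheConfig;
    import org.apache.solr.search.SolrCache;

    /** Hypothetical wiring: read a cache definition from solrconfig.xml and build the cache. */
    public class FilterCacheWiringExample {

      @SuppressWarnings("rawtypes")
      public static SolrCache buildFilterCache(SolrConfig solrConfig) {
        // getConfig(...) returns null when the element is absent or disabled in the config/overlay.
        CacheConfig config = CacheConfig.getConfig(solrConfig, "query/filterCache");
        // newInstance() instantiates the configured class (solr.LRUCache by default) and
        // calls init(...) with the parsed attributes and the optional regenerator.
        return config == null ? null : config.newInstance();
      }
    }
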
diff --git a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
new file mode 100644
index 0000000..71eb86f
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
@@ -0,0 +1,367 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import java.lang.invoke.MethodHandles;
+import java.time.Duration;
+import java.util.Collections;
+import java.util.Locale;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.Executor;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.ForkJoinPool;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.LongAdder;
+
+import com.codahale.metrics.MetricRegistry;
+import com.github.benmanes.caffeine.cache.RemovalCause;
+import com.github.benmanes.caffeine.cache.RemovalListener;
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.RamUsageEstimator;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+import com.github.benmanes.caffeine.cache.Policy.Eviction;
+import com.github.benmanes.caffeine.cache.stats.CacheStats;
+import com.google.common.annotations.VisibleForTesting;
+
+/**
+ * A SolrCache backed by the Caffeine caching library [1]. By default it uses the Window TinyLFU (W-TinyLFU)
+ * eviction policy.
+ * <p>This cache supports either maximum size limit (the number of items) or maximum ram bytes limit, but
+ * not both. If both values are set then only maxRamMB limit is used and maximum size limit is ignored.</p>
+ * <p>
+ * W-TinyLFU [2] is a near optimal policy that uses recency and frequency to determine which entry
+ * to evict in O(1) time. The estimated frequency is retained in a Count-Min Sketch and entries
+ * reside on LRU priority queues [3]. By capturing the historic frequency of an entry, the cache is
+ * able to outperform classic policies like LRU and LFU, as well as modern policies like ARC and
+ * LIRS. This policy performed particularly well in search workloads.
+ * <p>
+ * [1] https://github.com/ben-manes/caffeine
+ * [2] http://arxiv.org/pdf/1512.00727.pdf
+ * [3] http://highscalability.com/blog/2016/1/25/design-of-a-modern-cache.html
+ */
+public class CaffeineCache<K, V> extends SolrCacheBase implements SolrCache<K, V>, Accountable, RemovalListener<K, V> {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(CaffeineCache.class)
+      + RamUsageEstimator.shallowSizeOfInstance(CacheStats.class)
+      + 2 * RamUsageEstimator.shallowSizeOfInstance(LongAdder.class);
+
+  private Executor executor;
+
+  private CacheStats priorStats;
+  private long priorInserts;
+
+  private String description = "Caffeine Cache";
+  private LongAdder inserts;
+  private Cache<K,V> cache;
+  private long warmupTime;
+  private int maxSize;
+  private long maxRamBytes;
+  private int initialSize;
+  private int maxIdleTimeSec;
+  private boolean cleanupThread;
+
+  private Set<String> metricNames = ConcurrentHashMap.newKeySet();
+  private MetricsMap cacheMap;
+  private MetricRegistry registry;
+
+  private long initialRamBytes = 0;
+  private final LongAdder ramBytes = new LongAdder();
+
+  public CaffeineCache() {
+    this.priorStats = CacheStats.empty();
+  }
+
+  @Override
+  @SuppressWarnings({"unchecked", "rawtypes"})
+  public Object init(Map args, Object persistence, CacheRegenerator regenerator) {
+    super.init(args, regenerator);
+    String str = (String) args.get(SIZE_PARAM);
+    maxSize = (str == null) ? 1024 : Integer.parseInt(str);
+    str = (String) args.get("initialSize");
+    initialSize = Math.min((str == null) ? 1024 : Integer.parseInt(str), maxSize);
+    str = (String) args.get(MAX_IDLE_TIME_PARAM);
+    if (str == null) {
+      maxIdleTimeSec = -1;
+    } else {
+      maxIdleTimeSec = Integer.parseInt(str);
+    }
+    str = (String) args.get(MAX_RAM_MB_PARAM);
+    int maxRamMB = str == null ? -1 : Double.valueOf(str).intValue();
+    maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
+    str = (String) args.get(CLEANUP_THREAD_PARAM);
+    cleanupThread = str != null && Boolean.parseBoolean(str);
+    if (cleanupThread) {
+      executor = ForkJoinPool.commonPool();
+    } else {
+      executor = Runnable::run;
+    }
+
+    description = generateDescription(maxSize, initialSize);
+
+    cache = buildCache(null);
+    inserts = new LongAdder();
+
+    initialRamBytes =
+        RamUsageEstimator.shallowSizeOfInstance(cache.getClass()) +
+        RamUsageEstimator.shallowSizeOfInstance(executor.getClass()) +
+        RamUsageEstimator.sizeOfObject(description);
+
+    return persistence;
+  }
+
+  private Cache<K, V> buildCache(Cache<K, V> prev) {
+    Caffeine builder = Caffeine.newBuilder()
+        .initialCapacity(initialSize)
+        .executor(executor)
+        .removalListener(this)
+        .recordStats();
+    if (maxIdleTimeSec > 0) {
+      builder.expireAfterAccess(Duration.ofSeconds(maxIdleTimeSec));
+    }
+    if (maxRamBytes != Long.MAX_VALUE) {
+      builder.maximumWeight(maxRamBytes);
+      builder.weigher((k, v) -> (int) (RamUsageEstimator.sizeOfObject(k) + RamUsageEstimator.sizeOfObject(v)));
+    } else {
+      builder.maximumSize(maxSize);
+    }
+    Cache<K, V> newCache = builder.build();
+    if (prev != null) {
+      newCache.putAll(prev.asMap());
+    }
+    return newCache;
+  }
+
+  @Override
+  public void onRemoval(K key, V value, RemovalCause cause) {
+    ramBytes.add(
+        - (RamUsageEstimator.sizeOfObject(key, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED) +
+        RamUsageEstimator.sizeOfObject(value, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED) +
+        RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY)
+    );
+  }
+
+  @Override
+  public long ramBytesUsed() {
+    return BASE_RAM_BYTES_USED + initialRamBytes + ramBytes.sum();
+  }
+
+  @Override
+  public V get(K key) {
+    return cache.getIfPresent(key);
+  }
+
+  @Override
+  public V put(K key, V val) {
+    inserts.increment();
+    V old = cache.asMap().put(key, val);
+    ramBytes.add(RamUsageEstimator.sizeOfObject(key, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED) +
+        RamUsageEstimator.sizeOfObject(val, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED));
+    if (old != null) {
+      ramBytes.add(- RamUsageEstimator.sizeOfObject(old, RamUsageEstimator.QUERY_DEFAULT_RAM_BYTES_USED));
+    } else {
+      ramBytes.add(RamUsageEstimator.LINKED_HASHTABLE_RAM_BYTES_PER_ENTRY);
+    }
+    return old;
+  }
+
+  @Override
+  public void clear() {
+    cache.invalidateAll();
+    ramBytes.reset();
+  }
+
+  @Override
+  public int size() {
+    return cache.asMap().size();
+  }
+
+  @Override
+  public void close() {
+    cache.invalidateAll();
+    cache.cleanUp();
+    if (executor instanceof ExecutorService) {
+      ((ExecutorService)executor).shutdownNow();
+    }
+    ramBytes.reset();
+  }
+
+  @Override
+  public int getMaxSize() {
+    return maxSize;
+  }
+
+  @Override
+  public void setMaxSize(int maxSize) {
+    if (this.maxSize == maxSize) {
+      return;
+    }
+    Optional<Eviction<K, V>> evictionOpt = cache.policy().eviction();
+    if (evictionOpt.isPresent()) {
+      Eviction<K, V> eviction = evictionOpt.get();
+      eviction.setMaximum(maxSize);
+      this.maxSize = maxSize;
+      initialSize = Math.min(1024, this.maxSize);
+      description = generateDescription(this.maxSize, initialSize);
+      cache.cleanUp();
+    }
+  }
+
+  @Override
+  public int getMaxRamMB() {
+    return maxRamBytes != Long.MAX_VALUE ? (int) (maxRamBytes / 1024L / 1024L) : -1;
+  }
+
+  @Override
+  public void setMaxRamMB(int maxRamMB) {
+    long newMaxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
+    if (newMaxRamBytes != maxRamBytes) {
+      maxRamBytes = newMaxRamBytes;
+      Optional<Eviction<K, V>> evictionOpt = cache.policy().eviction();
+      if (evictionOpt.isPresent()) {
+        Eviction<K, V> eviction = evictionOpt.get();
+        if (!eviction.isWeighted()) {
+          // rebuild cache using weigher
+          cache = buildCache(cache);
+          return;
+        } else if (maxRamBytes == Long.MAX_VALUE) {
+          // rebuild cache using maxSize
+          cache = buildCache(cache);
+          return;
+        }
+        eviction.setMaximum(newMaxRamBytes);
+        description = generateDescription(this.maxSize, initialSize);
+        cache.cleanUp();
+      }
+    }
+  }
+
+  @Override
+  public void warm(SolrIndexSearcher searcher, SolrCache<K,V> old) {
+    if (regenerator == null) {
+      return;
+    }
+    
+    long warmingStartTime = System.nanoTime();
+    Map<K, V> hottest = Collections.emptyMap();
+    CaffeineCache<K,V> other = (CaffeineCache<K,V>)old;
+
+    // warm entries
+    if (isAutowarmingOn()) {
+      Eviction<K, V> policy = other.cache.policy().eviction().get();
+      int size = autowarm.getWarmCount(other.cache.asMap().size());
+      hottest = policy.hottest(size);
+    }
+
+    for (Entry<K, V> entry : hottest.entrySet()) {
+      try {
+        boolean continueRegen = regenerator.regenerateItem(
+            searcher, this, old, entry.getKey(), entry.getValue());
+        if (!continueRegen) {
+          break;
+        }
+      }
+      catch (Exception e) {
+        SolrException.log(log, "Error during auto-warming of key:" + entry.getKey(), e);
+      }
+    }
+
+    inserts.reset();
+    priorStats = other.cache.stats().plus(other.priorStats);
+    priorInserts = other.inserts.sum() + other.priorInserts;
+    warmupTime = TimeUnit.MILLISECONDS.convert(System.nanoTime() - warmingStartTime, TimeUnit.NANOSECONDS);
+  }
+
+  /** Returns the description of this cache. */
+  private String generateDescription(int limit, int initialSize) {
+    return String.format(Locale.ROOT, "TinyLfu Cache(maxSize=%d, initialSize=%d%s)",
+        limit, initialSize, isAutowarmingOn() ? (", " + getAutowarmDescription()) : "");
+  }
+
+  //////////////////////// SolrInfoBean methods //////////////////////
+
+  @Override
+  public String getName() {
+    return CaffeineCache.class.getName();
+  }
+
+  @Override
+  public String getDescription() {
+     return description;
+  }
+
+  // for unit tests only
+  @VisibleForTesting
+  MetricsMap getMetricsMap() {
+    return cacheMap;
+  }
+
+  @Override
+  public MetricRegistry getMetricRegistry() {
+    return registry;
+  }
+
+  @Override
+  public String toString() {
+    return name() + (cacheMap != null ? cacheMap.getValue().toString() : "");
+  }
+
+  @Override
+  public Set<String> getMetricNames() {
+    return metricNames;
+  }
+
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
+    registry = manager.registry(registryName);
+    cacheMap = new MetricsMap((detailed, map) -> {
+      if (cache != null) {
+        CacheStats stats = cache.stats();
+        long insertCount = inserts.sum();
+
+        map.put(LOOKUPS_PARAM, stats.requestCount());
+        map.put(HITS_PARAM, stats.hitCount());
+        map.put(HIT_RATIO_PARAM, stats.hitRate());
+        map.put(INSERTS_PARAM, insertCount);
+        map.put(EVICTIONS_PARAM, stats.evictionCount());
+        map.put(SIZE_PARAM, cache.asMap().size());
+        map.put("warmupTime", warmupTime);
+        map.put(RAM_BYTES_USED_PARAM, ramBytesUsed());
+        map.put(MAX_RAM_MB_PARAM, getMaxRamMB());
+
+        CacheStats cumulativeStats = priorStats.plus(stats);
+        map.put("cumulative_lookups", cumulativeStats.requestCount());
+        map.put("cumulative_hits", cumulativeStats.hitCount());
+        map.put("cumulative_hitratio", cumulativeStats.hitRate());
+        map.put("cumulative_inserts", priorInserts + insertCount);
+        map.put("cumulative_evictions", cumulativeStats.evictionCount());
+      }
+    });
+    manager.registerGauge(this, registryName, cacheMap, tag, true, scope, getCategory().toString());
+  }
+}
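
To make the init() contract of the new CaffeineCache concrete, here is a small standalone sketch that configures one directly from an args map, mirroring the parameters parsed above. In normal operation these values come from the cache element in solrconfig.xml (e.g. class="solr.CaffeineCache"); the parameter keys below assume the conventional names behind SIZE_PARAM, MAX_RAM_MB_PARAM, MAX_IDLE_TIME_PARAM and CLEANUP_THREAD_PARAM, and the values are illustrative:

    package org.example.search;

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.solr.search.CaffeineCache;

    /** Hypothetical standalone wiring; normally CacheConfig supplies these args. */
    public class CaffeineCacheInitExample {
      public static void main(String[] argv) {
        Map<String, String> args = new HashMap<>();
        args.put("size", "512");            // maximum number of entries
        args.put("initialSize", "128");
        args.put("maxRamMB", "64");         // if set, the RAM limit wins and "size" is ignored
        args.put("maxIdleTime", "300");     // seconds; idle entries expire after this
        args.put("cleanupThread", "true");  // maintenance runs on the common ForkJoinPool

        CaffeineCache<String, Object> cache = new CaffeineCache<>();
        cache.init(args, null, null);       // no prior persistence state, no regenerator
        cache.put("q1", new Object());
        System.out.println(cache.getDescription() + ", size=" + cache.size());
      }
    }
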
diff --git a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
index 1cec0aa..2dc1c1e 100644
--- a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
@@ -16,14 +16,6 @@
  */
 package org.apache.solr.search;
 
-import java.lang.invoke.MethodHandles;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.TimeUnit;
-
 import com.codahale.metrics.MetricRegistry;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.RamUsageEstimator;
@@ -34,6 +26,14 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.lang.invoke.MethodHandles;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.TimeUnit;
+
 /**
  * SolrCache based on ConcurrentLRUCache implementation.
  * <p>
@@ -47,16 +47,13 @@
  * @see org.apache.solr.search.SolrCache
  * @since solr 1.4
  */
-public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K, V>, Accountable {
+public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>, Accountable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FastLRUCache.class);
 
   public static final String MIN_SIZE_PARAM = "minSize";
   public static final String ACCEPTABLE_SIZE_PARAM = "acceptableSize";
-  public static final String INITIAL_SIZE_PARAM = "initialSize";
-  public static final String CLEANUP_THREAD_PARAM = "cleanupThread";
-  public static final String SHOW_ITEMS_PARAM = "showItems";
 
   // contains the statistics objects for all open caches of the same type
   private List<ConcurrentLRUCache.Stats> statsList;
@@ -64,7 +61,7 @@
   private long warmupTime = 0;
 
   private String description = "Concurrent LRU Cache";
-  private ConcurrentLRUCache<K, V> cache;
+  private ConcurrentLRUCache<K,V> cache;
   private int showItems = 0;
 
   private long maxRamBytes;
@@ -105,7 +102,7 @@
     str = (String) args.get(INITIAL_SIZE_PARAM);
     initialSize = str == null ? maxSize : Integer.parseInt(str);
     str = (String) args.get(CLEANUP_THREAD_PARAM);
-    cleanupThread = str != null && Boolean.parseBoolean(str);
+    cleanupThread = str == null ? false : Boolean.parseBoolean(str);
 
     str = (String) args.get(SHOW_ITEMS_PARAM);
     showItems = str == null ? 0 : Integer.parseInt(str);
@@ -120,7 +117,7 @@
     str = (String) args.get(MAX_RAM_MB_PARAM);
     long maxRamMB = str == null ? -1 : (long) Double.parseDouble(str);
     this.maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
-    if (maxRamBytes != Long.MAX_VALUE) {
+    if (maxRamBytes != Long.MAX_VALUE)  {
       ramLowerWatermark = Math.round(maxRamBytes * 0.8);
       description = generateDescription(maxRamBytes, ramLowerWatermark, cleanupThread);
       cache = new ConcurrentLRUCache<>(ramLowerWatermark, maxRamBytes, cleanupThread, null, maxIdleTimeSec);
@@ -143,6 +140,63 @@
       statsList.add(new ConcurrentLRUCache.Stats());
     }
     statsList.add(cache.getStats());
+    cacheMap = new MetricsMap((detailed, map) -> {
+      if (cache != null) {
+        ConcurrentLRUCache.Stats stats = cache.getStats();
+        long lookups = stats.getCumulativeLookups();
+        long hits = stats.getCumulativeHits();
+        long inserts = stats.getCumulativePuts();
+        long evictions = stats.getCumulativeEvictions();
+        long idleEvictions = stats.getCumulativeIdleEvictions();
+        long size = stats.getCurrentSize();
+        long clookups = 0;
+        long chits = 0;
+        long cinserts = 0;
+        long cevictions = 0;
+        long cIdleEvictions = 0;
+
+        // NOTE: It is safe to iterate on a CopyOnWriteArrayList
+        for (ConcurrentLRUCache.Stats statistics : statsList) {
+          clookups += statistics.getCumulativeLookups();
+          chits += statistics.getCumulativeHits();
+          cinserts += statistics.getCumulativePuts();
+          cevictions += statistics.getCumulativeEvictions();
+          cIdleEvictions += statistics.getCumulativeIdleEvictions();
+        }
+
+        map.put(LOOKUPS_PARAM, lookups);
+        map.put(HITS_PARAM, hits);
+        map.put(HIT_RATIO_PARAM, calcHitRatio(lookups, hits));
+        map.put(INSERTS_PARAM, inserts);
+        map.put(EVICTIONS_PARAM, evictions);
+        map.put(SIZE_PARAM, size);
+        map.put("cleanupThread", cleanupThread);
+        map.put("idleEvictions", idleEvictions);
+        map.put(RAM_BYTES_USED_PARAM, ramBytesUsed());
+        map.put(MAX_RAM_MB_PARAM, getMaxRamMB());
+
+        map.put("warmupTime", warmupTime);
+        map.put("cumulative_lookups", clookups);
+        map.put("cumulative_hits", chits);
+        map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
+        map.put("cumulative_inserts", cinserts);
+        map.put("cumulative_evictions", cevictions);
+        map.put("cumulative_idleEvictions", cIdleEvictions);
+
+        if (detailed && showItems != 0) {
+          Map items = cache.getLatestAccessedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
+          for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
+            Object k = e.getKey();
+            Object v = e.getValue();
+
+            String ks = "item_" + k;
+            String vs = v.toString();
+            map.put(ks, vs);
+          }
+
+        }
+      }
+    });
     return statsList;
   }
 
@@ -159,7 +213,7 @@
    */
   protected String generateDescription(int limit, int initialSize, int minLimit, int acceptableLimit, boolean newThread) {
     String description = "Concurrent LRU Cache(maxSize=" + limit + ", initialSize=" + initialSize +
-        ", minSize=" + minLimit + ", acceptableSize=" + acceptableLimit + ", cleanupThread=" + newThread;
+        ", minSize="+minLimit + ", acceptableSize="+acceptableLimit+", cleanupThread="+newThread;
     if (isAutowarmingOn()) {
       description += ", " + getAutowarmDescription();
     }
@@ -220,9 +274,10 @@
       for (int i = itemsArr.length - 1; i >= 0; i--) {
         try {
           boolean continueRegen = regenerator.regenerateItem(searcher,
-              this, old, itemsArr[i].getKey(), itemsArr[i].getValue());
+                  this, old, itemsArr[i].getKey(), itemsArr[i].getValue());
           if (!continueRegen) break;
-        } catch (Exception e) {
+        }
+        catch (Exception e) {
           SolrException.log(log, "Error during auto-warming of key:" + itemsArr[i].getKey(), e);
         }
       }
@@ -258,67 +313,9 @@
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registryName, String tag, String scope) {
     registry = manager.registry(registryName);
-    cacheMap = new MetricsMap((detailed, map) -> {
-      if (cache != null) {
-        ConcurrentLRUCache.Stats stats = cache.getStats();
-        long lookups = stats.getCumulativeLookups();
-        long hits = stats.getCumulativeHits();
-        long inserts = stats.getCumulativePuts();
-        long evictions = stats.getCumulativeEvictions();
-        long idleEvictions = stats.getCumulativeIdleEvictions();
-        long size = stats.getCurrentSize();
-        long clookups = 0;
-        long chits = 0;
-        long cinserts = 0;
-        long cevictions = 0;
-        long cIdleEvictions = 0;
-
-        // NOTE: It is safe to iterate on a CopyOnWriteArrayList
-        for (ConcurrentLRUCache.Stats statistiscs : statsList) {
-          clookups += statistiscs.getCumulativeLookups();
-          chits += statistiscs.getCumulativeHits();
-          cinserts += statistiscs.getCumulativePuts();
-          cevictions += statistiscs.getCumulativeEvictions();
-          cIdleEvictions += statistiscs.getCumulativeIdleEvictions();
-        }
-
-        map.put(LOOKUPS_PARAM, lookups);
-        map.put(HITS_PARAM, hits);
-        map.put(HIT_RATIO_PARAM, calcHitRatio(lookups, hits));
-        map.put(INSERTS_PARAM, inserts);
-        map.put(EVICTIONS_PARAM, evictions);
-        map.put(SIZE_PARAM, size);
-        map.put("cleanupThread", cleanupThread);
-        map.put("idleEvictions", idleEvictions);
-        map.put(RAM_BYTES_USED_PARAM, ramBytesUsed());
-        map.put(MAX_RAM_MB_PARAM, getMaxRamMB());
-
-        map.put("warmupTime", warmupTime);
-        map.put("cumulative_lookups", clookups);
-        map.put("cumulative_hits", chits);
-        map.put("cumulative_hitratio", calcHitRatio(clookups, chits));
-        map.put("cumulative_inserts", cinserts);
-        map.put("cumulative_evictions", cevictions);
-        map.put("cumulative_idleEvictions", cIdleEvictions);
-
-        if (detailed && showItems != 0) {
-          Map items = cache.getLatestAccessedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
-          for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
-            Object k = e.getKey();
-            Object v = e.getValue();
-
-            String ks = "item_" + k;
-            String vs = v.toString();
-            map.put(ks, vs);
-          }
-
-        }
-      }
-    });
     manager.registerGauge(this, registryName, cacheMap, tag, true, scope, getCategory().toString());
   }
 
-
   // for unit tests only
   MetricsMap getMetricsMap() {
     return cacheMap;
diff --git a/solr/core/src/java/org/apache/solr/search/LFUCache.java b/solr/core/src/java/org/apache/solr/search/LFUCache.java
index b9a4820..20cf664 100644
--- a/solr/core/src/java/org/apache/solr/search/LFUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LFUCache.java
@@ -17,10 +17,10 @@
 package org.apache.solr.search;
 
 import java.lang.invoke.MethodHandles;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;
 
@@ -116,14 +116,14 @@
     str = (String) args.get(AUTOWARM_COUNT_PARAM);
     autowarmCount = str == null ? 0 : Integer.parseInt(str);
     str = (String) args.get(CLEANUP_THREAD_PARAM);
-    cleanupThread = str != null && Boolean.parseBoolean(str);
+    cleanupThread = str == null ? false : Boolean.parseBoolean(str);
 
     str = (String) args.get(SHOW_ITEMS_PARAM);
     showItems = str == null ? 0 : Integer.parseInt(str);
 
     // Don't make this "efficient" by removing the test, default is true and omitting the param will make it false.
     str = (String) args.get(TIME_DECAY_PARAM);
-    timeDecay = (str == null) || Boolean.parseBoolean(str);
+    timeDecay = (str == null) ? true : Boolean.parseBoolean(str);
 
     str = (String) args.get(MAX_IDLE_TIME_PARAM);
     if (str == null) {
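For reference, the two rewrites in this hunk are behavior-preserving: cleanupThread still defaults to false and timeDecay still defaults to true when the parameter is absent. A minimal standalone sketch of the same null-defaulting (illustrative only, not Solr code; only the parameter names are carried over):

    import java.util.HashMap;
    import java.util.Map;

    public class CacheParamDefaults {
      public static void main(String[] args) {
        Map<String, String> cfg = new HashMap<>();   // e.g. attributes parsed from the cache config
        // cfg.put("cleanupThread", "true");         // absent -> defaults to false
        // cfg.put("timeDecay", "false");            // absent -> defaults to true

        String str = cfg.get("cleanupThread");
        boolean cleanupThread = str == null ? false : Boolean.parseBoolean(str);

        str = cfg.get("timeDecay");
        boolean timeDecay = str == null ? true : Boolean.parseBoolean(str);

        // prints: cleanupThread=false, timeDecay=true
        System.out.println("cleanupThread=" + cleanupThread + ", timeDecay=" + timeDecay);
      }
    }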
diff --git a/solr/core/src/java/org/apache/solr/search/LRUCache.java b/solr/core/src/java/org/apache/solr/search/LRUCache.java
index bcb56cf..c733c07 100644
--- a/solr/core/src/java/org/apache/solr/search/LRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LRUCache.java
@@ -18,11 +18,11 @@
 
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.LongAdder;
 
@@ -234,8 +234,8 @@
   }
 
   /**
-   *
-   * @return Returns the description of this cache.
+   * 
+   * @return Returns the description of this cache. 
    */
   private String generateDescription() {
     String description = "LRU Cache(maxSize=" + getMaxSize() + ", initialSize=" + initialSize;
@@ -341,9 +341,9 @@
 
       // Don't do the autowarming in the synchronized block, just pull out the keys and values.
       synchronized (other.map) {
-
+        
         int sz = autowarm.getWarmCount(other.map.size());
-
+        
         keys = new Object[sz];
         vals = new Object[sz];
 
@@ -383,6 +383,7 @@
 
   }
 
+
   //////////////////////// SolrInfoMBeans methods //////////////////////
 
 
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java
index 9fe186a..4a16b39 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCache.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java
@@ -16,17 +16,16 @@
  */
 package org.apache.solr.search;
 
-import java.util.Map;
-
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricProducer;
 
+import java.util.Map;
+
 
 /**
  * Primary API for dealing with Solr's internal caches.
  */
 public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
-  String TYPE = "cache";
 
   String HIT_RATIO_PARAM = "hitratio";
   String HITS_PARAM = "hits";
@@ -38,6 +37,9 @@
   String RAM_BYTES_USED_PARAM = "ramBytesUsed";
   String MAX_RAM_MB_PARAM = "maxRamMB";
   String MAX_IDLE_TIME_PARAM = "maxIdleTime";
+  String INITIAL_SIZE_PARAM = "initialSize";
+  String CLEANUP_THREAD_PARAM = "cleanupThread";
+  String SHOW_ITEMS_PARAM = "showItems";
 
   /**
    * The initialization routine. Instance specific arguments are passed in
@@ -61,7 +63,7 @@
    * regenerate an item in the new cache from an entry in the old cache.
    *
    */
-  Object init(Map args, Object persistence, CacheRegenerator regenerator);
+  public Object init(Map args, Object persistence, CacheRegenerator regenerator);
   // I don't think we need a factory for faster creation given that these
   // will be associated with slow-to-create SolrIndexSearchers.
   // change to NamedList when other plugins do?
@@ -77,29 +79,29 @@
    *
    * :TODO: verify this.
    */
-  String name();
+  public String name();
 
 
   // Should SolrCache just extend the java.util.Map interface?
   // Following the conventions of the java.util.Map interface in any case.
 
   /** :TODO: copy from Map */
-  int size();
+  public int size();
 
   /** :TODO: copy from Map */
-  V put(K key, V value);
+  public V put(K key, V value);
 
   /** :TODO: copy from Map */
-  V get(K key);
+  public V get(K key);
 
   /** :TODO: copy from Map */
-  void clear();
+  public void clear();
 
   /** 
    * Enumeration of possible States for cache instances.
    * :TODO: only state that seems to ever be set is LIVE ?
   */
-  enum State {
+  public enum State { 
     /** :TODO */
     CREATED, 
     /** :TODO */
@@ -116,14 +118,14 @@
    * The cache user (SolrIndexSearcher) will take care of switching
    * cache states.
    */
-  void setState(State state);
+  public void setState(State state);
 
   /**
    * Returns the last State set on this instance
    *
    * @see #setState
    */
-  State getState();
+  public State getState();
 
   /**
    * Warm this cache associated with <code>searcher</code> using the <code>old</code>
@@ -135,7 +137,7 @@
 
 
   /** Frees any non-memory resources */
-  void close();
+  public void close();
 
   /** Returns maximum size limit (number of items) if set and supported, -1 otherwise. */
   int getMaxSize();
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
index 3b64e9d..66b8ab1 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
@@ -22,12 +22,8 @@
 import java.util.Set;
 
 import com.codahale.metrics.MetricRegistry;
-import org.apache.solr.common.MapWriter;
-import org.apache.solr.core.PluginInfo;
-import org.apache.solr.core.RuntimeLib;
-import org.apache.solr.core.SolrCore;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.metrics.SolrMetricManager;
-import org.apache.solr.metrics.SolrMetricProducer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -35,53 +31,12 @@
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
 
-  private CacheConfig.CacheInfo info;
+  private final CacheConfig factory;
   protected volatile SolrCache<K, V> delegate;
 
-
-
-  public SolrCacheHolder(CacheConfig.CacheInfo cacheInfo) {
-    this.info = cacheInfo;
-    this.delegate = cacheInfo.cache;
-
-    if(info.pkg != null) {
-      info.core.addPackageListener(new SolrCore.PkgListener() {
-        @Override
-        public String packageName() {
-          return info.pkg;
-        }
-
-        @Override
-        public PluginInfo pluginInfo() {
-          return info.cfg.args;
-        }
-
-        @Override
-        public MapWriter lib() {
-          return info.runtimeLib;
-        }
-
-        @Override
-        public void changed(RuntimeLib lib) {
-          reloadCache(lib);
-        }
-      });
-    }
-  }
-
-  private void reloadCache(RuntimeLib lib) {
-    int znodeVersion = info.runtimeLib == null ? -1 : info.runtimeLib.getZnodeVersion();
-    if (lib.getZnodeVersion() > znodeVersion) {
-      log.info("Cache {} being reloaded, package: {} loaded from: {} ", delegate.getClass().getSimpleName(), info.pkg, lib.getUrl());
-      info = new CacheConfig.CacheInfo(info.cfg, info.core);
-      delegate.close();
-      delegate = info.cache;
-      if(metricsInfo != null){
-        metricsInfo.init(delegate);
-
-      }
-
-    }
+  public SolrCacheHolder(SolrCache<K, V> delegate, CacheConfig factory) {
+    this.delegate = delegate;
+    this.factory = factory;
   }
 
   public int size() {
@@ -186,31 +141,12 @@
     return delegate.getCategory();
   }
 
-
-  private MetricsInfo metricsInfo;
-
-  public static class MetricsInfo {
-    final SolrMetricManager manager;
-    final String registry;
-    final String tag;
-    final String scope;
-
-    MetricsInfo(SolrMetricManager manager, String registry, String tag, String scope) {
-      this.manager = manager;
-      this.registry = registry;
-      this.tag = tag;
-      this.scope = scope;
-    }
-
-    public void init(SolrMetricProducer metricProducer) {
-      metricProducer.initializeMetrics(manager,registry,tag,scope);
-    }
-  }
-
   @Override
   public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    this.metricsInfo = new MetricsInfo(manager, registry, tag, scope);
-    delegate.initializeMetrics(manager, registry, tag, scope);
+    log.debug("Going to register cachemetrics " + Utils.toJSONString(factory));
+
+    delegate.initializeMetrics(manager, registry, tag, scope);
 
   }
+
 }
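The simplification above reduces SolrCacheHolder to a plain delegating wrapper built from the cache instance and its config. A small sketch of that delegation shape using hypothetical stand-in types (SimpleCache below is not a Solr interface):

    import java.util.HashMap;
    import java.util.Map;

    public class HolderSketch {
      // Hypothetical stand-in for SolrCache, kept minimal to show the wrapper shape only.
      interface SimpleCache<K, V> {
        V get(K key);
        V put(K key, V value);
        int size();
      }

      // The holder owns nothing but its delegate and the config object it was built from.
      static class SimpleCacheHolder<K, V> implements SimpleCache<K, V> {
        private final SimpleCache<K, V> delegate;
        private final Object factory; // stand-in for CacheConfig, useful only for diagnostics

        SimpleCacheHolder(SimpleCache<K, V> delegate, Object factory) {
          this.delegate = delegate;
          this.factory = factory;
        }

        @Override public V get(K key)          { return delegate.get(key); }
        @Override public V put(K key, V value) { return delegate.put(key, value); }
        @Override public int size()            { return delegate.size(); }
      }

      public static void main(String[] args) {
        Map<String, String> backing = new HashMap<>();
        SimpleCache<String, String> cache = new SimpleCache<String, String>() {
          @Override public String get(String k)           { return backing.get(k); }
          @Override public String put(String k, String v) { return backing.put(k, v); }
          @Override public int size()                     { return backing.size(); }
        };
        SimpleCacheHolder<String, String> holder = new SimpleCacheHolder<>(cache, "cacheConfig");
        holder.put("q", "result");
        System.out.println(holder.get("q") + " size=" + holder.size()); // result size=1
      }
    }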
diff --git a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java
index f7bc263..313d91b 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java
@@ -111,8 +111,7 @@
     this.searcher = searcher;
     this.enableLazyFieldLoading = solrConfig.enableLazyFieldLoading;
     if (cachingEnabled) {
-      documentCache = solrConfig.documentCacheConfig == null ? null :
-          solrConfig.documentCacheConfig.newInstance(searcher.getCore());
+      documentCache = solrConfig.documentCacheConfig == null ? null : solrConfig.documentCacheConfig.newInstance();
     } else {
       documentCache = null;
     }
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index f0170ef..6de5bd5 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -98,6 +98,7 @@
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.index.SlowCompositeReaderWrapper;
+import org.apache.solr.metrics.MetricsMap;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
 import org.apache.solr.request.LocalSolrQueryRequest;
@@ -107,6 +108,7 @@
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SchemaField;
 import org.apache.solr.search.facet.UnInvertedField;
+import org.apache.solr.search.stats.StatsCache;
 import org.apache.solr.search.stats.StatsSource;
 import org.apache.solr.uninverting.UninvertingReader;
 import org.apache.solr.update.IndexFingerprint;
@@ -170,6 +172,8 @@
   private final String path;
   private boolean releaseDirectory;
 
+  private final StatsCache statsCache;
+
   private Set<String> metricNames = ConcurrentHashMap.newKeySet();
   private SolrMetricManager metricManager;
   private String registryName;
@@ -269,6 +273,7 @@
     this.rawReader = r;
     this.leafReader = SlowCompositeReaderWrapper.wrap(this.reader);
     this.core = core;
+    this.statsCache = core.createStatsCache();
     this.schema = schema;
     this.name = "Searcher@" + Integer.toHexString(hashCode()) + "[" + core.getName() + "]"
         + (name != null ? " " + name : "");
@@ -300,12 +305,12 @@
     if (cachingEnabled) {
       final ArrayList<SolrCache> clist = new ArrayList<>();
       fieldValueCache = solrConfig.fieldValueCacheConfig == null ? null
-          : solrConfig.fieldValueCacheConfig.newInstance(core);
-      if (fieldValueCache != null) clist.add( fieldValueCache);
-      filterCache = solrConfig.filterCacheConfig == null ? null : solrConfig.filterCacheConfig.newInstance(core);
+          : solrConfig.fieldValueCacheConfig.newInstance();
+      if (fieldValueCache != null) clist.add(fieldValueCache);
+      filterCache = solrConfig.filterCacheConfig == null ? null : solrConfig.filterCacheConfig.newInstance();
       if (filterCache != null) clist.add(filterCache);
       queryResultCache = solrConfig.queryResultCacheConfig == null ? null
-          : solrConfig.queryResultCacheConfig.newInstance(core);
+          : solrConfig.queryResultCacheConfig.newInstance();
       if (queryResultCache != null) clist.add(queryResultCache);
       SolrCache<Integer, Document> documentCache = docFetcher.getDocumentCache();
       if (documentCache != null) clist.add(documentCache);
@@ -314,8 +319,8 @@
         cacheMap = NO_GENERIC_CACHES;
       } else {
         cacheMap = new HashMap<>(solrConfig.userCacheConfigs.size());
-        for (Map.Entry<String, CacheConfig> e : solrConfig.userCacheConfigs.entrySet()) {
-          SolrCache cache = e.getValue().newInstance(core);
+        for (Map.Entry<String,CacheConfig> e : solrConfig.userCacheConfigs.entrySet()) {
+          SolrCache cache = e.getValue().newInstance();
           if (cache != null) {
             cacheMap.put(cache.name(), cache);
             clist.add(cache);
@@ -348,6 +353,10 @@
     return super.leafContexts;
   }
 
+  public StatsCache getStatsCache() {
+    return statsCache;
+  }
+
   public FieldInfos getFieldInfos() {
     return leafReader.getFieldInfos();
   }
@@ -535,8 +544,8 @@
   // Set default regenerators on filter and query caches if they don't have any
   //
   public static void initRegenerators(SolrConfig solrConfig) {
-    if (solrConfig.fieldValueCacheConfig != null) {
-      solrConfig.fieldValueCacheConfig.setDefaultRegenerator(new CacheRegenerator() {
+    if (solrConfig.fieldValueCacheConfig != null && solrConfig.fieldValueCacheConfig.getRegenerator() == null) {
+      solrConfig.fieldValueCacheConfig.setRegenerator(new CacheRegenerator() {
         @Override
         public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache,
             Object oldKey, Object oldVal) throws IOException {
@@ -548,8 +557,8 @@
       });
     }
 
-    if (solrConfig.filterCacheConfig != null ) {
-      solrConfig.filterCacheConfig.setDefaultRegenerator(new CacheRegenerator() {
+    if (solrConfig.filterCacheConfig != null && solrConfig.filterCacheConfig.getRegenerator() == null) {
+      solrConfig.filterCacheConfig.setRegenerator(new CacheRegenerator() {
         @Override
         public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache,
             Object oldKey, Object oldVal) throws IOException {
@@ -559,9 +568,9 @@
       });
     }
 
-    if (solrConfig.queryResultCacheConfig != null) {
+    if (solrConfig.queryResultCacheConfig != null && solrConfig.queryResultCacheConfig.getRegenerator() == null) {
       final int queryResultWindowSize = solrConfig.queryResultWindowSize;
-      solrConfig.queryResultCacheConfig.setDefaultRegenerator(new CacheRegenerator() {
+      solrConfig.queryResultCacheConfig.setRegenerator(new CacheRegenerator() {
         @Override
         public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache,
             Object oldKey, Object oldVal) throws IOException {
@@ -656,7 +665,7 @@
 
   /** expert: internal API, subject to change */
   public SolrCache<String,UnInvertedField> getFieldValueCache() {
-    return fieldValueCache ;
+    return fieldValueCache;
   }
 
   /** Returns a weighted sort according to this searcher */
@@ -2433,7 +2442,13 @@
         return -1;
       }
     }, tag, true, "indexCommitSize", Category.SEARCHER.toString(), scope);
-
+    // statsCache metrics
+    manager.registerGauge(this, registry,
+        new MetricsMap((detailed, map) -> {
+          statsCache.getCacheMetrics().getSnapshot(map::put);
+          map.put("statsCacheImpl", statsCache.getClass().getSimpleName());
+        }),
+        tag, true, "statsCache", Category.CACHE.toString(), scope);
   }
 
   @Override
@@ -2607,7 +2622,7 @@
 
     @Override
     public int hashCode() {
-      return classHash()
+      return classHash() 
           + 31 * Objects.hashCode(topFilter)
           + 31 * Objects.hashCode(weights);
     }
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
index 40eb785..5caea4e 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessor.java
@@ -34,6 +34,7 @@
 import org.apache.lucene.index.LeafReaderContext;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.util.PriorityQueue;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.schema.FieldType;
 import org.apache.solr.schema.SchemaField;
@@ -148,7 +149,7 @@
   }
 
   /** 
-   * Simple helper for checking if a {@FacetRequest.FacetSort} is on "count" or "index" and picking 
+   * Simple helper for checking if a {@link FacetRequest.FacetSort} is on "count" or "index" and picking
    * the existing SlotAcc 
    * @return an existing SlotAcc for sorting, else null if it should be built from the Aggs
    */
@@ -224,6 +225,12 @@
 
     boolean needOtherAccs = freq.allBuckets;  // TODO: use for missing too...
 
+    if (sortAcc == null) {
+      // the sort is already validated at parse time, so it is unclear how sortAcc could be null here
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Invalid sort '" + sort + "' for field '" + sf.getName() + "'");
+    }
+
     if (!needOtherAccs) {
       // we may need them later, but we don't want to create them now
       // otherwise we won't know if we need to call setNextReader on them.
@@ -287,6 +294,7 @@
   SimpleOrderedMap<Object> findTopSlots(final int numSlots, final int slotCardinality,
                                         IntFunction<Comparable> bucketValFromSlotNumFunc,
                                         Function<Comparable, String> fieldQueryValFunc) throws IOException {
+    assert this.sortAcc != null;
     int numBuckets = 0;
 
     final int off = fcontext.isShard() ? 0 : (int) freq.offset;
@@ -326,7 +334,7 @@
         return cmp == 0 ? b.slot < a.slot : cmp < 0;
       };
     }
-    final PriorityQueue<Slot> queue = new PriorityQueue<Slot>(maxTopVals) {
+    final PriorityQueue<Slot> queue = new PriorityQueue<>(maxTopVals) {
       @Override
       protected boolean lessThan(Slot a, Slot b) { return orderPredicate.test(a, b); }
     };
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
index d792519..b5f1521 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRange.java
@@ -38,6 +38,7 @@
 import org.apache.solr.schema.TrieDateField;
 import org.apache.solr.schema.TrieField;
 import org.apache.solr.search.DocSet;
+import org.apache.solr.search.SyntaxError;
 import org.apache.solr.search.facet.SlotAcc.SlotContext;
 import org.apache.solr.util.DateMathParser;
 
@@ -50,6 +51,7 @@
   Object start;
   Object end;
   Object gap;
+  Object ranges;
   boolean hardend = false;
   EnumSet<FacetRangeInclude> include;
   EnumSet<FacetRangeOther> others;
@@ -72,11 +74,15 @@
   
   @Override
   public Map<String, Object> getFacetDescription() {
-    Map<String, Object> descr = new HashMap<String, Object>();
+    Map<String, Object> descr = new HashMap<>();
     descr.put("field", field);
-    descr.put("start", start);
-    descr.put("end", end);
-    descr.put("gap", gap);
+    if (ranges != null) {
+      descr.put("ranges", ranges);
+    } else {
+      descr.put("start", start);
+      descr.put("end", end);
+      descr.put("gap", gap);
+    }
     return descr;
   }
   
@@ -95,7 +101,8 @@
   final Comparable start;
   final Comparable end;
   final String gap;
-  
+  final Object ranges;
+
   /** Build by {@link #createRangeList} if and only if needed for basic faceting */
   List<Range> rangeList;
   /** Build by {@link #createRangeList} if and only if needed for basic faceting */
@@ -120,11 +127,22 @@
     include = freq.include;
     sf = fcontext.searcher.getSchema().getField(freq.field);
     calc = getCalcForField(sf);
-    start = calc.getValue(freq.start.toString());
-    end = calc.getValue(freq.end.toString());
-    gap = freq.gap.toString();
+    if (freq.ranges != null && (freq.start != null || freq.end != null || freq.gap != null)) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Cannot set gap/start/end and ranges params together");
+    }
+    if (freq.ranges != null) {
+      ranges = freq.ranges;
+      start = null;
+      end = null;
+      gap = null;
+    } else {
+      start = calc.getValue(freq.start.toString());
+      end = calc.getValue(freq.end.toString());
+      gap = freq.gap.toString();
+      ranges = null;
+    }
 
-    
     // Under the normal mincount=0, each shard will need to return 0 counts since we don't calculate buckets at the top level.
     // If mincount>0 then we could *potentially* set our sub mincount to 1...
     // ...but that would require sorting the buckets (by their val) at the top level
@@ -245,7 +263,12 @@
 
     Comparable low = start;
     Comparable loop_end = this.end;
-    
+
+    if (ranges != null) {
+      rangeList.addAll(parseRanges(ranges));
+      return;
+    }
+
     while (low.compareTo(end) < 0) {
       Comparable high = calc.addGap(low, gap);
       if (end.compareTo(high) < 0) {
@@ -263,14 +286,14 @@
       if (high.compareTo(low) == 0) {
         throw new SolrException
             (SolrException.ErrorCode.BAD_REQUEST,
-                "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high );
+                "range facet infinite loop: gap is either zero, or too small relative start/end and caused underflow: " + low + " + " + gap + " = " + high);
       }
 
-      boolean incLower =(include.contains(FacetRangeInclude.LOWER) ||
-                         (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
+      boolean incLower = (include.contains(FacetRangeInclude.LOWER) ||
+          (include.contains(FacetRangeInclude.EDGE) && 0 == low.compareTo(start)));
       boolean incUpper = (include.contains(FacetRangeInclude.UPPER) ||
-                          (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end)));
-      
+          (include.contains(FacetRangeInclude.EDGE) && 0 == high.compareTo(end)));
+
       Range range = new Range(calc.buildRangeLabel(low), low, high, incLower, incUpper);
       rangeList.add( range );
 
@@ -299,8 +322,203 @@
       actual_end = null;
     }
   }
-  
-  
+
+  /**
+   * Parses the given list of maps and returns list of Ranges
+   *
+   * @param input - list of maps containing the ranges
+   * @return list of {@link Range}
+   */
+  private List<Range> parseRanges(Object input) {
+    if (!(input instanceof List)) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Expected List for ranges but got " + input.getClass().getSimpleName() + " = " + input
+      );
+    }
+    List intervals = (List) input;
+    List<Range> ranges = new ArrayList<>();
+    for (Object obj : intervals) {
+      if (!(obj instanceof Map)) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "Expected Map for range but got " + obj.getClass().getSimpleName() + " = " + obj);
+      }
+      Range range;
+      Map<String, Object> interval = (Map<String, Object>) obj;
+      if (interval.containsKey("range")) {
+        range = getRangeByOldFormat(interval);
+      } else {
+        range = getRangeByNewFormat(interval);
+      }
+      ranges.add(range);
+    }
+    return ranges;
+  }
+
+  private boolean getBoolean(Map<String,Object> args, String paramName, boolean defVal) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      return defVal;
+    }
+    // TODO: should we be more flexible and accept things like "true" (strings)?
+    // Perhaps wait until the use case comes up.
+    if (!(o instanceof Boolean)) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Expected boolean type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
+    }
+
+    return (Boolean)o;
+  }
+
+  private String getString(Map<String,Object> args, String paramName, boolean required) {
+    Object o = args.get(paramName);
+    if (o == null) {
+      if (required) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+            "Missing required parameter '" + paramName + "' for " + args);
+      }
+      return null;
+    }
+    if (!(o instanceof String)) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Expected string type for param '"+paramName + "' but got " + o.getClass().getSimpleName() + " = " + o);
+    }
+
+    return (String)o;
+  }
+
+  /**
+   * Parses the range given in the format {from:val1, to:val2, inclusive_to:true}
+   * and returns the {@link Range}
+   *
+   * @param rangeMap Map containing the range info
+   * @return {@link Range}
+   */
+  private Range getRangeByNewFormat(Map<String, Object> rangeMap) {
+    Object fromObj = rangeMap.get("from");
+    Object toObj = rangeMap.get("to");
+
+    String fromStr = fromObj == null? "*" : fromObj.toString();
+    String toStr = toObj == null? "*": toObj.toString();
+    boolean includeUpper = getBoolean(rangeMap, "inclusive_to", false);
+    boolean includeLower = getBoolean(rangeMap, "inclusive_from", true);
+
+    Object key = rangeMap.get("key");
+    // if (key == null) {
+    //  key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")");
+    // }
+    // always use the default key: a custom key won't work with refine,
+    // because refine needs both the low and high values
+    key = (includeLower? "[": "(") + fromStr + "," + toStr + (includeUpper? "]": ")");
+
+    Comparable from = getComparableFromString(fromStr);
+    Comparable to = getComparableFromString(toStr);
+    if (from != null && to != null && from.compareTo(to) > 0) {
+      // allowing from and to be same
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'from' is higher than 'to' in range for key: " + key);
+    }
+
+    return new Range(key, from, to, includeLower, includeUpper);
+  }
+
+  /**
+   * Parses the range string from the map and returns the {@link Range}
+   *
+   * @param range map containing the interval
+   * @return {@link Range}
+   */
+  private Range getRangeByOldFormat(Map<String, Object> range) {
+    String key = getString(range, "key", false);
+    String rangeStr = getString(range, "range", true);
+    try {
+      return parseRangeFromString(key, rangeStr);
+    } catch (SyntaxError e) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+    }
+  }
+
+  /**
+   * Parses the given string and returns Range.
+   * This is adapted from {@link org.apache.solr.request.IntervalFacets}
+   *
+   * @param key The name of range which would be used as {@link Range}'s label
+   * @param rangeStr The string containing the Range
+   * @return {@link Range}
+   */
+  private Range parseRangeFromString(String key, String rangeStr) throws SyntaxError {
+    rangeStr = rangeStr.trim();
+    if (rangeStr.isEmpty()) {
+      throw new SyntaxError("empty facet range");
+    }
+
+    boolean includeLower = true, includeUpper = true;
+    Comparable start = null, end = null;
+    if (rangeStr.charAt(0) == '(') {
+      includeLower = false;
+    } else if (rangeStr.charAt(0) != '[') {
+      throw new SyntaxError( "Invalid start character " + rangeStr.charAt(0) + " in facet range " + rangeStr);
+    }
+
+    final int lastNdx = rangeStr.length() - 1;
+    if (rangeStr.charAt(lastNdx) == ')') {
+      includeUpper = false;
+    } else if (rangeStr.charAt(lastNdx) != ']') {
+      throw new SyntaxError("Invalid end character " + rangeStr.charAt(lastNdx) + " in facet range " + rangeStr);
+    }
+
+    StringBuilder startStr = new StringBuilder(lastNdx);
+    int i = unescape(rangeStr, 1, lastNdx, startStr);
+    if (i == lastNdx) {
+      if (rangeStr.charAt(lastNdx - 1) == ',') {
+        throw new SyntaxError("Empty range limit");
+      }
+      throw new SyntaxError("Missing unescaped comma separating range ends in " + rangeStr);
+    }
+    start = getComparableFromString(startStr.toString());
+
+    StringBuilder endStr = new StringBuilder(lastNdx);
+    i = unescape(rangeStr, i, lastNdx, endStr);
+    if (i != lastNdx) {
+      throw new SyntaxError("Extra unescaped comma at index " + i + " in range " + rangeStr);
+    }
+    end = getComparableFromString(endStr.toString());
+
+    if (start != null && end != null && start.compareTo(end) > 0) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "'start' is higher than 'end' in range for key: " + rangeStr);
+    }
+
+    // not using a custom key as it won't work with refine:
+    // refine needs both the low and high values
+    return new Range(rangeStr, start, end, includeLower, includeUpper);
+  }
+
+  /* Fill in sb with a string from i to the first unescaped comma, or n.
+      Return the index past the unescaped comma, or n if no unescaped comma exists */
+  private int unescape(String s, int i, int n, StringBuilder sb) throws SyntaxError {
+    for (; i < n; ++i) {
+      char c = s.charAt(i);
+      if (c == '\\') {
+        ++i;
+        if (i < n) {
+          c = s.charAt(i);
+        } else {
+          throw new SyntaxError("Unfinished escape at index " + i + " in facet range " + s);
+        }
+      } else if (c == ',') {
+        return i + 1;
+      }
+      sb.append(c);
+    }
+    return n;
+  }
+
+  private Comparable getComparableFromString(String value) {
+    value = value.trim();
+    if ("*".equals(value)) {
+      return null;
+    }
+    return calc.getValue(value);
+  }
+
   private  SimpleOrderedMap getRangeCountsIndexed() throws IOException {
 
     int slotCount = rangeList.size() + otherList.size();
@@ -341,7 +559,7 @@
       addStats(bucket, rangeList.size() + idx);
       doSubs(bucket, rangeList.size() + idx);
     }
-      
+
     if (null != actual_end) {
       res.add(FacetRange.ACTUAL_END_JSON_KEY, calc.formatValue(actual_end));
     }
@@ -404,7 +622,7 @@
     }
 
     /**
-     * Given the low value for a bucket, generates the appropraite "label" object to use. 
+     * Given the low value for a bucket, generates the appropriate "label" object to use.
      * By default return the low object unmodified.
      */
     public Object buildRangeLabel(Comparable low) {
@@ -471,7 +689,7 @@
 
     /**
      * Adds the String gap param to a low Range endpoint value to determine
-     * the corrisponding high Range endpoint value, throwing
+     * the corresponding high Range endpoint value, throwing
      * a useful exception if not possible.
      */
     public final Comparable addGap(Comparable value, String gap) {
@@ -485,7 +703,7 @@
     }
     /**
      * Adds the String gap param to a low Range endpoint value to determine
-     * the corrisponding high Range endpoint value.
+     * the corresponding high Range endpoint value.
      * Can throw a low level format exception as needed.
      */
     protected abstract Comparable parseAndAddGap(Comparable value, String gap)
@@ -695,7 +913,7 @@
     // But range faceting does *NOT* use the "leaves" and "partial" syntax
     // 
     // If/When range facet becomes more like field facet in it's ability to sort and limit the "range buckets"
-    // FacetRangeProcessor and FacetFieldProcessor should prbably be refactored to share more code.
+    // FacetRangeProcessor and FacetFieldProcessor should probably be refactored to share more code.
     
     boolean skipThisFacet = (fcontext.flags & SKIP_FACET) != 0;
 
@@ -722,7 +940,7 @@
 
     { // refine the special "other" buckets
       
-      // NOTE: we're re-useing this variable for each special we look for...
+      // NOTE: we're re-using this variable for each special we look for...
       Map<String,Object> specialFacetInfo;
 
       specialFacetInfo = (Map<String, Object>) fcontext.facetInfo.get(FacetRangeOther.BEFORE.toString());
@@ -784,7 +1002,20 @@
   
   private SimpleOrderedMap<Object> refineBucket(Object bucketVal, boolean skip, Map<String,Object> facetInfo) throws IOException {
 
-    Comparable low = calc.getValue(bucketVal.toString());
+    String val = bucketVal.toString();
+    if (ranges != null) {
+      try {
+        Range range = parseRangeFromString(val, val);
+        final SimpleOrderedMap<Object> bucket = refineRange(range, skip, facetInfo);
+        bucket.add("val", range.label);
+        return bucket;
+      } catch (SyntaxError e) {
+        // execution won't reach here as ranges are already validated
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+      }
+    }
+
+    Comparable low = calc.getValue(val);
     Comparable high = calc.addGap(low, gap);
     Comparable max_end = end;
     if (end.compareTo(high) < 0) {
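To make the new ranges parameter concrete: each list entry is either a map of from/to/inclusive_from/inclusive_to (from and to default to the open-ended "*", inclusive_from defaults to true, inclusive_to to false) or an old-style {range: "[a,b)"} string. A simplified sketch of the bounds/key derivation for the map form, without the type validation the patch performs (the helper name is illustrative, not Solr's):

    import java.util.HashMap;
    import java.util.Map;

    public class RangeKeySketch {
      // Mirrors the defaulting in getRangeByNewFormat: missing bounds become "*",
      // inclusive_from defaults to true, inclusive_to defaults to false.
      static String rangeKey(Map<String, Object> rangeMap) {
        Object fromObj = rangeMap.get("from");
        Object toObj = rangeMap.get("to");
        String fromStr = fromObj == null ? "*" : fromObj.toString();
        String toStr = toObj == null ? "*" : toObj.toString();
        boolean includeLower = !Boolean.FALSE.equals(rangeMap.get("inclusive_from"));
        boolean includeUpper = Boolean.TRUE.equals(rangeMap.get("inclusive_to"));
        return (includeLower ? "[" : "(") + fromStr + "," + toStr + (includeUpper ? "]" : ")");
      }

      public static void main(String[] args) {
        Map<String, Object> r1 = new HashMap<>();
        r1.put("from", 0);
        r1.put("to", 10);
        System.out.println(rangeKey(r1));   // [0,10)

        Map<String, Object> r2 = new HashMap<>();
        r2.put("to", 100);
        r2.put("inclusive_to", true);
        System.out.println(rangeKey(r2));   // [*,100]
      }
    }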
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
index 566be2e..6860a94 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetRequest.java
@@ -21,8 +21,9 @@
 import java.util.EnumSet;
 import java.util.LinkedHashMap;
 import java.util.List;
-import java.util.Objects;
 import java.util.Map;
+import java.util.Objects;
+import java.util.Optional;
 
 import org.apache.lucene.search.Query;
 import org.apache.solr.common.SolrException;
@@ -96,6 +97,20 @@
       this.multiplier = multiplier;
     }
 
+    public static SortDirection fromObj(Object direction) {
+      if (direction == null) {
+        // should we just default either to desc/asc??
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Missing Sort direction");
+      }
+
+      switch (direction.toString()) {
+        case "asc": return asc;
+        case "desc": return desc;
+        default:
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Unknown Sort direction '" + direction + "'");
+      }
+    }
+
     // asc==-1, desc==1
     public int getMultiplier() {
       return multiplier;
@@ -986,11 +1001,10 @@
       Object o = m.get("facet");
       parseSubs(o);
 
-      // TODO: SOLR-13022 ... validate the sortVariabls against the subs.
-      facet.sort = parseSort( m.get(SORT) );
-      facet.prelim_sort = parseSort( m.get("prelim_sort") );
+      facet.sort = parseAndValidateSort(facet, m, SORT);
+      facet.prelim_sort = parseAndValidateSort(facet, m, "prelim_sort");
     } else if (arg != null) {
-      // something lke json.facet.facet.field=2
+      // something like json.facet.facet.field=2
       throw err("Expected string/map for facet field, received " + arg.getClass().getSimpleName() + "=" + arg);
     }
 
@@ -1001,42 +1015,69 @@
     return facet;
   }
 
-
-  // Sort specification is currently
-  // sort : 'mystat desc'
-  // OR
-  // sort : { mystat : 'desc' }
-  private static FacetRequest.FacetSort parseSort(Object sort) {
+  /**
+   * Parses, validates and returns the {@link FacetRequest.FacetSort} for given sortParam
+   * and facet field
+   * <p>
+   *   Currently, supported sort specifications are 'mystat desc' OR {mystat: 'desc'}
+   *   index - This is equivalent to 'index asc'
+   *   count - This is equivalent to 'count desc'
+   * </p>
+   *
+   * @param facet {@link FacetField} for which sort needs to be parsed and validated
+   * @param args map containing the sortVal for given sortParam
+   * @param sortParam parameter for which the sort needs to be parsed and validated
+   * @return parsed facet sort
+   */
+  private static FacetRequest.FacetSort parseAndValidateSort(FacetField facet, Map<String, Object> args, String sortParam) {
+    Object sort = args.get(sortParam);
     if (sort == null) {
       return null;
-    } else if (sort instanceof String) {
+    }
+
+    FacetRequest.FacetSort facetSort = null;
+
+    if (sort instanceof String) {
       String sortStr = (String)sort;
       if (sortStr.endsWith(" asc")) {
-        return new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()),
-                                          FacetRequest.SortDirection.asc);
+        facetSort =  new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" asc".length()),
+            FacetRequest.SortDirection.asc);
       } else if (sortStr.endsWith(" desc")) {
-        return new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()),
-                                          FacetRequest.SortDirection.desc);
+        facetSort =  new FacetRequest.FacetSort(sortStr.substring(0, sortStr.length()-" desc".length()),
+            FacetRequest.SortDirection.desc);
       } else {
-        return new FacetRequest.FacetSort(sortStr,
-                                          // default direction for "index" is ascending
-                                          ("index".equals(sortStr)
-                                           ? FacetRequest.SortDirection.asc
-                                           : FacetRequest.SortDirection.desc));
+        facetSort =  new FacetRequest.FacetSort(sortStr,
+            // default direction for "index" is ascending
+            ("index".equals(sortStr)
+                ? FacetRequest.SortDirection.asc
+                : FacetRequest.SortDirection.desc));
       }
     } else if (sort instanceof Map) {
-     // sort : { myvar : 'desc' }
-      Map<String,Object> map = (Map<String,Object>)sort;
-      // TODO: validate
-      Map.Entry<String,Object> entry = map.entrySet().iterator().next();
-      String k = entry.getKey();
-      Object v = entry.getValue();
-      return new FacetRequest.FacetSort(k, FacetRequest.SortDirection.valueOf(v.toString()));
+      // { myvar : 'desc' }
+      Optional<Map.Entry<String,Object>> optional = ((Map<String,Object>)sort).entrySet().stream().findFirst();
+      if (optional.isPresent()) {
+        Map.Entry<String, Object> entry = optional.get();
+        facetSort = new FacetRequest.FacetSort(entry.getKey(), FacetRequest.SortDirection.fromObj(entry.getValue()));
+      }
     } else {
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
-          "Expected string/map for 'sort', received "+ sort.getClass().getSimpleName() + "=" + sort);
+          "Expected string/map for '" + sortParam +"', received "+ sort.getClass().getSimpleName() + "=" + sort);
     }
+
+    Map<String, AggValueSource> facetStats = facet.facetStats;
+    // validate facet sort
+    boolean isValidSort = facetSort == null ||
+        "index".equals(facetSort.sortVariable) ||
+        "count".equals(facetSort.sortVariable) ||
+        (facetStats != null && facetStats.containsKey(facetSort.sortVariable));
+
+    if (!isValidSort) {
+      throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
+          "Invalid " + sortParam + " option '" + sort + "' for field '" + facet.field + "'");
+    }
+    return facetSort;
   }
+
 }
 
 
@@ -1057,10 +1098,12 @@
     Map<String, Object> m = (Map<String, Object>) arg;
 
     facet.field = getString(m, "field", null);
+    facet.ranges = getVal(m, "ranges", false);
 
-    facet.start = getVal(m, "start", true);
-    facet.end = getVal(m, "end", true);
-    facet.gap = getVal(m, "gap", true);
+    boolean required = facet.ranges == null;
+    facet.start = getVal(m, "start", required);
+    facet.end = getVal(m, "end", required);
+    facet.gap = getVal(m, "gap", required);
     facet.hardend = getBoolean(m, "hardend", facet.hardend);
     facet.mincount = getLong(m, "mincount", 0);
 
@@ -1069,7 +1112,7 @@
     List<String> list = getStringList(m, "include", false);
     String[] includeList = null;
     if (list != null) {
-      includeList = (String[])list.toArray(new String[list.size()]);
+      includeList = list.toArray(new String[list.size()]);
     }
     facet.include = FacetParams.FacetRangeInclude.parseParam( includeList );
     facet.others = EnumSet.noneOf(FacetParams.FacetRangeOther.class);
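As a concrete illustration of the sort grammar handled by parseAndValidateSort: a trailing ' asc' or ' desc' selects the direction, a bare 'index' defaults to ascending, any other bare variable defaults to descending, and the sort variable must be 'index', 'count', or one of the facet's stats. A standalone sketch of the string-form rule (simplified; not the Solr implementation):

    public class FacetSortSketch {
      // Returns {variable, direction} for a string sort spec, following the rules above.
      static String[] parseSort(String sortStr) {
        if (sortStr.endsWith(" asc")) {
          return new String[]{sortStr.substring(0, sortStr.length() - " asc".length()), "asc"};
        } else if (sortStr.endsWith(" desc")) {
          return new String[]{sortStr.substring(0, sortStr.length() - " desc".length()), "desc"};
        }
        // bare variable: "index" sorts ascending by default, everything else descending
        return new String[]{sortStr, "index".equals(sortStr) ? "asc" : "desc"};
      }

      public static void main(String[] args) {
        for (String spec : new String[]{"mystat desc", "index", "count"}) {
          String[] parsed = parseSort(spec);
          System.out.println(spec + " -> variable=" + parsed[0] + ", direction=" + parsed[1]);
        }
        // mystat desc -> variable=mystat, direction=desc
        // index -> variable=index, direction=asc
        // count -> variable=count, direction=desc
      }
    }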
diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
index c7758ff..93fb6e4 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/ExactSharedStatsCache.java
@@ -21,13 +21,19 @@
 import java.util.Map.Entry;
 import java.util.concurrent.ConcurrentHashMap;
 
-import org.apache.solr.core.PluginInfo;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.request.SolrQueryRequest;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-
+/**
+ * This class implements exact caching of statistics. It requires an additional
+ * round-trip to parse query at shard servers, and return term statistics for
+ * query terms (and collection statistics for term fields).
+ * <p>Global statistics are accumulated in the instance of this component (with the same life-cycle as
+ * SolrIndexSearcher), in unbounded maps. NOTE: This may lead to excessive memory usage, in which case
+ * a {@link LRUStatsCache} should be considered.</p>
+ */
 public class ExactSharedStatsCache extends ExactStatsCache {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   
@@ -39,13 +45,19 @@
   private final Map<String,CollectionStats> currentGlobalColStats = new ConcurrentHashMap<>();
 
   @Override
-  public StatsSource get(SolrQueryRequest req) {
+  protected StatsSource doGet(SolrQueryRequest req) {
     log.debug("total={}, cache {}", currentGlobalColStats, currentGlobalTermStats.size());
-    return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats);
+    return new ExactStatsSource(statsCacheMetrics, currentGlobalTermStats, currentGlobalColStats);
   }
-  
+
   @Override
-  public void init(PluginInfo info) {}
+  public void clear() {
+    super.clear();
+    perShardTermStats.clear();
+    perShardColStats.clear();
+    currentGlobalTermStats.clear();
+    currentGlobalColStats.clear();
+  }
 
   @Override
   protected void addToPerShardColStats(SolrQueryRequest req, String shard,
diff --git a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
index 002b190..fc60f1c 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/ExactStatsCache.java
@@ -18,6 +18,7 @@
 
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -25,21 +26,23 @@
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
+import java.util.stream.Collectors;
 
-import com.google.common.collect.Lists;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.TermQuery;
 import org.apache.lucene.search.TermStatistics;
 import org.apache.solr.client.solrj.SolrResponse;
+import org.apache.solr.cloud.CloudDescriptor;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.PluginInfo;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.handler.component.ShardResponse;
@@ -52,36 +55,30 @@
  * This class implements exact caching of statistics. It requires an additional
  * round-trip to parse query at shard servers, and return term statistics for
  * query terms (and collection statistics for term fields).
+ * <p>Global statistics are cached in the current request's context and discarded
+ * once the processing of the current request is complete. There's no support for
+ * longer-term caching, and each request needs to build the global statistics from scratch,
+ * even for repeating queries.</p>
  */
 public class ExactStatsCache extends StatsCache {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  // experimenting with strategy that takes more RAM, but also doesn't share memory
-  // across threads
-  private static final String CURRENT_GLOBAL_COL_STATS = "org.apache.solr.stats.currentGlobalColStats";
-  private static final String CURRENT_GLOBAL_TERM_STATS = "org.apache.solr.stats.currentGlobalTermStats";
-  private static final String PER_SHARD_TERM_STATS = "org.apache.solr.stats.perShardTermStats";
-  private static final String PER_SHARD_COL_STATS = "org.apache.solr.stats.perShardColStats";
+  private static final String CURRENT_GLOBAL_COL_STATS = "solr.stats.globalCol";
+  private static final String CURRENT_GLOBAL_TERM_STATS = "solr.stats.globalTerm";
+  private static final String PER_SHARD_TERM_STATS = "solr.stats.shardTerm";
+  private static final String PER_SHARD_COL_STATS = "solr.stats.shardCol";
 
   @Override
-  public StatsSource get(SolrQueryRequest req) {
-    Map<String,CollectionStats> currentGlobalColStats = (Map<String,CollectionStats>) req.getContext().get(CURRENT_GLOBAL_COL_STATS);
-    Map<String,TermStats> currentGlobalTermStats = (Map<String,TermStats>) req.getContext().get(CURRENT_GLOBAL_TERM_STATS);
-    if (currentGlobalColStats == null) {
-     currentGlobalColStats = Collections.emptyMap();
-    }
-    if (currentGlobalTermStats == null) {
-      currentGlobalTermStats = Collections.emptyMap();
-    }
+  protected StatsSource doGet(SolrQueryRequest req) {
+    Map<String,CollectionStats> currentGlobalColStats = (Map<String,CollectionStats>) req.getContext().getOrDefault(CURRENT_GLOBAL_COL_STATS, Collections.emptyMap());
+    Map<String,TermStats> currentGlobalTermStats = (Map<String,TermStats>) req.getContext().getOrDefault(CURRENT_GLOBAL_TERM_STATS, Collections.emptyMap());
     log.debug("Returning StatsSource. Collection stats={}, Term stats size= {}", currentGlobalColStats, currentGlobalTermStats.size());
-    return new ExactStatsSource(currentGlobalTermStats, currentGlobalColStats);
+    return new ExactStatsSource(statsCacheMetrics, currentGlobalTermStats, currentGlobalColStats);
   }
 
   @Override
-  public void init(PluginInfo info) {}
-
-  @Override
-  public ShardRequest retrieveStatsRequest(ResponseBuilder rb) {
+  protected ShardRequest doRetrieveStatsRequest(ResponseBuilder rb) {
+    // always request shard statistics
     ShardRequest sreq = new ShardRequest();
     sreq.purpose = ShardRequest.PURPOSE_GET_TERM_STATS;
     sreq.params = new ModifiableSolrParams(rb.req.getParams());
@@ -91,20 +88,27 @@
   }
 
   @Override
-  public void mergeToGlobalStats(SolrQueryRequest req, List<ShardResponse> responses) {
-    Set<Object> allTerms = new HashSet<>();
+  protected void doMergeToGlobalStats(SolrQueryRequest req, List<ShardResponse> responses) {
+    Set<Term> allTerms = new HashSet<>();
     for (ShardResponse r : responses) {
       log.debug("Merging to global stats, shard={}, response={}", r.getShard(), r.getSolrResponse().getResponse());
+      // response's "shard" is really a shardURL, or even a list of URLs
       String shard = r.getShard();
       SolrResponse res = r.getSolrResponse();
+      if (res.getException() != null) {
+        log.debug("Exception response={}", res);
+        continue;
+      }
+      if (res.getResponse().get(ShardParams.SHARD_NAME) != null) {
+        shard = (String) res.getResponse().get(ShardParams.SHARD_NAME);
+      }
       NamedList<Object> nl = res.getResponse();
 
-      // TODO: nl == null if not all shards respond (no server hosting shard)
       String termStatsString = (String) nl.get(TERM_STATS_KEY);
       if (termStatsString != null) {
         addToPerShardTermStats(req, shard, termStatsString);
       }
-      List<Object> terms = nl.getAll(TERMS_KEY);
+      Set<Term> terms = StatsUtil.termsFromEncodedString((String) nl.get(TERMS_KEY));
       allTerms.addAll(terms);
       String colStatsString = (String) nl.get(COL_STATS_KEY);
       Map<String,CollectionStats> colStats = StatsUtil.colStatsMapFromString(colStatsString);
@@ -113,48 +117,36 @@
       }
     }
     if (allTerms.size() > 0) {
-      req.getContext().put(TERMS_KEY, Lists.newArrayList(allTerms));
+      req.getContext().put(TERMS_KEY, StatsUtil.termsToEncodedString(allTerms));
     }
     if (log.isDebugEnabled()) printStats(req);
   }
 
   protected void addToPerShardColStats(SolrQueryRequest req, String shard, Map<String,CollectionStats> colStats) {
-    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) req.getContext().get(PER_SHARD_COL_STATS);
-    if (perShardColStats == null) {
-      perShardColStats = new HashMap<>();
-      req.getContext().put(PER_SHARD_COL_STATS, perShardColStats);
-    }
+    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) req.getContext().computeIfAbsent(PER_SHARD_COL_STATS, Utils.NEW_HASHMAP_FUN);
     perShardColStats.put(shard, colStats);
   }
 
   protected void printStats(SolrQueryRequest req) {
-    Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().get(PER_SHARD_TERM_STATS);
-    if (perShardTermStats == null) {
-      perShardTermStats = Collections.emptyMap();
-    }
-    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) req.getContext().get(PER_SHARD_COL_STATS);
-    if (perShardColStats == null) {
-      perShardColStats = Collections.emptyMap();
-    }
+    Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().getOrDefault(PER_SHARD_TERM_STATS, Collections.emptyMap());
+    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) req.getContext().getOrDefault(PER_SHARD_COL_STATS, Collections.emptyMap());
     log.debug("perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
   }
 
   protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) {
     Map<String,TermStats> termStats = StatsUtil.termStatsMapFromString(termStatsString);
     if (termStats != null) {
-      Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().get(PER_SHARD_TERM_STATS);
-      if (perShardTermStats == null) {
-        perShardTermStats = new HashMap<>();
-        req.getContext().put(PER_SHARD_TERM_STATS, perShardTermStats);
-      }
+      Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().computeIfAbsent(PER_SHARD_TERM_STATS, Utils.NEW_HASHMAP_FUN);
       perShardTermStats.put(shard, termStats);
     }
   }
 
   @Override
-  public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
+  protected void doReturnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
     Query q = rb.getQuery();
     try {
+      Set<Term> additionalTerms = StatsUtil.termsFromEncodedString(rb.req.getParams().get(TERMS_KEY));
+      Set<String> additionalFields = StatsUtil.fieldsFromString(rb.req.getParams().get(FIELDS_KEY));
       HashSet<Term> terms = new HashSet<>();
       HashMap<String,TermStats> statsMap = new HashMap<>();
       HashMap<String,CollectionStats> colMap = new HashMap<>();
@@ -177,18 +169,31 @@
         }
       };
       statsCollectingSearcher.createWeight(searcher.rewrite(q), ScoreMode.COMPLETE, 1);
-
-      for (Term t : terms) {
-        rb.rsp.add(TERMS_KEY, t.toString());
+      for (String field : additionalFields) {
+        if (colMap.containsKey(field)) {
+          continue;
+        }
+        statsCollectingSearcher.collectionStatistics(field);
       }
-      if (statsMap.size() != 0) { //Don't add empty keys
+      for (Term term : additionalTerms) {
+        statsCollectingSearcher.createWeight(searcher.rewrite(new TermQuery(term)), ScoreMode.COMPLETE, 1);
+      }
+
+      CloudDescriptor cloudDescriptor = searcher.getCore().getCoreDescriptor().getCloudDescriptor();
+      if (cloudDescriptor != null) {
+        rb.rsp.add(ShardParams.SHARD_NAME, cloudDescriptor.getShardId());
+      }
+      if (!terms.isEmpty()) {
+        rb.rsp.add(TERMS_KEY, StatsUtil.termsToEncodedString(terms));
+      }
+      if (!statsMap.isEmpty()) { //Don't add empty keys
         String termStatsString = StatsUtil.termStatsMapToString(statsMap);
         rb.rsp.add(TERM_STATS_KEY, termStatsString);
         if (log.isDebugEnabled()) {
           log.debug("termStats={}, terms={}, numDocs={}", termStatsString, terms, searcher.maxDoc());
         }
       }
-      if (colMap.size() != 0){
+      if (!colMap.isEmpty()) {
         String colStatsString = StatsUtil.colStatsMapToString(colMap);
         rb.rsp.add(COL_STATS_KEY, colStatsString);
         if (log.isDebugEnabled()) {
@@ -202,21 +207,29 @@
   }
 
   @Override
-  public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
-    outgoing.purpose |= ShardRequest.PURPOSE_SET_TERM_STATS;
+  protected void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
     ModifiableSolrParams params = outgoing.params;
-    List<String> terms = (List<String>) rb.req.getContext().get(TERMS_KEY);
-    if (terms != null) {
-      Set<String> fields = new HashSet<>();
-      for (String t : terms) {
-        String[] fv = t.split(":");
-        fields.add(fv[0]);
-      }
+    Set<Term> terms = StatsUtil.termsFromEncodedString((String) rb.req.getContext().get(TERMS_KEY));
+    if (!terms.isEmpty()) {
+      Set<String> fields = terms.stream().map(t -> t.field()).collect(Collectors.toSet());
       Map<String,TermStats> globalTermStats = new HashMap<>();
       Map<String,CollectionStats> globalColStats = new HashMap<>();
       // aggregate collection stats, only for the field in terms
-
-      for (String shard : rb.shards) {
+      String collectionName = rb.req.getCore().getCoreDescriptor().getCollectionName();
+      if (collectionName == null) {
+        collectionName = rb.req.getCore().getCoreDescriptor().getName();
+      }
+      List<String> shards = new ArrayList<>();
+      for (String shardUrl : rb.shards) {
+        String shard = StatsUtil.shardUrlToShard(collectionName, shardUrl);
+        if (shard == null) {
+          log.warn("Can't determine shard from collectionName=" + collectionName + " and shardUrl=" + shardUrl + ", skipping...");
+          continue;
+        } else {
+          shards.add(shard);
+        }
+      }
+      for (String shard : shards) {
         Map<String,CollectionStats> s = getPerShardColStats(rb, shard);
         if (s == null) {
           continue;
@@ -235,17 +248,18 @@
       }
       params.add(COL_STATS_KEY, StatsUtil.colStatsMapToString(globalColStats));
       // sum up only from relevant shards
-      for (String t : terms) {
-        params.add(TERMS_KEY, t);
-        for (String shard : rb.shards) {
-          TermStats termStats = getPerShardTermStats(rb.req, t, shard);
+      params.add(TERMS_KEY, StatsUtil.termsToEncodedString(terms));
+      for (Term t : terms) {
+        String term = t.toString();
+        for (String shard : shards) {
+          TermStats termStats = getPerShardTermStats(rb.req, term, shard);
           if (termStats == null || termStats.docFreq == 0) {
             continue;
           }
-          TermStats g = globalTermStats.get(t);
+          TermStats g = globalTermStats.get(term);
           if (g == null) {
-            g = new TermStats(t);
-            globalTermStats.put(t, g);
+            g = new TermStats(term);
+            globalTermStats.put(term, g);
           }
           g.add(termStats);
         }
@@ -257,24 +271,18 @@
   }
 
   protected Map<String,CollectionStats> getPerShardColStats(ResponseBuilder rb, String shard) {
-    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) rb.req.getContext().get(PER_SHARD_COL_STATS);
-    if (perShardColStats == null) {
-      perShardColStats = Collections.emptyMap();
-    }
+    Map<String,Map<String,CollectionStats>> perShardColStats = (Map<String,Map<String,CollectionStats>>) rb.req.getContext().getOrDefault(PER_SHARD_COL_STATS, Collections.emptyMap());
     return perShardColStats.get(shard);
   }
 
   protected TermStats getPerShardTermStats(SolrQueryRequest req, String t, String shard) {
-    Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().get(PER_SHARD_TERM_STATS);
-    if (perShardTermStats == null) {
-      perShardTermStats = Collections.emptyMap();
-    }
+    Map<String,Map<String,TermStats>> perShardTermStats = (Map<String,Map<String,TermStats>>) req.getContext().getOrDefault(PER_SHARD_TERM_STATS, Collections.emptyMap());
     Map<String,TermStats> cache = perShardTermStats.get(shard);
     return (cache != null) ? cache.get(t) : null; //Term doesn't exist in shard
   }
 
   @Override
-  public void receiveGlobalStats(SolrQueryRequest req) {
+  protected void doReceiveGlobalStats(SolrQueryRequest req) {
     String globalTermStats = req.getParams().get(TERM_STATS_KEY);
     String globalColStats = req.getParams().get(COL_STATS_KEY);
     if (globalColStats != null) {
@@ -297,29 +305,23 @@
 
   protected void addToGlobalColStats(SolrQueryRequest req,
                                      Entry<String,CollectionStats> e) {
-    Map<String,CollectionStats> currentGlobalColStats = (Map<String,CollectionStats>) req.getContext().get(CURRENT_GLOBAL_COL_STATS);
-    if (currentGlobalColStats == null) {
-      currentGlobalColStats = new HashMap<>();
-      req.getContext().put(CURRENT_GLOBAL_COL_STATS, currentGlobalColStats);
-    }
+    Map<String,CollectionStats> currentGlobalColStats = (Map<String,CollectionStats>) req.getContext().computeIfAbsent(CURRENT_GLOBAL_COL_STATS, Utils.NEW_HASHMAP_FUN);
     currentGlobalColStats.put(e.getKey(), e.getValue());
   }
 
   protected void addToGlobalTermStats(SolrQueryRequest req, Entry<String,TermStats> e) {
-    Map<String,TermStats> currentGlobalTermStats = (Map<String,TermStats>) req.getContext().get(CURRENT_GLOBAL_TERM_STATS);
-    if (currentGlobalTermStats == null) {
-      currentGlobalTermStats = new HashMap<>();
-      req.getContext().put(CURRENT_GLOBAL_TERM_STATS, currentGlobalTermStats);
-    }
+    Map<String,TermStats> currentGlobalTermStats = (Map<String,TermStats>) req.getContext().computeIfAbsent(CURRENT_GLOBAL_TERM_STATS, Utils.NEW_HASHMAP_FUN);
     currentGlobalTermStats.put(e.getKey(), e.getValue());
   }
 
   protected static class ExactStatsSource extends StatsSource {
     private final Map<String,TermStats> termStatsCache;
     private final Map<String,CollectionStats> colStatsCache;
+    private final StatsCacheMetrics metrics;
 
-    public ExactStatsSource(Map<String,TermStats> termStatsCache,
+    public ExactStatsSource(StatsCacheMetrics metrics, Map<String,TermStats> termStatsCache,
                             Map<String,CollectionStats> colStatsCache) {
+      this.metrics = metrics;
       this.termStatsCache = termStatsCache;
       this.colStatsCache = colStatsCache;
     }
@@ -332,7 +334,8 @@
       // Not sure we need a warning here
       if (termStats == null) {
         log.debug("Missing global termStats info for term={}, using local stats", term);
-        return localSearcher.localTermStatistics(term, docFreq, totalTermFreq);
+        metrics.missingGlobalTermStats.increment();
+        return localSearcher != null ? localSearcher.localTermStatistics(term, docFreq, totalTermFreq) : null;
       } else {
         return termStats.toTermStatistics();
       }
@@ -344,7 +347,8 @@
       CollectionStats colStats = colStatsCache.get(field);
       if (colStats == null) {
         log.debug("Missing global colStats info for field={}, using local", field);
-        return localSearcher.localCollectionStatistics(field);
+        metrics.missingGlobalFieldStats.increment();
+        return localSearcher != null ? localSearcher.localCollectionStatistics(field) : null;
       } else {
         return colStats.toCollectionStatistics();
       }
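
For readers following the encoded-terms flow in ExactStatsCache above, here is a condensed sketch of the per-term aggregation that doSendGlobalStats performs. The shards list and the term string "title:solr" are illustrative placeholders; getPerShardTermStats, TermStats.add and StatsUtil.termStatsMapToString are the members shown in the diff.

    // Condensed sketch (illustrative values): sum per-shard TermStats into one
    // global entry per term, then serialize the map onto the outgoing request.
    Map<String, TermStats> globalTermStats = new HashMap<>();
    String term = "title:solr"; // hypothetical "field:value" term key
    for (String shard : shards) {
      TermStats local = getPerShardTermStats(rb.req, term, shard);
      if (local == null || local.docFreq == 0) {
        continue; // this shard has no postings for the term
      }
      globalTermStats.computeIfAbsent(term, TermStats::new).add(local);
    }
    params.add(TERM_STATS_KEY, StatsUtil.termStatsMapToString(globalTermStats));
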
diff --git a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
index c49f5e9..c0b425f 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/LRUStatsCache.java
@@ -21,13 +21,17 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.LongAdder;
 
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.CollectionStatistics;
 import org.apache.lucene.search.TermStatistics;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.handler.component.ResponseBuilder;
+import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.FastLRUCache;
 import org.apache.solr.search.SolrCache;
@@ -37,44 +41,129 @@
 
 /**
  * Unlike {@link ExactStatsCache} this implementation preserves term stats
- * across queries in a set of LRU caches, and based on surface features of a
- * query it determines the need to send additional RPC-s. As a result the
- * additional RPC-s are needed much less frequently.
- * 
+ * across queries in a set of LRU caches (with the same life-cycle as SolrIndexSearcher),
+ * and based on surface features of a query it determines whether additional requests
+ * are needed to retrieve local term and collection statistics from shards. As a result
+ * these additional requests are needed much less frequently.
  * <p>
- * Query terms and their stats are maintained in a set of maps. At the query
- * front-end there will be as many maps as there are shards, each maintaining
- * the respective shard statistics. At each shard server there is a single map
- * that is updated with the global statistics on every request.
+ * Query terms, their term stats and field stats are maintained in LRU caches, one cache per shard,
+ * each with a default size of {@link #DEFAULT_MAX_SIZE} entries. These caches
+ * are updated as needed (when term or field statistics are missing). Each instance of the component
+ * also keeps a global stats cache, which is aggregated from the per-shard caches.
+ * <p>Cache entries expire after a maximum idle time, by default {@link #DEFAULT_MAX_IDLE_TIME} seconds.
  */
 public class LRUStatsCache extends ExactStatsCache {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
+
+  public static final int DEFAULT_MAX_SIZE = 200;
+  public static final int DEFAULT_MAX_IDLE_TIME = 60;
+
   // local stats obtained from shard servers
+  // map of <shardName, <term, termStats>>
   private final Map<String,SolrCache<String,TermStats>> perShardTermStats = new ConcurrentHashMap<>();
+  // map of <shardName, <field, collStats>>
   private final Map<String,Map<String,CollectionStats>> perShardColStats = new ConcurrentHashMap<>();
   
   // global stats synchronized from the master
-  private final FastLRUCache<String,TermStats> currentGlobalTermStats = new FastLRUCache<>();
-  private final Map<String,CollectionStats> currentGlobalColStats = new ConcurrentHashMap<>();
-  
-  // local term context (caching term lookups)
 
-  private final Map lruCacheInitArgs = new HashMap();
+  // cache of <term, termStats>
+  private final FastLRUCache<String,TermStats> currentGlobalTermStats = new FastLRUCache<>();
+  // cache of <field, colStats>
+  private final FastLRUCache<String,CollectionStats> currentGlobalColStats = new FastLRUCache<>();
+
+  // missing stats to be fetched with the next request
+  private Set<String> missingColStats = ConcurrentHashMap.newKeySet();
+  private Set<Term> missingTermStats = ConcurrentHashMap.newKeySet();
   
+  private final Map<String, String> lruCacheInitArgs = new HashMap<>();
+
+  private final StatsCacheMetrics ignorableMetrics = new StatsCacheMetrics();
+
   @Override
-  public StatsSource get(SolrQueryRequest req) {
+  protected StatsSource doGet(SolrQueryRequest req) {
     log.debug("## GET total={}, cache {}", currentGlobalColStats , currentGlobalTermStats.size());
-    return new LRUStatsSource(currentGlobalTermStats, currentGlobalColStats);
+    return new LRUStatsSource(statsCacheMetrics);
   }
-  
+
+  @Override
+  public void clear() {
+    super.clear();
+    perShardTermStats.clear();
+    perShardColStats.clear();
+    currentGlobalTermStats.clear();
+    currentGlobalColStats.clear();
+    ignorableMetrics.clear();
+  }
+
   @Override
   public void init(PluginInfo info) {
-    // TODO: make this configurable via PluginInfo
-    lruCacheInitArgs.put("size", "100");
+    super.init(info);
+    if (info != null && info.attributes != null) {
+      lruCacheInitArgs.putAll(info.attributes);
+    }
+    lruCacheInitArgs.computeIfAbsent(SolrCache.SIZE_PARAM, s -> String.valueOf(DEFAULT_MAX_SIZE));
+    lruCacheInitArgs.computeIfAbsent(SolrCache.MAX_IDLE_TIME_PARAM, t -> String.valueOf(DEFAULT_MAX_IDLE_TIME));
+    Map<String, Object> map = new HashMap<>(lruCacheInitArgs);
+    map.put(CommonParams.NAME, "globalTermStats");
-    currentGlobalTermStats.init(lruCacheInitArgs, null, null);
+    currentGlobalTermStats.init(map, null, null);
+    currentGlobalTermStats.setState(SolrCache.State.LIVE);
+    map = new HashMap<>(lruCacheInitArgs);
+    map.put(CommonParams.NAME, "globalColStats");
+    currentGlobalColStats.init(map, null, null);
+    currentGlobalColStats.setState(SolrCache.State.LIVE);
+  }
+
+  @Override
+  protected ShardRequest doRetrieveStatsRequest(ResponseBuilder rb) {
+    // check approximately what terms are needed.
+
+    // NOTE: query rewrite only expands to terms that are present in the local index
+    // so it's possible that the result will contain fewer terms than are present across all shards.
+
+    // HOWEVER: the absence of these terms is recorded by LRUStatsSource, and they will be
+    // force-fetched on next request and cached.
+
+    // check for missing stats from previous requests
+    if (!missingColStats.isEmpty() || !missingTermStats.isEmpty()) {
+      // needs to fetch anyway, so get the full query stats + the missing stats for caching
+      ShardRequest sreq = super.doRetrieveStatsRequest(rb);
+      if (!missingColStats.isEmpty()) {
+        Set<String> requestColStats = missingColStats;
+        // there's a small window when new items may be added before
+        // creating the request and clearing, so don't clear - instead replace the instance
+        missingColStats = ConcurrentHashMap.newKeySet();
+        sreq.params.add(FIELDS_KEY, StatsUtil.fieldsToString(requestColStats));
+      }
+      if (!missingTermStats.isEmpty()) {
+        Set<Term> requestTermStats = missingTermStats;
+        missingTermStats = ConcurrentHashMap.newKeySet();
+        sreq.params.add(TERMS_KEY, StatsUtil.termsToEncodedString(requestTermStats));
+      }
+      return sreq;
+    }
+
+    // rewrite locally to see if there are any missing terms. See the note above for caveats.
+    LongAdder missing = new LongAdder();
+    try {
+      // use ignorableMetrics to avoid counting this checking as real misses
+      approxCheckMissingStats(rb, new LRUStatsSource(ignorableMetrics), t -> missing.increment(), f -> missing.increment());
+      if (missing.sum() == 0) {
+        // it should be (approximately) ok to skip the fetching
+
+        // since we already incremented the stats decrement it here
+        statsCacheMetrics.retrieveStats.decrement();
+        statsCacheMetrics.useCachedGlobalStats.increment();
+        return null;
+      } else {
+        return super.doRetrieveStatsRequest(rb);
+      }
+    } catch (IOException e) {
+      log.warn("Exception checking missing stats for query " + rb.getQuery() + ", forcing stats retrieval", e);
+      // retrieve anyway
+      return super.doRetrieveStatsRequest(rb);
+    }
   }
-  
+
   @Override
   protected void addToGlobalTermStats(SolrQueryRequest req, Entry<String,TermStats> e) {
     currentGlobalTermStats.put(e.getKey(), e.getValue());
@@ -94,12 +183,14 @@
   protected void addToPerShardTermStats(SolrQueryRequest req, String shard, String termStatsString) {
     Map<String,TermStats> termStats = StatsUtil.termStatsMapFromString(termStatsString);
     if (termStats != null) {
-      SolrCache<String,TermStats> cache = perShardTermStats.get(shard);
-      if (cache == null) { // initialize
-        cache = new FastLRUCache<>();
-        cache.init(lruCacheInitArgs, null, null);
-        perShardTermStats.put(shard, cache);
-      }
+      SolrCache<String,TermStats> cache = perShardTermStats.computeIfAbsent(shard, s -> {
+        FastLRUCache<String,TermStats> c = new FastLRUCache<>();
+        Map<String, String> map = new HashMap<>(lruCacheInitArgs);
+        map.put(CommonParams.NAME, s);
+        c.init(map, null, null);
+        c.setState(SolrCache.State.LIVE);
+        return c;
+      });
       for (Entry<String,TermStats> e : termStats.entrySet()) {
         cache.put(e.getKey(), e.getValue());
       }
@@ -122,21 +213,22 @@
     log.debug("## MERGED: perShardColStats={}, perShardTermStats={}", perShardColStats, perShardTermStats);
   }
   
-  static class LRUStatsSource extends StatsSource {
-    private final SolrCache<String,TermStats> termStatsCache;
-    private final Map<String,CollectionStats> colStatsCache;
-    
-    public LRUStatsSource(SolrCache<String,TermStats> termStatsCache, Map<String,CollectionStats> colStatsCache) {
-      this.termStatsCache = termStatsCache;
-      this.colStatsCache = colStatsCache;
+  class LRUStatsSource extends StatsSource {
+    private final StatsCacheMetrics metrics;
+
+    LRUStatsSource(StatsCacheMetrics metrics) {
+      this.metrics = metrics;
     }
+
     @Override
     public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, int docFreq, long totalTermFreq)
         throws IOException {
-      TermStats termStats = termStatsCache.get(term.toString());
+      TermStats termStats = currentGlobalTermStats.get(term.toString());
       if (termStats == null) {
         log.debug("## Missing global termStats info: {}, using local", term);
-        return localSearcher.localTermStatistics(term, docFreq, totalTermFreq);
+        missingTermStats.add(term);
+        metrics.missingGlobalTermStats.increment();
+        return localSearcher != null ? localSearcher.localTermStatistics(term, docFreq, totalTermFreq) : null;
       } else {
         return termStats.toTermStatistics();
       }
@@ -145,10 +237,12 @@
     @Override
     public CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher, String field)
         throws IOException {
-      CollectionStats colStats = colStatsCache.get(field);
+      CollectionStats colStats = currentGlobalColStats.get(field);
       if (colStats == null) {
         log.debug("## Missing global colStats info: {}, using local", field);
-        return localSearcher.localCollectionStatistics(field);
+        missingColStats.add(field);
+        metrics.missingGlobalFieldStats.increment();
+        return localSearcher != null ? localSearcher.localCollectionStatistics(field) : null;
       } else {
         return colStats.toCollectionStatistics();
       }
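
To make the configuration path above concrete: init(PluginInfo) copies any attributes from the plugin definition (typically a <statsCache .../> element in solrconfig.xml) and then fills in defaults. The following is a minimal, self-contained sketch of that defaulting logic, under the assumption that SolrCache.SIZE_PARAM is "size" and SolrCache.MAX_IDLE_TIME_PARAM is "maxIdleTime" (in seconds); the attribute values are made up.

    // Hypothetical stand-alone illustration of the defaulting in LRUStatsCache.init():
    // user-supplied attributes win, otherwise DEFAULT_MAX_SIZE / DEFAULT_MAX_IDLE_TIME apply.
    Map<String, String> lruCacheInitArgs = new HashMap<>();
    Map<String, String> userAttrs = Map.of("size", "500");  // e.g. from <statsCache size="500" .../>
    lruCacheInitArgs.putAll(userAttrs);
    lruCacheInitArgs.computeIfAbsent("size", s -> String.valueOf(200));        // DEFAULT_MAX_SIZE
    lruCacheInitArgs.computeIfAbsent("maxIdleTime", t -> String.valueOf(60));  // DEFAULT_MAX_IDLE_TIME
    // resulting args -> {size=500, maxIdleTime=60}
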
diff --git a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
index a0fb5b6..3a3ebd1 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsCache.java
@@ -20,7 +20,6 @@
 
 import java.util.List;
 
-import org.apache.solr.core.PluginInfo;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.handler.component.ShardResponse;
@@ -37,27 +36,25 @@
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   @Override
-  public StatsSource get(SolrQueryRequest req) {
+  protected StatsSource doGet(SolrQueryRequest req) {
     log.debug("## GET {}", req);
-    return new LocalStatsSource();
-  }
-
-  @Override
-  public void init(PluginInfo info) {
+    return new LocalStatsSource(statsCacheMetrics);
   }
 
   // by returning null we don't create additional round-trip request.
   @Override
-  public ShardRequest retrieveStatsRequest(ResponseBuilder rb) {
-    log.debug("## RDR {}", rb.req);
+  protected ShardRequest doRetrieveStatsRequest(ResponseBuilder rb) {
+    log.debug("## RSR {}", rb.req);
+    // already incremented the stats - decrement it now
+    statsCacheMetrics.retrieveStats.decrement();
     return null;
   }
 
   @Override
-  public void mergeToGlobalStats(SolrQueryRequest req,
+  protected void doMergeToGlobalStats(SolrQueryRequest req,
           List<ShardResponse> responses) {
     if (log.isDebugEnabled()) {
-      log.debug("## MTGD {}", req);
+      log.debug("## MTGS {}", req);
       for (ShardResponse r : responses) {
         log.debug(" - {}", r);
       }
@@ -65,17 +62,17 @@
   }
 
   @Override
-  public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
-    log.debug("## RLD {}", rb.req);
+  protected void doReturnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
+    log.debug("## RLS {}", rb.req);
   }
 
   @Override
-  public void receiveGlobalStats(SolrQueryRequest req) {
-    log.debug("## RGD {}", req);
+  protected void doReceiveGlobalStats(SolrQueryRequest req) {
+    log.debug("## RGS {}", req);
   }
 
   @Override
-  public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
-    log.debug("## SGD {}", outgoing);
+  protected void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
+    log.debug("## SGS {}", outgoing);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java
index 6b33108..542e35b 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/LocalStatsSource.java
@@ -28,19 +28,23 @@
  * local statistics.
  */
 public final class LocalStatsSource extends StatsSource {
+  private final StatsCache.StatsCacheMetrics metrics;
   
-  public LocalStatsSource() {
+  public LocalStatsSource(StatsCache.StatsCacheMetrics metrics) {
+    this.metrics = metrics;
   }
   
   @Override
   public TermStatistics termStatistics(SolrIndexSearcher localSearcher, Term term, int docFreq, long totalTermFreq)
       throws IOException {
+    metrics.missingGlobalTermStats.increment();
     return localSearcher.localTermStatistics(term, docFreq, totalTermFreq);
   }
   
   @Override
   public CollectionStatistics collectionStatistics(SolrIndexSearcher localSearcher, String field)
       throws IOException {
+    metrics.missingGlobalFieldStats.increment();
     return localSearcher.localCollectionStatistics(field);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java b/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java
index ab5790e..238bb12 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/StatsCache.java
@@ -16,14 +16,29 @@
  */
 package org.apache.solr.search.stats;
 
+import java.io.IOException;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.atomic.LongAdder;
+import java.util.function.BiConsumer;
+import java.util.function.Consumer;
 
+import org.apache.lucene.index.IndexReader;
+import org.apache.lucene.index.Term;
+import org.apache.lucene.search.CollectionStatistics;
+import org.apache.lucene.search.IndexSearcher;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.ScoreMode;
+import org.apache.lucene.search.TermStatistics;
 import org.apache.lucene.search.Weight;
+import org.apache.solr.core.PluginInfo;
 import org.apache.solr.handler.component.ResponseBuilder;
 import org.apache.solr.handler.component.ShardRequest;
 import org.apache.solr.handler.component.ShardResponse;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.search.QueryCommand;
+import org.apache.solr.search.SolrCache;
 import org.apache.solr.search.SolrIndexSearcher;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
 
@@ -36,7 +51,7 @@
  * <p>
  * There are instances of this class at the aggregator node (where the partial
  * data from shards is aggregated), and on each core involved in a shard request
- * (where this data is maintained and updated from the central cache).
+ * (where this data is maintained and updated from the aggregator's cache).
  * </p>
  */
 public abstract class StatsCache implements PluginInfoInitialized {
@@ -44,75 +59,228 @@
   /**
    * Map of terms and {@link TermStats}.
    */
-  public static final String TERM_STATS_KEY = "org.apache.solr.stats.termStats";
+  public static final String TERM_STATS_KEY = "solr.stats.term";
   /**
    * Value of {@link CollectionStats}.
    */
-  public static final String COL_STATS_KEY = "org.apache.solr.stats.colStats";
+  public static final String COL_STATS_KEY = "solr.stats.col";
   /**
    * List of terms in the query.
    */
-  public static final String TERMS_KEY = "org.apache.solr.stats.terms";
+  public static final String TERMS_KEY = "solr.stats.terms";
+  /**
+   * List of fields in the query.
+   */
+  public static final String FIELDS_KEY = "solr.stats.fields";
+
+  public static final class StatsCacheMetrics {
+    public final LongAdder lookups = new LongAdder();
+    public final LongAdder retrieveStats = new LongAdder();
+    public final LongAdder receiveGlobalStats = new LongAdder();
+    public final LongAdder returnLocalStats = new LongAdder();
+    public final LongAdder mergeToGlobalStats = new LongAdder();
+    public final LongAdder sendGlobalStats = new LongAdder();
+    public final LongAdder useCachedGlobalStats = new LongAdder();
+    public final LongAdder missingGlobalTermStats = new LongAdder();
+    public final LongAdder missingGlobalFieldStats = new LongAdder();
+
+    public void clear() {
+      lookups.reset();
+      retrieveStats.reset();
+      receiveGlobalStats.reset();
+      returnLocalStats.reset();
+      mergeToGlobalStats.reset();
+      sendGlobalStats.reset();
+      useCachedGlobalStats.reset();
+      missingGlobalTermStats.reset();
+      missingGlobalFieldStats.reset();
+    }
+
+    public void getSnapshot(BiConsumer<String, Object> consumer) {
+      consumer.accept(SolrCache.LOOKUPS_PARAM, lookups.longValue());
+      consumer.accept("retrieveStats", retrieveStats.longValue());
+      consumer.accept("receiveGlobalStats", receiveGlobalStats.longValue());
+      consumer.accept("returnLocalStats", returnLocalStats.longValue());
+      consumer.accept("mergeToGlobalStats", mergeToGlobalStats.longValue());
+      consumer.accept("sendGlobalStats", sendGlobalStats.longValue());
+      consumer.accept("useCachedGlobalStats", useCachedGlobalStats.longValue());
+      consumer.accept("missingGlobalTermStats", missingGlobalTermStats.longValue());
+      consumer.accept("missingGlobalFieldStats", missingGlobalFieldStats.longValue());
+    }
+
+    public String toString() {
+      Map<String, Object> map = new HashMap<>();
+      getSnapshot(map::put);
+      return map.toString();
+    }
+  }
+
+  protected StatsCacheMetrics statsCacheMetrics = new StatsCacheMetrics();
+  protected PluginInfo pluginInfo;
+
+  public StatsCacheMetrics getCacheMetrics() {
+    return statsCacheMetrics;
+  }
+
+  @Override
+  public void init(PluginInfo info) {
+    this.pluginInfo = info;
+  }
 
   /**
    * Creates a {@link ShardRequest} to retrieve per-shard stats related to the
    * current query and the current state of the requester's {@link StatsCache}.
+   * <p>This method updates the cache metrics and calls {@link #doRetrieveStatsRequest(ResponseBuilder)}.</p>
    *
    * @param rb contains current request
    * @return shard request to retrieve stats for terms in the current request,
    * or null if no additional request is needed (e.g. if the information
    * in global cache is already sufficient to satisfy this request).
    */
-  public abstract ShardRequest retrieveStatsRequest(ResponseBuilder rb);
+  public ShardRequest retrieveStatsRequest(ResponseBuilder rb) {
+    statsCacheMetrics.retrieveStats.increment();
+    return doRetrieveStatsRequest(rb);
+  }
+
+  protected abstract ShardRequest doRetrieveStatsRequest(ResponseBuilder rb);
 
   /**
    * Prepare a local (from the local shard) response to a "retrieve stats" shard
    * request.
+   * <p>This method updates the cache metrics and calls {@link #doReturnLocalStats(ResponseBuilder, SolrIndexSearcher)}.</p>
    *
    * @param rb       response builder
    * @param searcher current local searcher
    */
-  public abstract void returnLocalStats(ResponseBuilder rb,
-                                        SolrIndexSearcher searcher);
+  public void returnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher) {
+    statsCacheMetrics.returnLocalStats.increment();
+    doReturnLocalStats(rb, searcher);
+  }
+
+  protected abstract void doReturnLocalStats(ResponseBuilder rb, SolrIndexSearcher searcher);
 
   /**
    * Process shard responses that contain partial local stats. Usually this
    * entails combining per-shard stats for each term.
+   * <p>This method updates the cache metrics and calls {@link #doMergeToGlobalStats(SolrQueryRequest, List)}.</p>
    *
    * @param req       query request
    * @param responses responses from shards containing local stats for each shard
    */
-  public abstract void mergeToGlobalStats(SolrQueryRequest req,
-                                          List<ShardResponse> responses);
+  public void mergeToGlobalStats(SolrQueryRequest req,
+                                          List<ShardResponse> responses) {
+    statsCacheMetrics.mergeToGlobalStats.increment();
+    doMergeToGlobalStats(req, responses);
+  }
+
+  protected abstract void doMergeToGlobalStats(SolrQueryRequest req, List<ShardResponse> responses);
 
   /**
-   * Receive global stats data from the master and update a local cache of stats
+   * Receive global stats data from the master and update a local cache of global stats
    * with this global data. This event occurs either as a separate request, or
    * together with the regular query request, in which case this method is
    * called first, before preparing a {@link QueryCommand} to be submitted to
    * the local {@link SolrIndexSearcher}.
+   * <p>This method updates the cache metrics and calls {@link #doReceiveGlobalStats(SolrQueryRequest)}.</p>
    *
    * @param req query request with global stats data
    */
-  public abstract void receiveGlobalStats(SolrQueryRequest req);
+  public void receiveGlobalStats(SolrQueryRequest req) {
+    statsCacheMetrics.receiveGlobalStats.increment();
+    doReceiveGlobalStats(req);
+  }
+
+  protected abstract void doReceiveGlobalStats(SolrQueryRequest req);
 
   /**
    * Prepare global stats data to be sent out to shards in this request.
+   * <p>This method updates the cache metrics and calls {@link #doSendGlobalStats(ResponseBuilder, ShardRequest)}.</p>
    *
    * @param rb       response builder
    * @param outgoing shard request to be sent
    */
-  public abstract void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing);
+  public void sendGlobalStats(ResponseBuilder rb, ShardRequest outgoing) {
+    statsCacheMetrics.sendGlobalStats.increment();
+    doSendGlobalStats(rb, outgoing);
+  }
+
+  protected abstract void doSendGlobalStats(ResponseBuilder rb, ShardRequest outgoing);
 
   /**
-   * Prepare local {@link StatsSource} to provide stats information to perform
+   * Prepare a {@link StatsSource} that provides stats information to perform
    * local scoring (to be precise, to build a local {@link Weight} from the
    * query).
+   * <p>This method updates the cache metrics and calls {@link #doGet(SolrQueryRequest)}.</p>
    *
    * @param req query request
    * @return an instance of {@link StatsSource} to use in creating a query
    * {@link Weight}
    */
-  public abstract StatsSource get(SolrQueryRequest req);
+  public StatsSource get(SolrQueryRequest req) {
+    statsCacheMetrics.lookups.increment();
+    return doGet(req);
+  }
 
+  protected abstract StatsSource doGet(SolrQueryRequest req);
+
+  /**
+   * Clear cached statistics.
+   */
+  public void clear() {
+    statsCacheMetrics.clear();
+  }
+
+  /**
+   * Check if the <code>statsSource</code> is missing some term or field statistics info,
+   * which then needs to be retrieved.
+   * <p>NOTE: this uses the local IndexReader for query rewriting, which may expand to fewer (or different)
+   * terms than rewriting the same query against other shards' readers. As a result this method may falsely
+   * report that no stats are missing, which in turn may lead consumers to skip fetching the full stats.
+   * Consequently this would produce incorrect global IDF data for the missing terms (because only local
+   * stats would be used for these terms).</p>
+   * @param rb request to evaluate against the statsSource
+   * @param statsSource stats source to check
+   * @param missingTermStats consumer of missing term stats
+   * @param missingFieldStats consumer of missing field stats
+   * @return approximate number of missing term stats and field stats combined
+   */
+  public int approxCheckMissingStats(ResponseBuilder rb, StatsSource statsSource, Consumer<Term> missingTermStats, Consumer<String> missingFieldStats) throws IOException {
+    CheckingIndexSearcher checkingSearcher = new CheckingIndexSearcher(statsSource, rb.req.getSearcher().getIndexReader(), missingTermStats, missingFieldStats);
+    Query q = rb.getQuery();
+    q = checkingSearcher.rewrite(q);
+    checkingSearcher.createWeight(q, ScoreMode.COMPLETE, 1);
+    return checkingSearcher.missingFieldsCount + checkingSearcher.missingTermsCount;
+  }
+
+  static final class CheckingIndexSearcher extends IndexSearcher {
+    final StatsSource statsSource;
+    final Consumer<Term> missingTermStats;
+    final Consumer<String> missingFieldStats;
+    int missingTermsCount, missingFieldsCount;
+
+    CheckingIndexSearcher(StatsSource statsSource, IndexReader reader, Consumer<Term> missingTermStats, Consumer<String> missingFieldStats) {
+      super(reader);
+      this.statsSource = statsSource;
+      this.missingTermStats = missingTermStats;
+      this.missingFieldStats = missingFieldStats;
+    }
+
+    @Override
+    public TermStatistics termStatistics(Term term, int docFreq, long totalTermFreq) throws IOException {
+      if (statsSource.termStatistics(null, term, docFreq, totalTermFreq) == null) {
+        missingTermStats.accept(term);
+        missingTermsCount++;
+      }
+      return super.termStatistics(term, docFreq, totalTermFreq);
+    }
+
+    @Override
+    public CollectionStatistics collectionStatistics(String field) throws IOException {
+      if (statsSource.collectionStatistics(null, field) == null) {
+        missingFieldStats.accept(field);
+        missingFieldsCount++;
+      }
+      return super.collectionStatistics(field);
+    }
+  }
 }
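
The refactoring above turns every public StatsCache entry point into a thin wrapper that records a metric and then delegates to a protected do* hook that the concrete caches (LocalStatsCache, ExactStatsCache, LRUStatsCache) override. A minimal, self-contained sketch of that pattern follows; the class and method names are illustrative, not Solr API.

    import java.util.concurrent.atomic.LongAdder;

    abstract class CountingTemplate {
      protected final LongAdder lookups = new LongAdder();

      // public entry point: always counted, never overridden
      public final Object get(String req) {
        lookups.increment();
        return doGet(req);
      }

      // implementation-specific hook, overridden by concrete subclasses
      protected abstract Object doGet(String req);
    }

Because only the hooks are overridden, the metrics stay comparable across all implementations.
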
diff --git a/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java b/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
index 21377d0..b390e6c 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/StatsUtil.java
@@ -16,25 +16,126 @@
  */
 package org.apache.solr.search.stats;
 
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 
+import java.net.URLDecoder;
+import java.net.URLEncoder;
+import java.nio.charset.Charset;
+import java.util.Collection;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Map;
 import java.util.Map.Entry;
+import java.util.Set;
 
 import org.apache.lucene.index.Term;
-import org.apache.lucene.util.BytesRef;
-import org.apache.solr.common.util.Base64;
+import org.apache.solr.common.util.Utils;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * Various utilities for de/serialization of term stats and collection stats.
+ * <p>TODO: serialization format is very simple and does nothing to compress the data.</p>
  */
 public class StatsUtil {
   
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
+
+  public static final String ENTRY_SEPARATOR = "!";
+  public static final char ENTRY_SEPARATOR_CHAR = '!';
+
+  /**
+   * Parse a list of URLs separated by "|" in order to retrieve a shard name.
+   * @param collectionName collection name
+   * @param shardUrls list of URLs
+   * @return shard name, or shardUrl if no shard info is present,
+   *        or null if it is impossible to determine (e.g. an empty string)
+   */
+  public static String shardUrlToShard(String collectionName, String shardUrls) {
+    // we may get multiple replica urls
+    String[] urls = shardUrls.split("\\|");
+    if (urls.length == 0) {
+      return null;
+    }
+    String[] urlParts = urls[0].split("/");
+    String coreName = urlParts[urlParts.length - 1];
+    String replicaName = Utils.parseMetricsReplicaName(collectionName, coreName);
+    String shard;
+    if (replicaName != null) {
+      shard = coreName.substring(collectionName.length() + 1);
+      shard = shard.substring(0, shard.length() - replicaName.length() - 1);
+    } else {
+      if (coreName.length() > collectionName.length() && coreName.startsWith(collectionName)) {
+        shard = coreName.substring(collectionName.length() + 1);
+        if (shard.isEmpty()) {
+          shard = urls[0];
+        }
+      } else {
+        shard = urls[0];
+      }
+    }
+    return shard;
+  }
+
+  public static String termsToEncodedString(Collection<?> terms) {
+    StringBuilder sb = new StringBuilder();
+    for (Object o : terms) {
+      if (sb.length() > 0) {
+        sb.append(ENTRY_SEPARATOR);
+      }
+      if (o instanceof Term) {
+        sb.append(termToEncodedString((Term) o));
+      } else {
+        sb.append(termToEncodedString(String.valueOf(o)));
+      }
+    }
+    return sb.toString();
+  }
+
+  public static Set<Term> termsFromEncodedString(String data) {
+    Set<Term> terms = new HashSet<>();
+    if (data == null || data.isBlank()) {
+      return terms;
+    }
+    String[] items = data.split(ENTRY_SEPARATOR);
+    for (String item : items) {
+      Term t = termFromEncodedString(item);
+      if (t != null) {
+        terms.add(t);
+      }
+    }
+    return terms;
+  }
+
+  public static Set<String> fieldsFromString(String data) {
+    Set<String> fields = new HashSet<>();
+    if (data == null || data.isBlank()) {
+      return fields;
+    }
+    String[] items = data.split(ENTRY_SEPARATOR);
+    for (String item : items) {
+      if (!item.isBlank()) {
+        fields.add(item);
+      }
+    }
+    return fields;
+  }
+
+  public static String fieldsToString(Collection<String> fields) {
+    StringBuilder sb = new StringBuilder();
+    for (String field : fields) {
+      if (field.isBlank()) {
+        continue;
+      }
+      if (sb.length() > 0) {
+        sb.append(ENTRY_SEPARATOR);
+      }
+      sb.append(field);
+    }
+    return sb.toString();
+  }
+
   /**
    * Make a String representation of {@link CollectionStats}
    */
@@ -42,13 +143,13 @@
     StringBuilder sb = new StringBuilder();
     sb.append(colStats.field);
     sb.append(',');
-    sb.append(String.valueOf(colStats.maxDoc));
+    sb.append(colStats.maxDoc);
     sb.append(',');
-    sb.append(String.valueOf(colStats.docCount));
+    sb.append(colStats.docCount);
     sb.append(',');
-    sb.append(String.valueOf(colStats.sumTotalTermFreq));
+    sb.append(colStats.sumTotalTermFreq);
     sb.append(',');
-    sb.append(String.valueOf(colStats.sumDocFreq));
+    sb.append(colStats.sumDocFreq);
     return sb.toString();
   }
   
@@ -78,15 +179,69 @@
     }
   }
   
-  public static String termToString(Term t) {
+  public static String termToEncodedString(Term t) {
     StringBuilder sb = new StringBuilder();
     sb.append(t.field()).append(':');
-    BytesRef bytes = t.bytes();
-    sb.append(Base64.byteArrayToBase64(bytes.bytes, bytes.offset, bytes.offset));
+    sb.append(encode(t.text()));
     return sb.toString();
   }
+
+  public static final char ESCAPE = '_';
+  public static final char ESCAPE_ENTRY_SEPARATOR = '0';
+
+  public static String encode(String value) {
+    StringBuilder output = new StringBuilder(value.length() + 2);
+    for (int i = 0; i < value.length(); i++) {
+      char c = value.charAt(i);
+      switch (c) {
+        case ESCAPE :
+          output.append(ESCAPE).append(ESCAPE);
+          break;
+        case ENTRY_SEPARATOR_CHAR :
+          output.append(ESCAPE).append(ESCAPE_ENTRY_SEPARATOR);
+          break;
+        default :
+          output.append(c);
+      }
+    }
+    return URLEncoder.encode(output.toString(), Charset.forName("UTF-8"));
+  }
+
+  public static String decode(String value) throws IOException {
+    value = URLDecoder.decode(value, Charset.forName("UTF-8"));
+    StringBuilder output = new StringBuilder(value.length());
+    for (int i = 0; i < value.length(); i++) {
+      char c = value.charAt(i);
+      // escaped char follows
+      if (c == ESCAPE && i < value.length() - 1) {
+        i++;
+        char next = value.charAt(i);
+        if (next == ESCAPE) {
+          output.append(ESCAPE);
+        } else if (next == ESCAPE_ENTRY_SEPARATOR) {
+          output.append(ENTRY_SEPARATOR_CHAR);
+        } else {
+          throw new IOException("invalid escape sequence in " + value);
+        }
+      } else {
+        output.append(c);
+      }
+    }
+    return output.toString();
+  }
+
+  public static String termToEncodedString(String term) {
+    int idx = term.indexOf(':');
+    if (idx == -1) {
+      log.warn("Invalid term data without ':': '" + term + "'");
+      return null;
+    }
+    String prefix = term.substring(0, idx + 1);
+    String value = term.substring(idx + 1);
+    return prefix + encode(value);
+  }
   
-  private static Term termFromString(String data) {
+  public static Term termFromEncodedString(String data) {
     if (data == null || data.trim().length() == 0) {
       log.warn("Invalid empty term value");
       return null;
@@ -99,76 +254,50 @@
     String field = data.substring(0, idx);
     String value = data.substring(idx + 1);
     try {
-      return new Term(field, value);
-      // XXX this would be more correct
-      // byte[] bytes = Base64.base64ToByteArray(value);
-      // return new Term(field, new BytesRef(bytes));
+      return new Term(field, decode(value));
     } catch (Exception e) {
       log.warn("Invalid term value '" + value + "'");
       return null;
     }
   }
   
-  public static String termStatsToString(TermStats termStats,
-      boolean includeTerm) {
+  public static String termStatsToString(TermStats termStats, boolean encode) {
     StringBuilder sb = new StringBuilder();
-    if (includeTerm) {
-      sb.append(termStats.term).append(',');
-    }
-    sb.append(String.valueOf(termStats.docFreq));
+    sb.append(encode ? termToEncodedString(termStats.term) : termStats.term).append(',');
+    sb.append(termStats.docFreq);
     sb.append(',');
-    sb.append(String.valueOf(termStats.totalTermFreq));
+    sb.append(termStats.totalTermFreq);
     return sb.toString();
   }
   
-  private static TermStats termStatsFromString(String data, Term t) {
+  private static TermStats termStatsFromString(String data) {
     if (data == null || data.trim().length() == 0) {
       log.warn("Invalid empty term stats string");
       return null;
     }
     String[] vals = data.split(",");
-    if (vals.length < 2) {
+    if (vals.length < 3) {
       log.warn("Invalid term stats string, num fields " + vals.length
-          + " < 2, '" + data + "'");
+          + " < 3, '" + data + "'");
       return null;
     }
-    Term termToUse;
-    int idx = 0;
-    if (vals.length == 3) {
-      idx++;
-      // with term
-      Term term = termFromString(vals[0]);
-      if (term != null) {
-        termToUse = term;
-        if (t != null) {
-          assert term.equals(t);
-        }
-      } else { // failed term decoding
-        termToUse = t;
-      }
-    } else {
-      termToUse = t;
-    }
-    if (termToUse == null) {
-      log.warn("Missing term in termStats '" + data + "'");
-      return null;
-    }
+    Term term = termFromEncodedString(vals[0]);
     try {
-      long docFreq = Long.parseLong(vals[idx++]);
-      long totalTermFreq = Long.parseLong(vals[idx]);
-      return new TermStats(termToUse.toString(), docFreq, totalTermFreq);
+      long docFreq = Long.parseLong(vals[1]);
+      long totalTermFreq = Long.parseLong(vals[2]);
+      return new TermStats(term.toString(), docFreq, totalTermFreq);
     } catch (Exception e) {
       log.warn("Invalid termStats string '" + data + "'");
       return null;
     }
   }
-  
+
   public static Map<String,CollectionStats> colStatsMapFromString(String data) {
     if (data == null || data.trim().length() == 0) {
       return null;
     }
     Map<String,CollectionStats> map = new HashMap<String,CollectionStats>();
-    String[] entries = data.split("!");
+    String[] entries = data.split(ENTRY_SEPARATOR);
     for (String es : entries) {
       CollectionStats stats = colStatsFromString(es);
       if (stats != null) {
@@ -185,7 +314,7 @@
     StringBuilder sb = new StringBuilder();
     for (Entry<String,CollectionStats> e : stats.entrySet()) {
       if (sb.length() > 0) {
-        sb.append('!');
+        sb.append(ENTRY_SEPARATOR);
       }
       sb.append(colStatsToString(e.getValue()));
     }
@@ -197,9 +326,9 @@
       return null;
     }
     Map<String,TermStats> map = new HashMap<>();
-    String[] entries = data.split("!");
+    String[] entries = data.split(ENTRY_SEPARATOR);
     for (String es : entries) {
-      TermStats termStats = termStatsFromString(es, null);
+      TermStats termStats = termStatsFromString(es);
       if (termStats != null) {
         map.put(termStats.term, termStats);
       }
@@ -214,7 +343,7 @@
     StringBuilder sb = new StringBuilder();
     for (Entry<String,TermStats> e : stats.entrySet()) {
       if (sb.length() > 0) {
-        sb.append('!');
+        sb.append(ENTRY_SEPARATOR);
       }
       sb.append(termStatsToString(e.getValue(), true));
     }
diff --git a/solr/core/src/java/org/apache/solr/search/stats/TermStats.java b/solr/core/src/java/org/apache/solr/search/stats/TermStats.java
index 9977b28..ef059e9 100644
--- a/solr/core/src/java/org/apache/solr/search/stats/TermStats.java
+++ b/solr/core/src/java/org/apache/solr/search/stats/TermStats.java
@@ -33,7 +33,7 @@
     this.term = term;
     t = makeTerm(term);
   }
-  
+
   private Term makeTerm(String s) {
     int idx = s.indexOf(':');
     if (idx == -1) {
@@ -68,6 +68,6 @@
   }
   
   public String toString() {
-    return StatsUtil.termStatsToString(this, true);
+    return StatsUtil.termStatsToString(this, false);
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
index 31f5a74..5fd18a1 100644
--- a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
@@ -29,11 +29,12 @@
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
-import org.apache.http.HttpRequest;
-import org.apache.http.protocol.HttpContext;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
+
+import org.apache.http.HttpRequest;
+import org.apache.http.protocol.HttpContext;
 import org.eclipse.jetty.client.api.Request;
 
 /**
@@ -65,7 +66,7 @@
    * @param pluginConfig Config parameters, possibly from a ZK source
    */
   public abstract void init(Map<String, Object> pluginConfig);
-
+ 
   /**
    * This method attempts to authenticate the request. Upon a successful authentication, this
    * must call the next filter in the filter chain and set the user principal of the request,
@@ -106,10 +107,10 @@
    * delegate to {@link PKIAuthenticationPlugin}. Return true to indicate that your plugin
    * did handle the request, or false to signal that PKI plugin should handle it. This method
    * will be called by {@link PKIAuthenticationPlugin}'s interceptor.
-   *
+   * 
    * <p>
    *   If not overridden, this method will return true for plugins implementing {@link HttpClientBuilderPlugin}.
-   *   This method can be overridden by subclasses e.g. to set HTTP headers, even if you don't use a clientBuilder.
+   *   This method can be overridden by subclasses e.g. to set HTTP headers, even if you don't use a clientBuilder. 
    * </p>
    * @param httpRequest the httpRequest that is about to be sent to another internal Solr node
    * @param httpContext the context of that request.
@@ -136,7 +137,7 @@
   protected boolean interceptInternodeRequest(Request request) {
     return this instanceof HttpClientBuilderPlugin;
   }
-
+  
   /**
    * Cleanup any per request  data
    */
@@ -160,7 +161,7 @@
     metricNames.addAll(Arrays.asList("errors", "requests", "authenticated", "passThrough",
         "failWrongCredentials", "failMissingCredentials", "requestTimes", "totalTime"));
   }
-
+  
   @Override
   public String getName() {
     return this.getClass().getName();
@@ -185,5 +186,5 @@
   public MetricRegistry getMetricRegistry() {
     return registry;
   }
-
+  
 }
diff --git a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
index 16b39a4..a4c7c0d 100644
--- a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
+++ b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
@@ -39,7 +39,6 @@
     CORE_READ_PERM("core-admin-read", null),
     CORE_EDIT_PERM("core-admin-edit", null),
     READ_PERM("read", "*"),
-    CUSTOM_PERM("custom-op", null),//custom operation , user-defined
     UPDATE_PERM("update", "*"),
     CONFIG_EDIT_PERM("config-edit", unmodifiableSet(new HashSet<>(asList("*", null)))),
     CONFIG_READ_PERM("config-read", "*"),
@@ -52,8 +51,8 @@
     AUTOSCALING_WRITE_PERM("autoscaling-write", null),
     AUTOSCALING_HISTORY_READ_PERM("autoscaling-history-read", null),
     METRICS_HISTORY_READ_PERM("metrics-history-read", null),
-    BLOB_READ("blob-read", null),
-    BLOB_WRITE("blob-write", null),
+    FILESTORE_READ_PERM("filestore-read", null),
+    FILESTORE_WRITE_PERM("filestore-write", null),
     ALL("all", unmodifiableSet(new HashSet<>(asList("*", null))))
     ;
     final String name;
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index a385479..90d6b17 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -82,9 +82,9 @@
 import org.apache.solr.security.PKIAuthenticationPlugin;
 import org.apache.solr.security.PublicKeyHandler;
 import org.apache.solr.util.SolrFileCleaningTracker;
+import org.apache.solr.util.tracing.GlobalTracer;
 import org.apache.solr.util.StartupLoggingUtils;
 import org.apache.solr.util.configuration.SSLConfigurationsFactory;
-import org.apache.solr.util.tracing.GlobalTracer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -180,7 +180,6 @@
       final Path solrHomePath = solrHome == null ? SolrResourceLoader.locateSolrHome() : Paths.get(solrHome);
       coresInit = createCoreContainer(solrHomePath, extraProperties);
       SolrResourceLoader.ensureUserFilesDataDir(solrHomePath);
-      SolrResourceLoader.ensureBlobsDir(solrHomePath);
       this.httpClient = coresInit.getUpdateShardHandler().getDefaultHttpClient();
       setupJvmMetrics(coresInit);
       log.debug("user.dir=" + System.getProperty("user.dir"));
@@ -627,8 +626,8 @@
             public void close() {
               // even though we skip closes, we let local tests know not to close so that a full understanding can take
               // place
-              assert !Thread.currentThread().getStackTrace()[2].getClassName().matches(
-                  "org\\.apache\\.(?:solr|lucene).*") : CLOSE_STREAM_MSG;
+              assert Thread.currentThread().getStackTrace()[2].getClassName().matches(
+                  "org\\.apache\\.(?:solr|lucene).*") ? false : true : CLOSE_STREAM_MSG;
               this.stream = ClosedServletInputStream.CLOSED_SERVLET_INPUT_STREAM;
             }
           };
@@ -662,8 +661,9 @@
             public void close() {
               // even though we skip closes, we let local tests know not to close so that a full understanding can take
               // place
-              assert !Thread.currentThread().getStackTrace()[2].getClassName().matches(
-                  "org\\.apache\\.(?:solr|lucene).*") : CLOSE_STREAM_MSG;
+              assert Thread.currentThread().getStackTrace()[2].getClassName().matches(
+                  "org\\.apache\\.(?:solr|lucene).*") ? false
+                      : true : CLOSE_STREAM_MSG;
               stream = ClosedServletOutputStream.CLOSED_SERVLET_OUTPUT_STREAM;
             }
           };
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
index 19a4a30..7c21ad1 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
@@ -63,6 +63,7 @@
 import org.apache.solr.util.SolrFileCleaningTracker;
 import org.apache.solr.util.tracing.GlobalTracer;
 
+import static org.apache.solr.client.solrj.impl.BinaryResponseParser.BINARY_CONTENT_TYPE;
 import static org.apache.solr.common.params.CommonParams.PATH;
 
 
@@ -732,6 +733,7 @@
       String contentType = req.getContentType();
       String method = req.getMethod(); // No need to uppercase... HTTP verbs are case sensitive
       String uri = req.getRequestURI();
+      boolean isRawPut = "PUT".equals(method) && BINARY_CONTENT_TYPE.equals(contentType);
       boolean isPost = "POST".equals(method);
 
       // SOLR-6787 changed the behavior of a POST without content type.  Previously it would throw an exception,
@@ -747,7 +749,7 @@
       // POST was handled normally, but other methods (PUT/DELETE)
       // were handled by restlet if the URI contained /schema or /config
       // "handled by restlet" means that we don't attempt to handle any request body here.
-      if (!isPost) {
+      if (!isPost && !isRawPut) {
         if (contentType == null) {
           return parseQueryString(req.getQueryString());
         }
diff --git a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
index ea42552..f0972db 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/AtomicUpdateDocumentMerger.java
@@ -541,9 +541,9 @@
   private Collection<Pattern> preparePatterns(Object fieldVal) {
     final Collection<Pattern> patterns = new LinkedHashSet<>(1);
     if (fieldVal instanceof Collection) {
-      Collection<String> patternVals = (Collection<String>) fieldVal;
-      for (String patternVal : patternVals) {
-        patterns.add(Pattern.compile(patternVal));
+      Collection<Object> patternVals = (Collection<Object>) fieldVal;
+      for (Object patternVal : patternVals) {
+        patterns.add(Pattern.compile(patternVal.toString()));
       }
     } else {
       patterns.add(Pattern.compile(fieldVal.toString()));
diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
index c45f0c6..eb3c08b 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
@@ -328,7 +328,7 @@
   public static class LazyUpdateProcessorFactoryHolder extends PluginBag.PluginHolder<UpdateRequestProcessorFactory> {
     private volatile UpdateRequestProcessorFactory lazyFactory;
 
-    public LazyUpdateProcessorFactoryHolder(final PluginBag.PluginHolder<UpdateRequestProcessorFactory> holder) {
+    public LazyUpdateProcessorFactoryHolder(final PluginBag.LazyPluginHolder holder) {
       super(holder.getPluginInfo());
       lazyFactory = new LazyUpdateRequestProcessorFactory(holder);
     }
@@ -340,20 +340,26 @@
     }
 
     public class LazyUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory {
-      private final PluginBag.PluginHolder<UpdateRequestProcessorFactory> holder;
+      private final PluginBag.LazyPluginHolder holder;
+      volatile UpdateRequestProcessorFactory delegate;
 
-      public LazyUpdateRequestProcessorFactory(PluginBag.PluginHolder holder) {
+      public LazyUpdateRequestProcessorFactory(PluginBag.LazyPluginHolder holder) {
         this.holder = holder;
       }
 
       public UpdateRequestProcessorFactory getDelegate() {
-        return holder.get();
+        return delegate;
       }
 
       @Override
       public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
-        return holder.get().getInstance(req, rsp, next);
+        if (delegate != null) return delegate.getInstance(req, rsp, next);
 
+        synchronized (this) {
+          if (delegate == null)
+            delegate = (UpdateRequestProcessorFactory) holder.get();
+        }
+        return delegate.getInstance(req, rsp, next);
       }
     }
   }
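
The lazy factory above resolves its real delegate only on first use and caches it; because the delegate field is volatile, the unsynchronized fast-path read is safe. A generic, self-contained sketch of the same pattern (JDK only, names illustrative):

    import java.util.function.Supplier;

    final class LazyRef<T> {
      private final Supplier<T> source;
      private volatile T delegate;

      LazyRef(Supplier<T> source) { this.source = source; }

      T get() {
        T d = delegate;            // single volatile read on the fast path
        if (d != null) return d;
        synchronized (this) {      // slow path: resolve the delegate exactly once
          if (delegate == null) delegate = source.get();
          return delegate;
        }
      }
    }
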
diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java
index 3aca5d5..8795e33 100644
--- a/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java
+++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLFUCache.java
@@ -23,8 +23,9 @@
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
+//import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.atomic.LongAdder;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.lucene.util.Accountable;
@@ -69,7 +70,7 @@
   private long maxIdleTimeNs;
   private final TimeSource timeSource = TimeSource.NANO_TIME;
   private final AtomicLong oldestEntry = new AtomicLong(0L);
-  private final AtomicLong ramBytes = new AtomicLong(0);
+  private final LongAdder ramBytes = new LongAdder();
 
   public ConcurrentLFUCache(int upperWaterMark, final int lowerWaterMark, int acceptableSize,
                             int initialSize, boolean runCleanupThread, boolean runNewThreadForCleanup,
@@ -155,11 +156,11 @@
   public V get(K key) {
     CacheEntry<K, V> e = map.get(key);
     if (e == null) {
-      if (islive) stats.missCounter.incrementAndGet();
+      if (islive) stats.missCounter.increment();
     } else if (islive) {
       e.lastAccessed = timeSource.getEpochTimeNs();
-      stats.accessCounter.incrementAndGet();
-      e.hits.incrementAndGet();
+      stats.accessCounter.increment();
+      e.hits.increment();
     }
     return e != null ? e.value : null;
   }
@@ -168,8 +169,8 @@
   public V remove(K key) {
     CacheEntry<K, V> cacheEntry = map.remove(key);
     if (cacheEntry != null) {
-      stats.size.decrementAndGet();
-      ramBytes.addAndGet(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY);
+      stats.size.decrement();
+      ramBytes.add(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY);
       return cacheEntry.value;
     }
     return null;
@@ -187,23 +188,24 @@
    * @lucene.internal
    */
   public V putCacheEntry(CacheEntry<K, V> e) {
-    stats.accessCounter.incrementAndGet();
+    stats.accessCounter.increment();
     // initialize oldestEntry
     oldestEntry.updateAndGet(x -> x > e.lastAccessed  || x == 0 ? e.lastAccessed : x);
     CacheEntry<K, V> oldCacheEntry = map.put(e.key, e);
     int currentSize;
     if (oldCacheEntry == null) {
-      currentSize = stats.size.incrementAndGet();
-      ramBytes.addAndGet(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry
+      stats.size.increment();
+      currentSize = stats.size.intValue();
+      ramBytes.add(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry
     } else {
-      currentSize = stats.size.get();
-      ramBytes.addAndGet(-oldCacheEntry.ramBytesUsed());
-      ramBytes.addAndGet(e.ramBytesUsed());
+      currentSize = stats.size.intValue();
+      ramBytes.add(-oldCacheEntry.ramBytesUsed());
+      ramBytes.add(e.ramBytesUsed());
     }
     if (islive) {
-      stats.putCounter.incrementAndGet();
+      stats.putCounter.increment();
     } else {
-      stats.nonLivePutCounter.incrementAndGet();
+      stats.nonLivePutCounter.increment();
     }
 
     // Check if we need to clear out old entries from the cache.
@@ -242,7 +244,7 @@
       isCleaning = true;
       this.lowHitCount = lowHitCount; // volatile write to make isCleaning visible
       
-      int sz = stats.size.get();
+      int sz = stats.size.intValue();
       boolean evictByIdleTime = maxIdleTimeNs != Long.MAX_VALUE;
       long idleCutoff = evictByIdleTime ? timeSource.getEpochTimeNs() - maxIdleTimeNs : -1L;
       if (sz <= upperWaterMark && (evictByIdleTime && oldestEntry.get() > idleCutoff)) {
@@ -264,7 +266,7 @@
           if (entry.getValue().lastAccessedCopy < idleCutoff) {
             iterator.remove();
             postRemoveEntry(entry.getValue());
-            stats.evictionIdleCounter.incrementAndGet();
+            stats.evictionIdleCounter.increment();
           } else {
             if (entry.getValue().lastAccessedCopy < currentOldestEntry) {
               currentOldestEntry = entry.getValue().lastAccessedCopy;
@@ -275,7 +277,7 @@
           oldestEntry.set(currentOldestEntry);
         }
         // refresh size and maybe return
-        sz = stats.size.get();
+        sz = stats.size.intValue();
         if (sz <= upperWaterMark) {
           return;
         }
@@ -286,10 +288,11 @@
 
       for (CacheEntry<K, V> ce : map.values()) {
         // set hitsCopy to avoid later Atomic reads.  Primitive types are faster than the atomic get().
-        ce.hitsCopy = ce.hits.get();
+        ce.hitsCopy = ce.hits.longValue();
         ce.lastAccessedCopy = ce.lastAccessed;
         if (timeDecay) {
-          ce.hits.set(ce.hitsCopy >>> 1);
+          ce.hits.reset();
+          ce.hits.add(ce.hitsCopy >>> 1);
         }
         if (tree.size() < wantToRemove) {
           tree.add(ce);
@@ -342,9 +345,9 @@
 
   private void postRemoveEntry(CacheEntry<K, V> o) {
     if (o == null) return;
-    ramBytes.addAndGet(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY));
-    stats.size.decrementAndGet();
-    stats.evictionCounter.incrementAndGet();
+    ramBytes.add(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY));
+    stats.size.decrement();
+    stats.evictionCounter.increment();
     if (evictionListener != null) evictionListener.evictedEntry(o.key, o.value);
   }
 
@@ -367,7 +370,7 @@
     try {
       for (Map.Entry<Object, CacheEntry<K, V>> entry : map.entrySet()) {
         CacheEntry<K, V> ce = entry.getValue();
-        ce.hitsCopy = ce.hits.get();
+        ce.hitsCopy = ce.hits.longValue();
         ce.lastAccessedCopy = ce.lastAccessed;
         if (tree.size() < n) {
           tree.add(ce);
@@ -411,7 +414,7 @@
     try {
       for (Map.Entry<Object, CacheEntry<K, V>> entry : map.entrySet()) {
         CacheEntry<K, V> ce = entry.getValue();
-        ce.hitsCopy = ce.hits.get();
+        ce.hitsCopy = ce.hits.longValue();
         ce.lastAccessedCopy = ce.lastAccessed;
         if (tree.size() < n) {
           tree.add(ce);
@@ -437,13 +440,13 @@
   }
 
   public int size() {
-    return stats.size.get();
+    return stats.size.intValue();
   }
 
   @Override
   public void clear() {
     map.clear();
-    ramBytes.set(0);
+    ramBytes.reset();
   }
 
   public Map<Object, CacheEntry<K, V>> getMap() {
@@ -452,7 +455,7 @@
 
   @Override
   public long ramBytesUsed() {
-    return BASE_RAM_BYTES_USED + ramBytes.get();
+    return BASE_RAM_BYTES_USED + ramBytes.sum();
   }
 
   public static class CacheEntry<K, V> implements Comparable<CacheEntry<K, V>>, Accountable {
@@ -463,7 +466,7 @@
     final K key;
     final V value;
     final long ramBytesUsed;
-    volatile AtomicLong hits = new AtomicLong(0);
+    final LongAdder hits = new LongAdder();
     long hitsCopy = 0;
     volatile long lastAccessed = 0;
     long lastAccessedCopy = 0;
@@ -500,7 +503,7 @@
 
     @Override
     public String toString() {
-      return "key: " + key + " value: " + value + " hits:" + hits.get();
+      return "key: " + key + " value: " + value + " hits:" + hits.longValue();
     }
 
     @Override
@@ -529,57 +532,63 @@
   public static class Stats implements Accountable {
     private static final long RAM_BYTES_USED =
         RamUsageEstimator.shallowSizeOfInstance(Stats.class) +
-            6 * RamUsageEstimator.primitiveSizes.get(long.class) +
-            RamUsageEstimator.primitiveSizes.get(int.class);
+            // LongAdder
+            7 * (
+                RamUsageEstimator.NUM_BYTES_ARRAY_HEADER +
+                    RamUsageEstimator.primitiveSizes.get(long.class) +
+                    2 * (RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.primitiveSizes.get(long.class))
+            );
 
-    private final AtomicLong accessCounter = new AtomicLong(0),
-        putCounter = new AtomicLong(0),
-        nonLivePutCounter = new AtomicLong(0),
-        missCounter = new AtomicLong();
-    private final AtomicInteger size = new AtomicInteger();
-    private AtomicLong evictionCounter = new AtomicLong();
-    private AtomicLong evictionIdleCounter = new AtomicLong();
+    private final LongAdder accessCounter = new LongAdder();
+    private final LongAdder putCounter = new LongAdder();
+    private final LongAdder nonLivePutCounter = new LongAdder();
+    private final LongAdder missCounter = new LongAdder();
+    private final LongAdder size = new LongAdder();
+    private LongAdder evictionCounter = new LongAdder();
+    private LongAdder evictionIdleCounter = new LongAdder();
 
     public long getCumulativeLookups() {
-      return (accessCounter.get() - putCounter.get() - nonLivePutCounter.get()) + missCounter.get();
+      return (accessCounter.longValue() - putCounter.longValue() - nonLivePutCounter.longValue()) + missCounter.longValue();
     }
 
     public long getCumulativeHits() {
-      return accessCounter.get() - putCounter.get() - nonLivePutCounter.get();
+      return accessCounter.longValue() - putCounter.longValue() - nonLivePutCounter.longValue();
     }
 
     public long getCumulativePuts() {
-      return putCounter.get();
+      return putCounter.longValue();
     }
 
     public long getCumulativeEvictions() {
-      return evictionCounter.get();
+      return evictionCounter.longValue();
     }
 
     public long getCumulativeIdleEvictions() {
-      return evictionIdleCounter.get();
+      return evictionIdleCounter.longValue();
     }
 
     public int getCurrentSize() {
-      return size.get();
+      return size.intValue();
     }
 
     public long getCumulativeNonLivePuts() {
-      return nonLivePutCounter.get();
+      return nonLivePutCounter.longValue();
     }
 
     public long getCumulativeMisses() {
-      return missCounter.get();
+      return missCounter.longValue();
     }
 
     public void add(Stats other) {
-      accessCounter.addAndGet(other.accessCounter.get());
-      putCounter.addAndGet(other.putCounter.get());
-      nonLivePutCounter.addAndGet(other.nonLivePutCounter.get());
-      missCounter.addAndGet(other.missCounter.get());
-      evictionCounter.addAndGet(other.evictionCounter.get());
-      evictionIdleCounter.addAndGet(other.evictionIdleCounter.get());
-      size.set(Math.max(size.get(), other.size.get()));
+      accessCounter.add(other.accessCounter.longValue());
+      putCounter.add(other.putCounter.longValue());
+      nonLivePutCounter.add(other.nonLivePutCounter.longValue());
+      missCounter.add(other.missCounter.longValue());
+      evictionCounter.add(other.evictionCounter.longValue());
+      evictionIdleCounter.add(other.evictionIdleCounter.longValue());
+      long maxSize = Math.max(size.longValue(), other.size.longValue());
+      size.reset();
+      size.add(maxSize);
     }
 
     @Override
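
The bulk of this hunk replaces AtomicLong/AtomicInteger counters with LongAdder, trading a single contended CAS per update for striped cells that are summed on read. A small standalone comparison of the two APIs as they are used above, not tied to Solr's classes:

    import java.util.concurrent.atomic.AtomicLong;
    import java.util.concurrent.atomic.LongAdder;

    public class CounterComparison {
      public static void main(String[] args) {
        // AtomicLong: every increment is a CAS on one shared cell; get() is exact and cheap.
        AtomicLong atomicHits = new AtomicLong();
        atomicHits.incrementAndGet();
        long exact = atomicHits.get();

        // LongAdder: increments go to striped cells under contention; sum()/longValue()
        // folds the cells together, so reads are slightly more expensive and only
        // weakly consistent while writers are still active.
        LongAdder adderHits = new LongAdder();
        adderHits.increment();
        adderHits.add(5);
        long total = adderHits.sum();   // same value as longValue()

        System.out.println(exact + " " + total);
      }
    }
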
diff --git a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
index b6bf2a1..61793d8 100644
--- a/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/util/ConcurrentLRUCache.java
@@ -32,7 +32,7 @@
 import java.util.TreeSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicInteger;
+//import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.concurrent.atomic.LongAdder;
 import java.util.concurrent.locks.ReentrantLock;
@@ -78,7 +78,7 @@
   private boolean runCleanupThread;
 
   private long ramLowerWatermark, ramUpperWatermark;
-  private final AtomicLong ramBytes = new AtomicLong(0);
+  private final LongAdder ramBytes = new LongAdder();
 
   public ConcurrentLRUCache(long ramLowerWatermark, long ramUpperWatermark,
                             boolean runCleanupThread, EvictionListener<K, V> evictionListener) {
@@ -202,8 +202,8 @@
   public V remove(K key) {
     CacheEntry<K,V> cacheEntry = map.remove(key);
     if (cacheEntry != null) {
-      stats.size.decrementAndGet();
-      ramBytes.addAndGet(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY);
+      stats.size.decrement();
+      ramBytes.add(-cacheEntry.ramBytesUsed() - HASHTABLE_RAM_BYTES_PER_ENTRY);
       return cacheEntry.value;
     }
     return null;
@@ -226,12 +226,13 @@
     CacheEntry<K,V> oldCacheEntry = map.put(e.key, e);
     int currentSize;
     if (oldCacheEntry == null) {
-      currentSize = stats.size.incrementAndGet();
-      ramBytes.addAndGet(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry
+      stats.size.increment();
+      currentSize = stats.size.intValue();
+      ramBytes.add(e.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY); // added key + value + entry
     } else {
-      currentSize = stats.size.get();
-      ramBytes.addAndGet(-oldCacheEntry.ramBytesUsed());
-      ramBytes.addAndGet(e.ramBytesUsed());
+      currentSize = stats.size.intValue();
+      ramBytes.add(-oldCacheEntry.ramBytesUsed());
+      ramBytes.add(e.ramBytesUsed());
     }
     if (islive) {
       stats.putCounter.increment();
@@ -250,7 +251,7 @@
     // Thread safety note: isCleaning read is piggybacked (comes after) other volatile reads
     // in this method.
     long idleCutoff = timeSource.getEpochTimeNs() - maxIdleTimeNs;
-    if ((currentSize > upperWaterMark || ramBytes.get() > ramUpperWatermark || oldestEntryNs.get() < idleCutoff) && !isCleaning) {
+    if ((currentSize > upperWaterMark || ramBytes.sum() > ramUpperWatermark || oldestEntryNs.get() < idleCutoff) && !isCleaning) {
       if (newThreadForCleanup) {
         new Thread(this::markAndSweep).start();
       } else if (cleanupThread != null){
@@ -311,7 +312,7 @@
       Map.Entry<Object, CacheEntry<K, V>> entry = iterator.next();
       if (entry.getValue().createTime < idleCutoff) {
         iterator.remove();
-        stats.evictionIdleCounter.incrementAndGet();
+        stats.evictionIdleCounter.increment();
         postRemoveEntry(entry.getValue());
       } else {
         if (entry.getValue().createTime < currentOldestEntry) {
@@ -341,7 +342,7 @@
     for (int i = entriesInAccessOrder.size() - 1; i >= 0; i--) {
       CacheEntry<K, V> kvCacheEntry = entriesInAccessOrder.get(i);
       evictEntry(kvCacheEntry.key);
-      if (ramBytes.get() <= ramLowerWatermark)  {
+      if (ramBytes.sum() <= ramLowerWatermark)  {
         break; // we are done!
       }
     }
@@ -366,7 +367,7 @@
     this.oldestEntry = oldestEntry;     // volatile write to make isCleaning visible
 
     long timeCurrent = stats.accessCounter.longValue();
-    int sz = stats.size.get();
+    int sz = stats.size.intValue();
 
     int numRemoved = 0;
     int numKept = 0;
@@ -585,9 +586,9 @@
 
   private void postRemoveEntry(CacheEntry<K, V> o) {
     if (o == null) return;
-    ramBytes.addAndGet(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY));
-    stats.size.decrementAndGet();
-    stats.evictionCounter.incrementAndGet();
+    ramBytes.add(-(o.ramBytesUsed() + HASHTABLE_RAM_BYTES_PER_ENTRY));
+    stats.size.decrement();
+    stats.evictionCounter.increment();
     if(evictionListener != null) evictionListener.evictedEntry(o.key,o.value);
   }
 
@@ -657,13 +658,13 @@
   }
 
   public int size() {
-    return stats.size.get();
+    return stats.size.intValue();
   }
 
   @Override
   public void clear() {
     map.clear();
-    ramBytes.set(0);
+    ramBytes.reset();
   }
 
   public Map<Object, CacheEntry<K,V>> getMap() {
@@ -749,23 +750,21 @@
         // accounts for field refs
         RamUsageEstimator.shallowSizeOfInstance(Stats.class) +
             // LongAdder
-            3 * (
+            6 * (
                 RamUsageEstimator.NUM_BYTES_ARRAY_HEADER +
                 RamUsageEstimator.primitiveSizes.get(long.class) +
                 2 * (RamUsageEstimator.NUM_BYTES_OBJECT_REF + RamUsageEstimator.primitiveSizes.get(long.class))
             ) +
             // AtomicLong
-            3 * RamUsageEstimator.primitiveSizes.get(long.class) +
-            // AtomicInteger
-            RamUsageEstimator.primitiveSizes.get(int.class);
+            RamUsageEstimator.primitiveSizes.get(long.class);
 
     private final AtomicLong accessCounter = new AtomicLong(0);
     private final LongAdder putCounter = new LongAdder();
     private final LongAdder nonLivePutCounter = new LongAdder();
     private final LongAdder missCounter = new LongAdder();
-    private final AtomicInteger size = new AtomicInteger();
-    private AtomicLong evictionCounter = new AtomicLong();
-    private AtomicLong evictionIdleCounter = new AtomicLong();
+    private final LongAdder size = new LongAdder();
+    private LongAdder evictionCounter = new LongAdder();
+    private LongAdder evictionIdleCounter = new LongAdder();
 
     public long getCumulativeLookups() {
       return (accessCounter.longValue() - putCounter.longValue() - nonLivePutCounter.longValue()) + missCounter.longValue();
@@ -780,15 +779,15 @@
     }
 
     public long getCumulativeEvictions() {
-      return evictionCounter.get();
+      return evictionCounter.longValue();
     }
 
     public long getCumulativeIdleEvictions() {
-      return evictionIdleCounter.get();
+      return evictionIdleCounter.longValue();
     }
 
     public int getCurrentSize() {
-      return size.get();
+      return size.intValue();
     }
 
     public long getCumulativeNonLivePuts() {
@@ -804,8 +803,10 @@
       putCounter.add(other.putCounter.longValue());
       nonLivePutCounter.add(other.nonLivePutCounter.longValue());
       missCounter.add(other.missCounter.longValue());
-      evictionCounter.addAndGet(other.evictionCounter.get());
-      size.set(Math.max(size.get(), other.size.get()));
+      evictionCounter.add(other.evictionCounter.longValue());
+      long maxSize = Math.max(size.longValue(), other.size.longValue());
+      size.reset();
+      size.add(maxSize);
     }
 
     @Override
@@ -862,7 +863,7 @@
 
   @Override
   public long ramBytesUsed() {
-    return BASE_RAM_BYTES_USED + ramBytes.get();
+    return BASE_RAM_BYTES_USED + ramBytes.sum();
   }
 
   @Override
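
One detail shared by both Stats.add implementations above: LongAdder has no compare-and-set, so merging the size counters is done by taking the max, resetting, and re-adding. A compact illustration of that pattern, under the assumption (as in the code above) that the merge is not racing with concurrent updates to the counter:

    import java.util.concurrent.atomic.LongAdder;

    public class MaxMerge {
      public static void main(String[] args) {
        LongAdder mine = new LongAdder();
        LongAdder theirs = new LongAdder();
        mine.add(40);
        theirs.add(75);

        // LongAdder offers no atomic "set to max(x, y)", so the merge is three steps:
        // read both, reset, re-add. reset()+add() is not atomic, which is acceptable
        // only because merging happens outside the hot update path.
        long max = Math.max(mine.longValue(), theirs.longValue());
        mine.reset();
        mine.add(max);

        System.out.println(mine.longValue()); // 75
      }
    }
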
diff --git a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
index ede009a..8bac73c 100644
--- a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
+++ b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
@@ -21,6 +21,8 @@
 import javax.crypto.IllegalBlockSizeException;
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
+import java.io.IOException;
+import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
@@ -41,6 +43,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import com.google.common.collect.ImmutableMap;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.Base64;
 import org.slf4j.Logger;
@@ -60,7 +63,7 @@
       m.put(e.getKey(), getX509PublicKey(e.getValue()));
 
     }
-    this.keys = m;
+    this.keys = ImmutableMap.copyOf(m);
   }
 
   /**
@@ -116,6 +119,30 @@
 
   }
 
+  public static boolean verify(PublicKey publicKey, byte[] sig, InputStream is)
+      throws InvalidKeyException, SignatureException, IOException {
+    try {
+      Signature signature = Signature.getInstance("SHA1withRSA");
+      signature.initVerify(publicKey);
+      byte[] buf = new byte[1024];
+      while (true) {
+        int sz = is.read(buf);
+        if (sz == -1) break;
+        signature.update(buf, 0, sz);
+      }
+      try {
+        return signature.verify(sig);
+      } catch (SignatureException e) {
+        return false;
+      }
+    } catch (NoSuchAlgorithmException e) {
+      // will not happen
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+    }
+
+  }
+
+
   private static byte[][] evpBytesTokey(int key_len, int iv_len, MessageDigest md,
                                         byte[] salt, byte[] data, int count) {
     byte[][] both = new byte[2][];
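
The new stream-based verify(...) overload reads the content in 1 KB chunks, feeds it to a SHA1withRSA Signature, and returns false (rather than throwing) when the detached signature does not match. A hedged usage sketch with hypothetical file names, showing how a caller might check a jar against a DER-encoded public key and a base64 signature like the ones listed in sig.txt below:

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.KeyFactory;
    import java.security.PublicKey;
    import java.security.spec.X509EncodedKeySpec;
    import java.util.Base64;

    public class VerifyJarExample {
      public static void main(String[] args) throws Exception {
        // Hypothetical inputs: an X.509/DER public key, a base64 signature produced
        // with "openssl dgst -sha1 -sign ... | openssl enc -base64", and the jar.
        byte[] keyBytes = Files.readAllBytes(Paths.get("pub_key512.der"));
        PublicKey publicKey = KeyFactory.getInstance("RSA")
            .generatePublic(new X509EncodedKeySpec(keyBytes));

        byte[] sig = Base64.getMimeDecoder().decode(
            Files.readAllBytes(Paths.get("cache.jar.bin.sig.b64")));

        try (InputStream jar = new FileInputStream("cache.jar.bin")) {
          boolean ok = org.apache.solr.util.CryptoKeys.verify(publicKey, sig, jar);
          System.out.println("signature valid: " + ok);
        }
      }
    }
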
diff --git a/solr/core/src/java/org/apache/solr/util/JmxUtil.java b/solr/core/src/java/org/apache/solr/util/JmxUtil.java
index f27a55e..16dc4e8 100644
--- a/solr/core/src/java/org/apache/solr/util/JmxUtil.java
+++ b/solr/core/src/java/org/apache/solr/util/JmxUtil.java
@@ -23,6 +23,7 @@
 import javax.management.remote.JMXServiceURL;
 
 import java.io.IOException;
+import java.lang.management.ManagementFactory;
 import java.util.List;
 
 /**
@@ -31,12 +32,16 @@
 public final class JmxUtil {
 
   /**
-   * Retrieve the first MBeanServer found.
+   * Retrieve the first MBeanServer found; if none is found, fall back to the platform MBean server.
    *
    * @return the first MBeanServer found
    */
   public static MBeanServer findFirstMBeanServer() {
-    return findMBeanServerForAgentId(null);
+    MBeanServer mBeanServer = findMBeanServerForAgentId(null);
+    if (mBeanServer == null)  {
+      return ManagementFactory.getPlatformMBeanServer();
+    }
+    return mBeanServer;
   }
 
   /**
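
The change above makes findFirstMBeanServer() fall back to the JVM's platform MBean server when MBeanServerFactory has none registered. A minimal sketch of that lookup-then-fallback pattern using only standard javax.management and java.lang.management APIs, independent of JmxUtil:

    import javax.management.MBeanServer;
    import javax.management.MBeanServerFactory;
    import java.lang.management.ManagementFactory;
    import java.util.List;

    public class MBeanServerLookup {
      public static MBeanServer firstOrPlatform() {
        // findMBeanServer(null) returns every MBeanServer created through
        // MBeanServerFactory in this JVM; the list can legitimately be empty.
        List<MBeanServer> servers = MBeanServerFactory.findMBeanServer(null);
        if (servers == null || servers.isEmpty()) {
          // The platform server always exists (created on first access),
          // so callers get a usable server instead of null.
          return ManagementFactory.getPlatformMBeanServer();
        }
        return servers.get(0);
      }

      public static void main(String[] args) {
        System.out.println(firstOrPlatform().getDefaultDomain());
      }
    }
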
diff --git a/solr/core/src/test-files/cryptokeys/priv_key2048.pem b/solr/core/src/test-files/cryptokeys/priv_key2048.pem
deleted file mode 100644
index 4d2c8c2..0000000
--- a/solr/core/src/test-files/cryptokeys/priv_key2048.pem
+++ /dev/null
@@ -1,27 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIEpQIBAAKCAQEA1fSq/8iz1sIppHhSKrC0g2uzfFKZzWZAbcvVQbyS/pwxC7VB
-hR93DVINyGGT3XHnpemt/h0wrifCIEMyqSLTIhiu5bRJpfE7UO9vGgTcP5+i2wTe
-cKHqrxDvbQ4D7co96Gvu2cShySbOHsFjZXL4eaqU2W2x8S7U+OjRBwtwMxB4vstX
-5u75WtwVXwNRj+uXIfTTisplE/nA/slqByW4Q9QAg+du+Ejh4W7nF+Z9GRMR7MZe
-c1TeGOYZd8YzYku7WyUZ1SRQ6JjaZrdphlLtysMgqP0MMajEoFs/ajeNHef0iCz0
-TnB05PQd+GPO5+JrLPZ399mucl/jM+cbixn9pwIDAQABAoIBAQCpfA51XryvU9F+
-+t1D+gSU0p00z44MeoJxN3WwhDwBOyNS/ftwA/Pf9m76m+lxEwkIkesFkIP+gXoy
-6mhYOUD9EoaBaeapcEWnoHPnLZB3SfLNArdei7UHhyfSwLZ2CK4vzkg/85ahbH79
-N/6P35pbbrhI4K+DubB1mJ/0r6fqmh6/04L47/liAPvsSM9ZJIMwbuZbYY21ggI9
-ZGk+kO0C/CyzxplaVLJ8P86KnRloEfjSmMhP72z7bja/BE2NX42G12YbjY7tVMn7
-duTWU2F4JWYriWAHr+4GwODDdtvn/R5jPirDIJeHCd6Bg1t7KibHRTcgYgtwDBqG
-F65g4zqRAoGBAP2fry+6uXe3rAJDJrCSKPQVTv5QhOvG1466xsOaWRSe/rx1Mvnd
-Z4pe+T8bdvsvqFnNMAkZKzzPjJ+oCSVKHhcerzMm2Cw6Gpv2yywA/1VykIoZmdNM
-/vHjC7w35q7xwEUHxB/rt2vvijrAYnhaq86uIXzoiqTGaKJ/z34QsCppAoGBANf1
-1wsISnZPjIipMIYtC7Co3GCUhsQ+ksVBhtsOHaKfon3Q69Qbz93l7dbCwgFbL6td
-HW/ppnABZLVFHnoLJ5YrriVZ1Wizx90+RFGdNj74UTV8bfqr/C32UKTjqoYjPAZO
-vEOzHkmpc9I1mrxm1Mcff5EHDFmXGXoZ2GLCpEWPAoGAOXroVFPoVtacuEKJ0Ti+
-6Vqu9XpANcNx9RollA02JTNHnmSdcf2YysZtjLznwVPyvq9/NICsyPJs93443Geo
-3CqLIHesRJHCmBhdwZJUTART98iHkVkA6sc/UKAGux11Ku/wph9hCahXVqtlZct+
-5q+WTV3SljeVXUbEOtkDZAkCgYEArnd0R/xls5jmbs1IX01q4Ug56Wh0S3xFtEgQ
-u013EZcnfb9Xld72Gk0TzOlANDpHk4hBLNU02c22X188lNoIHCCjqpcdel2rPIh+
-RvTcCxku+ifQ7a8dpsAUPHGUpJM4fdwD6il9cYMNB6i4njXw9gDzXOW1y3bvZR4W
-GwsmDO8CgYEA5vG0TdwkvdDcsJYimm3WQJ/VnYidE6JfjnAxnPwFFPjQoDRIS32f
-TMMJFTHSSH4xgQLEhEfaAbrkptpPORM9QAjjRx2RXoa5yu2GMpDWua4MxpHdqiSY
-v/rOw+6fZbe8YC9bZ8AE+GPuHdJDQFoSU7ieCGiF/iwWB2jhwCm7OyY=
------END RSA PRIVATE KEY-----
diff --git a/solr/core/src/test-files/cryptokeys/pub_key2048.der b/solr/core/src/test-files/cryptokeys/pub_key2048.der
deleted file mode 100644
index 0e0e36b..0000000
--- a/solr/core/src/test-files/cryptokeys/pub_key2048.der
+++ /dev/null
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/cache.jar.bin b/solr/core/src/test-files/runtimecode/cache.jar.bin
deleted file mode 100644
index 0729896..0000000
--- a/solr/core/src/test-files/runtimecode/cache.jar.bin
+++ /dev/null
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/cache_v2.jar.bin b/solr/core/src/test-files/runtimecode/cache_v2.jar.bin
deleted file mode 100644
index 6105993..0000000
--- a/solr/core/src/test-files/runtimecode/cache_v2.jar.bin
+++ /dev/null
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/sig.txt b/solr/core/src/test-files/runtimecode/sig.txt
index 29dbb47..4ef8e9c 100644
--- a/solr/core/src/test-files/runtimecode/sig.txt
+++ b/solr/core/src/test-files/runtimecode/sig.txt
@@ -36,7 +36,6 @@
 
 L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1
 f/U3bOlMPINlSOM6LK3JpQ==
-L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==
 
 openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v2.jar.bin | openssl enc -base64
 
@@ -48,6 +47,15 @@
 a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEq
 DVLhQoL3WqYtQmLPti0G4Q==
 
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem cache.jar.bin | openssl enc -base64
+
+A2CDnReirpII005KRN1C3pvt4NM4kItsagQPNaa3ljj/5R3LKVgiPuNvqBsffU8n
+81LOAfr5VMyGFcb4QMHpyg==
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem cache_v2.jar.bin | openssl enc -base64
+
+SOrekHt+uup+z2z+nZU5indk2huRRfmbM+W+vQ0variHrcZEG9EXt5LuPFl8Ki9A
+hr6klMHdVP8nj4wuQhu/Hg==
 
 ====================sha512====================
 
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml b/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml
index a0d5238..555ee35 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema-tiny.xml
@@ -32,4 +32,6 @@
       <filter class="solr.LowerCaseFilterFactory"/>
     </analyzer>
   </fieldType>
+
+  <similarity class="${solr.similarity:solr.SchemaSimilarityFactory}"/>
 </schema>
diff --git a/solr/core/src/test-files/solr/collection1/conf/schema.xml b/solr/core/src/test-files/solr/collection1/conf/schema.xml
index e0a96cc..9f46cec 100644
--- a/solr/core/src/test-files/solr/collection1/conf/schema.xml
+++ b/solr/core/src/test-files/solr/collection1/conf/schema.xml
@@ -498,6 +498,14 @@
     </analyzer>
   </fieldType>
   <fieldType name="severityType" class="${solr.tests.EnumFieldType}" enumsConfig="enumsConfig.xml" enumName="severity"/>
+  
+  <fieldType name="binary" class="solr.BinaryField" />
+  <fieldType name="collation" class="solr.CollationField" language="en" />
+  <fieldType name="externalFile" class="solr.ExternalFileField" />
+  <fieldType name="icuCollation" class="solr.ICUCollationField" locale="en" />
+  <fieldType name="latLonPointSpatial" class="solr.LatLonPointSpatialField" />
+  <fieldType name="randomSort" class="solr.RandomSortField" />
+  <fieldType name="point" class="solr.PointType" subFieldSuffix="_coordinate" />
 
   <fieldType name="sortable_text" class="solr.SortableTextField">
     <analyzer>
@@ -632,6 +640,28 @@
   <field name="dateRemove" type="date" indexed="true" stored="true" multiValued="true"/>
   <field name="floatRemove" type="float" indexed="true" stored="true" multiValued="true"/>
 
+  <field name="binaryRemove" type="binary" indexed="true" stored="true" multiValued="true"/>
+  <field name="booleanRemove" type="boolean" indexed="true" stored="true" multiValued="true"/>
+  <field name="collationRemove" type="collation" indexed="true" stored="true" multiValued="true"/>
+  <field name="datePointRemove" type="pdate" indexed="true" stored="true" multiValued="true"/>
+  <field name="dateRangeRemove" type="dateRange" indexed="true" stored="true" multiValued="true"/>
+  <field name="doublePointRemove" type="pdouble" indexed="true" stored="true" multiValued="true"/>
+  <field name="externalFileRemove" type="externalFile" indexed="true" stored="true" multiValued="true"/>
+  <field name="floatPointRemove" type="pfloat" indexed="true" stored="true" multiValued="true"/>
+  <field name="icuCollationRemove" type="icuCollation" indexed="true" stored="true" multiValued="true"/>
+  <field name="intPointRemove" type="pint" indexed="true" stored="true" multiValued="true"/>
+  <field name="latLonPointSpatialRemove" type="latLonPointSpatial" indexed="true" stored="true" multiValued="true"/>
+  <field name="latLonRemove" type="location" indexed="true" stored="true" multiValued="true"/>
+  <field name="longPointRemove" type="plong" indexed="true" stored="true" multiValued="true"/>
+  <field name="point_0_coordinate" type="float" indexed="true" stored="true" multiValued="true"/>
+  <field name="point_1_coordinate" type="float" indexed="true" stored="true" multiValued="true"/>
+  <field name="pointRemove" type="point" indexed="true" stored="true" multiValued="true"/>
+  <field name="randomSortRemove" type="randomSort" indexed="true" stored="true" multiValued="true"/>
+  <field name="spatialRecursivePrefixTreeRemove" type="location_rpt" indexed="true" stored="true" multiValued="true"/>
+  <field name="stringRemove" type="string" indexed="true" stored="true" multiValued="true"/>
+  <field name="textRemove" type="text" indexed="true" stored="true" multiValued="true"/>
+  <field name="uuidRemove" type="uuid" indexed="true" stored="true" multiValued="true"/>
+
   <field name="nopositionstext" type="nopositions" indexed="true" stored="true"/>
 
   <field name="tlong" type="tlong" indexed="true" stored="true"/>
diff --git a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml b/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml
index 059e58f..0cdb6ac 100644
--- a/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml
+++ b/solr/core/src/test-files/solr/configsets/cloud-dynamic/conf/solrconfig.xml
@@ -29,6 +29,8 @@
 
   <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
 
+  <statsCache class="${solr.statsCache:}"/>
+
   <updateHandler class="solr.DirectUpdateHandler2">
     <commitWithin>
       <softCommit>${solr.commitwithin.softcommit:true}</softCommit>
diff --git a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java
index 795c3e8..fc995e3 100644
--- a/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java
+++ b/solr/core/src/test/org/apache/solr/SolrTestCaseJ4Test.java
@@ -19,6 +19,7 @@
 import java.io.File;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.solr.common.params.ModifiableSolrParams;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -61,4 +62,25 @@
   public void testCorrectCore() throws Exception {
     assertEquals("should be core1", "core1", h.getCore().getName());
   }
+
+  @Test
+  public void testParams() throws Exception {
+    final ModifiableSolrParams params = new ModifiableSolrParams();
+    assertEquals(params.toString(), params().toString());
+
+    params.add("q", "*:*");
+    assertEquals(params.toString(), params("q", "*:*").toString());
+
+    params.add("rows", "42");
+    assertEquals(params.toString(), params("q", "*:*", "rows", "42").toString());
+
+    expectThrows(RuntimeException.class, () -> {
+      params("parameterWithoutValue");
+    });
+
+    expectThrows(RuntimeException.class, () -> {
+      params("q", "*:*", "rows", "42", "parameterWithoutValue");
+    });
+  }
+
 }
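
The new testParams() pins down the behavior of the params(String...) test helper: alternating key/value arguments build the parameter set, and an odd argument count (a key without a value) fails with a RuntimeException. A minimal sketch of such a varargs helper, written independently of SolrTestCaseJ4 (the real helper returns ModifiableSolrParams, not a Map):

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ParamsHelperSketch {
      // Sketch only; mirrors the key/value pairing and the odd-count failure mode.
      static Map<String, String> params(String... kv) {
        if (kv.length % 2 != 0) {
          throw new RuntimeException("parameter without value: " + kv[kv.length - 1]);
        }
        Map<String, String> out = new LinkedHashMap<>();
        for (int i = 0; i < kv.length; i += 2) {
          out.put(kv[i], kv[i + 1]);
        }
        return out;
      }

      public static void main(String[] args) {
        System.out.println(params("q", "*:*", "rows", "42")); // {q=*:*, rows=42}
      }
    }
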
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java
new file mode 100644
index 0000000..85f9f5d
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestBaseStatsCacheCloud.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import java.lang.invoke.MethodHandles;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.function.Function;
+
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.CloudSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrInputDocument;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.search.similarities.CustomSimilarityFactory;
+import org.apache.solr.search.stats.StatsCache;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ *
+ */
+@Ignore("Abstract classes should not be executed as tests")
+public abstract class TestBaseStatsCacheCloud extends SolrCloudTestCase {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  protected int numNodes = 2;
+  protected String configset = "cloud-dynamic";
+
+  protected String collectionName = "collection_" + getClass().getSimpleName();
+
+  protected Function<Integer, SolrInputDocument> generator = i -> {
+    SolrInputDocument doc = new SolrInputDocument("id", "id-" + i);
+    if (i % 3 == 0) {
+      doc.addField("foo_t", "bar baz");
+    } else if (i % 3 == 1) {
+      doc.addField("foo_t", "bar");
+    } else {
+      // skip the field
+    }
+    return doc;
+  };
+
+  protected CloudSolrClient solrClient;
+
+  protected SolrClient control;
+
+  protected int NUM_DOCS = 100;
+
+  // implementation name
+  protected abstract String getImplementationName();
+
+  // does this implementation produce the same distrib scores as local ones?
+  protected abstract boolean assertSameScores();
+
+  @Before
+  public void setupCluster() throws Exception {
+    // create control core & client
+    System.setProperty("solr.statsCache", getImplementationName());
+    System.setProperty("solr.similarity", CustomSimilarityFactory.class.getName());
+    initCore("solrconfig-minimal.xml", "schema-tiny.xml");
+    control = new EmbeddedSolrServer(h.getCore());
+    // create cluster
+    configureCluster(numNodes) // 2 + random().nextInt(3)
+        .addConfig("conf", configset(configset))
+        .configure();
+    solrClient = cluster.getSolrClient();
+    createTestCollection();
+  }
+
+  protected void createTestCollection() throws Exception {
+    CollectionAdminRequest.createCollection(collectionName, "conf", 2, numNodes)
+        .setMaxShardsPerNode(2)
+        .process(solrClient);
+    indexDocs(solrClient, collectionName, NUM_DOCS, 0, generator);
+    indexDocs(control, "collection1", NUM_DOCS, 0, generator);
+  }
+
+  @After
+  public void tearDownCluster() {
+    System.clearProperty("solr.statsCache");
+    System.clearProperty("solr.similarity");
+  }
+
+  @Test
+  public void testBasicStats() throws Exception {
+    QueryResponse cloudRsp = solrClient.query(collectionName,
+        params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true"));
+    QueryResponse controlRsp = control.query("collection1",
+        params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + NUM_DOCS, "debug", "true"));
+
+    assertResponses(controlRsp, cloudRsp, assertSameScores());
+
+    // test after updates
+    indexDocs(solrClient, collectionName, NUM_DOCS, NUM_DOCS, generator);
+    indexDocs(control, "collection1", NUM_DOCS, NUM_DOCS, generator);
+
+    cloudRsp = solrClient.query(collectionName,
+        params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + (NUM_DOCS * 2)));
+    controlRsp = control.query("collection1",
+        params("q", "foo_t:\"bar baz\"", "fl", "*,score", "rows", "" + (NUM_DOCS * 2)));
+    assertResponses(controlRsp, cloudRsp, assertSameScores());
+
+    // check cache metrics
+    StatsCache.StatsCacheMetrics statsCacheMetrics = new StatsCache.StatsCacheMetrics();
+    for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
+      try (SolrClient client = getHttpSolrClient(jettySolrRunner.getBaseUrl().toString())) {
+        NamedList<Object> metricsRsp = client.request(
+            new GenericSolrRequest(SolrRequest.METHOD.GET, "/admin/metrics", params("group", "solr.core", "prefix", "CACHE.searcher.statsCache")));
+        assertNotNull(metricsRsp);
+        NamedList<Object> metricsPerReplica = (NamedList<Object>)metricsRsp.get("metrics");
+        assertNotNull("no metrics perReplica", metricsPerReplica);
+        //log.info("======= Node: " + jettySolrRunner.getBaseUrl());
+        //log.info("======= Metrics:\n" + Utils.toJSONString(metricsPerReplica));
+        metricsPerReplica.forEach((replica, metrics) -> {
+          Map<String, Object> values = (Map<String, Object>)((NamedList<Object>)metrics).get("CACHE.searcher.statsCache");
+          values.forEach((name, value) -> {
+            long val = value instanceof Number ? ((Number) value).longValue() : 0;
+            switch (name) {
+              case "lookups" :
+                statsCacheMetrics.lookups.add(val);
+                break;
+              case "returnLocalStats" :
+                statsCacheMetrics.returnLocalStats.add(val);
+                break;
+              case "mergeToGlobalStats" :
+                statsCacheMetrics.mergeToGlobalStats.add(val);
+                break;
+              case "missingGlobalFieldStats" :
+                statsCacheMetrics.missingGlobalFieldStats.add(val);
+                break;
+              case "missingGlobalTermStats" :
+                statsCacheMetrics.missingGlobalTermStats.add(val);
+                break;
+              case "receiveGlobalStats" :
+                statsCacheMetrics.receiveGlobalStats.add(val);
+                break;
+              case "retrieveStats" :
+                statsCacheMetrics.retrieveStats.add(val);
+                break;
+              case "sendGlobalStats" :
+                statsCacheMetrics.sendGlobalStats.add(val);
+                break;
+              case "useCachedGlobalStats" :
+                statsCacheMetrics.useCachedGlobalStats.add(val);
+                break;
+              case "statsCacheImpl" :
+                assertTrue("incorreect cache impl, expected" + getImplementationName() + " but was " + value,
+                    getImplementationName().endsWith((String)value));
+                break;
+              default:
+                fail("Unexpected cache metrics: key=" + name + ", value=" + value);
+            }
+          });
+        });
+      }
+    }
+    checkStatsCacheMetrics(statsCacheMetrics);
+  }
+
+  protected void checkStatsCacheMetrics(StatsCache.StatsCacheMetrics statsCacheMetrics) {
+    assertEquals(statsCacheMetrics.toString(), 0, statsCacheMetrics.missingGlobalFieldStats.intValue());
+    assertEquals(statsCacheMetrics.toString(), 0, statsCacheMetrics.missingGlobalTermStats.intValue());
+  }
+
+  protected void assertResponses(QueryResponse controlRsp, QueryResponse cloudRsp, boolean sameScores) throws Exception {
+    Map<String, SolrDocument> cloudDocs = new HashMap<>();
+    Map<String, SolrDocument> controlDocs = new HashMap<>();
+    cloudRsp.getResults().forEach(doc -> cloudDocs.put((String) doc.getFieldValue("id"), doc));
+    controlRsp.getResults().forEach(doc -> controlDocs.put((String) doc.getFieldValue("id"), doc));
+    assertEquals("number of docs", controlDocs.size(), cloudDocs.size());
+    for (Map.Entry<String, SolrDocument> entry : controlDocs.entrySet()) {
+      SolrDocument controlDoc = entry.getValue();
+      SolrDocument cloudDoc = cloudDocs.get(entry.getKey());
+      assertNotNull("missing cloud doc " + controlDoc, cloudDoc);
+      Float controlScore = (Float) controlDoc.getFieldValue("score");
+      Float cloudScore = (Float) cloudDoc.getFieldValue("score");
+      if (sameScores) {
+        assertEquals("cloud score differs from control", controlScore, cloudScore, controlScore * 0.01f);
+      } else {
+        assertFalse("cloud score the same as control", controlScore == cloudScore);
+      }
+    }
+  }
+
+  protected void indexDocs(SolrClient client, String collectionName, int num, int start, Function<Integer, SolrInputDocument> generator) throws Exception {
+
+    UpdateRequest ureq = new UpdateRequest();
+    for (int i = 0; i < num; i++) {
+      SolrInputDocument doc = generator.apply(i + start);
+      ureq.add(doc);
+    }
+    ureq.process(client, collectionName);
+    client.commit(collectionName);
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
index 1bc54f2..c082e37 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
@@ -27,7 +27,7 @@
 public class TestClusterProperties extends SolrCloudTestCase {
 
   private ClusterProperties props;
-
+  
   @BeforeClass
   public static void setupCluster() throws Exception {
     configureCluster(1).configure();
@@ -49,7 +49,7 @@
     CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient());
     assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "true"));
   }
-
+  
   @Test
   public void testSetPluginClusterProperty() throws Exception {
     String propertyName = ClusterProperties.EXT_PROPRTTY_PREFIX + "pluginA.propertyA";
@@ -57,7 +57,7 @@
         .process(cluster.getSolrClient());
     assertEquals("valueA", props.getClusterProperty(propertyName, null));
   }
-
+  
   @Test(expected = SolrException.class)
   public void testSetInvalidPluginClusterProperty() throws Exception {
     String propertyName = "pluginA.propertyA";
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java
index ca172e9..146ad82 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java
@@ -195,7 +195,7 @@
   }
 
 
-  public static byte[] readFile(String fname) throws IOException {
+  private byte[] readFile(String fname) throws IOException {
     byte[] buf = null;
     try (FileInputStream fis = new FileInputStream(getFile(fname))) {
       buf = new byte[fis.available()];
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java
similarity index 64%
copy from solr/core/src/test-files/runtimecode/MyDocCache.java
copy to solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java
index 406b950..cca209b 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestExactSharedStatsCacheCloud.java
@@ -14,22 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.cloud;
 
-package runtimecode;
+import org.apache.solr.search.stats.ExactSharedStatsCache;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
-
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
-
-  static String fld_name= "my_synthetic_fld_s";
+/**
+ *
+ */
+public class TestExactSharedStatsCacheCloud extends TestBaseStatsCacheCloud {
   @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
+  protected boolean assertSameScores() {
+    return true;
+  }
+
+  @Override
+  protected String getImplementationName() {
+    return ExactSharedStatsCache.class.getName();
   }
 }
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java
similarity index 64%
copy from solr/core/src/test-files/runtimecode/MyDocCache.java
copy to solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java
index 406b950..ba7e0d4 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestExactStatsCacheCloud.java
@@ -14,22 +14,23 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.cloud;
 
-package runtimecode;
+import org.apache.solr.search.stats.ExactStatsCache;
+import org.apache.solr.util.LogLevel;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
-
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
-
-  static String fld_name= "my_synthetic_fld_s";
+/**
+ *
+ */
+@LogLevel("org.apache.solr.search=DEBUG")
+public class TestExactStatsCacheCloud extends TestBaseStatsCacheCloud {
   @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
+  protected boolean assertSameScores() {
+    return true;
+  }
+
+  @Override
+  protected String getImplementationName() {
+    return ExactStatsCache.class.getName();
   }
 }
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java
similarity index 64%
rename from solr/core/src/test-files/runtimecode/MyDocCache.java
rename to solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java
index 406b950..e7ae992 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLRUStatsCacheCloud.java
@@ -14,22 +14,21 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.cloud;
 
-package runtimecode;
+import org.apache.solr.search.stats.LRUStatsCache;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
-
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
-
-  static String fld_name= "my_synthetic_fld_s";
+/**
+ *
+ */
+public class TestLRUStatsCacheCloud extends TestBaseStatsCacheCloud {
   @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
+  protected boolean assertSameScores() {
+    return true;
+  }
+
+  @Override
+  protected String getImplementationName() {
+    return LRUStatsCache.class.getName();
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java b/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java
new file mode 100644
index 0000000..fd44232
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/cloud/TestLocalStatsCacheCloud.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.cloud;
+
+import org.apache.solr.search.stats.LocalStatsCache;
+import org.apache.solr.search.stats.StatsCache;
+import org.apache.solr.util.LogLevel;
+
+/**
+ *
+ */
+@LogLevel("org.apache.solr.search=DEBUG")
+public class TestLocalStatsCacheCloud extends TestBaseStatsCacheCloud {
+
+  @Override
+  protected boolean assertSameScores() {
+    return false;
+  }
+
+  @Override
+  protected String getImplementationName() {
+    return LocalStatsCache.class.getName();
+  }
+
+  @Override
+  protected void checkStatsCacheMetrics(StatsCache.StatsCacheMetrics statsCacheMetrics) {
+    assertTrue("LocalStatsCache should produce missing stats: " + statsCacheMetrics,
+        statsCacheMetrics.missingGlobalFieldStats.intValue() > 0);
+    assertTrue("LocalStatsCache should produce missing stats: " + statsCacheMetrics,
+        statsCacheMetrics.missingGlobalTermStats.intValue() > 0);
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
index a5dedc3..68898fb 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/AutoAddReplicasIntegrationTest.java
@@ -24,6 +24,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.SolrServerException;
@@ -31,7 +32,9 @@
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.cloud.MiniSolrCloudCluster;
 import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.cloud.CollectionStatePredicate;
 import org.apache.solr.common.cloud.ClusterStateUtil;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
@@ -49,16 +52,15 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@org.apache.solr.util.LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.cloud.autoscaling.NodeLostTrigger=TRACE;org.apache.solr.cloud.Overseer=DEBUG;org.apache.solr.cloud.overseer=DEBUG")
 public class AutoAddReplicasIntegrationTest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
-  private static final String COLLECTION1 =  "testSimple1";
-  private static final String COLLECTION2 =  "testSimple2";
+
 
   protected String getConfigSet() {
     return "cloud-minimal";
   }
-
+  
   @Before
   public void setupCluster() throws Exception {
     configureCluster(3)
@@ -82,102 +84,267 @@
     }
   }
 
+  /**
+   * Test that the basic autoAddReplicas logic kicks in when a node is lost.
+   */
   @Test
   public void testSimple() throws Exception {
-    JettySolrRunner jetty1 = cluster.getJettySolrRunner(0);
-    JettySolrRunner jetty2 = cluster.getJettySolrRunner(1);
-    JettySolrRunner jetty3 = cluster.getJettySolrRunner(2);
-    CollectionAdminRequest.createCollection(COLLECTION1, "conf", 2, 2)
-        .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
-        .setAutoAddReplicas(true)
-        .setMaxShardsPerNode(2)
-        .process(cluster.getSolrClient());
+    final String COLLECTION = "test_simple";
+    final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader();
+    final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1);
+    final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2);
+    log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION,
+             jetty1.getNodeName(), jetty1.getLocalPort(),
+             jetty2.getNodeName(), jetty2.getLocalPort());
+             
+    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2)
+      .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
+      .setAutoAddReplicas(true)
+      .setMaxShardsPerNode(2)
+      .process(cluster.getSolrClient());
     
-    cluster.waitForActiveCollection(COLLECTION1, 2, 4);
+    cluster.waitForActiveCollection(COLLECTION, 2, 4);
     
-    CollectionAdminRequest.createCollection(COLLECTION2, "conf", 2, 2)
-        .setCreateNodeSet(jetty2.getNodeName()+","+jetty3.getNodeName())
-        .setAutoAddReplicas(false)
-        .setMaxShardsPerNode(2)
-        .process(cluster.getSolrClient());
-    
-    cluster.waitForActiveCollection(COLLECTION2, 2, 4);
-    
-    // the number of cores in jetty1 (5) will be larger than jetty3 (1)
-    CollectionAdminRequest.createCollection("testSimple3", "conf", 3, 1)
-        .setCreateNodeSet(jetty1.getNodeName())
-        .setAutoAddReplicas(false)
-        .setMaxShardsPerNode(3)
-        .process(cluster.getSolrClient());
-
-    cluster.waitForActiveCollection("testSimple3", 3, 3);
-    
-    ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader();
-
     // start the tests
-    JettySolrRunner lostJetty = random().nextBoolean() ? cluster.getJettySolrRunner(0) : cluster.getJettySolrRunner(1);
+    JettySolrRunner lostJetty = random().nextBoolean() ? jetty1 : jetty2;
     String lostNodeName = lostJetty.getNodeName();
-    List<Replica> replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION1, zkStateReader, lostNodeName);
+    List<Replica> replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName);
+    log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.stop();
+    
+    cluster.waitForJettyToStop(lostJetty);
+    waitForNodeLeave(lostNodeName);
+    
+    waitForState(COLLECTION + "=(2,4) w/o down replicas",
+                 COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS);
+                 
+    checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION);
+    
+    log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.start();
+    
+    waitForNodeLive(lostJetty);
+    
+    assertTrue("Timeout waiting for all live and active",
+               ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000));
+
+  }
+
+  /**
+   * Test that the basic autoAddReplicas logic is <b>not</b> used if the cluster prop for it is disabled
+   * (even if sys prop is set after collection is created)
+   */
+  @Test
+  public void testClusterPropOverridesCollectionProp() throws Exception {
+    final String COLLECTION = "test_clusterprop";
+    final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader();
+    final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1);
+    final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2);
+
+    log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION,
+             jetty1.getNodeName(), jetty1.getLocalPort(),
+             jetty2.getNodeName(), jetty2.getLocalPort());
+             
+    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2)
+      .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
+      .setAutoAddReplicas(true)
+      .setMaxShardsPerNode(2)
+      .process(cluster.getSolrClient());
+    
+    cluster.waitForActiveCollection(COLLECTION, 2, 4);
+
+    // check cluster property is considered
+    disableAutoAddReplicasInCluster();
+
+    JettySolrRunner lostJetty = random().nextBoolean() ? jetty1 : jetty2;
+    String lostNodeName = lostJetty.getNodeName();
+    List<Replica> replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName);
+    
+    log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
     lostJetty.stop();
     
     cluster.waitForJettyToStop(lostJetty);
     
     waitForNodeLeave(lostNodeName);
     
-    // ensure that 2 shards have 2 active replicas and only 4 replicas in total
-    // i.e. old replicas have been deleted.
-    // todo remove the condition for total replicas == 4 after SOLR-11591 is fixed
-    waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, (liveNodes, collectionState) -> clusterShape(2, 4).matches(liveNodes, collectionState)
-        && collectionState.getReplicas().size() == 4, 90, TimeUnit.SECONDS);
-    checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION1);
+    waitForState(COLLECTION + "=(2,2)", COLLECTION,
+                 clusterShape(2, 2), 90, TimeUnit.SECONDS);
+                 
+    
+    log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
     lostJetty.start();
     
-    cluster.waitForAllNodes(30);
+    waitForNodeLive(lostJetty);
     
-    assertTrue("Timeout waiting for all live and active", ClusterStateUtil.waitForAllActiveAndLiveReplicas(cluster.getSolrClient().getZkStateReader(), 90000));
+    assertTrue("Timeout waiting for all live and active",
+               ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000));
+    
+    waitForState(COLLECTION + "=(2,4) w/o down replicas",
+                 COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS);
 
-    // check cluster property is considered
-    disableAutoAddReplicasInCluster();
-    lostNodeName = jetty3.getNodeName();
-    jetty3.stop();
-    
-    cluster.waitForJettyToStop(jetty3);
-    
-    waitForNodeLeave(lostNodeName);
-    
-    waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, clusterShape(2, 2));
-    jetty3.start();
-    waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, clusterShape(2, 4));
-    waitForState("Waiting for collection " + COLLECTION2, COLLECTION2, clusterShape(2, 4));
-    enableAutoAddReplicasInCluster();
+  }
 
+  /**
+   * Test that we can modify a collection after creation to add autoAddReplicas.
+   */
+  @Test
+  public void testAddCollectionPropAfterCreation() throws Exception {
+    final String COLLECTION = "test_addprop";
+    final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader();
+    final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1);
+    final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2);
 
-    // test for multiple collections
+    log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION,
+             jetty1.getNodeName(), jetty1.getLocalPort(),
+             jetty2.getNodeName(), jetty2.getLocalPort());
+             
+    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2)
+      .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
+      .setAutoAddReplicas(false) // NOTE: false
+      .setMaxShardsPerNode(2)
+      .process(cluster.getSolrClient());
+    
+    cluster.waitForActiveCollection(COLLECTION, 2, 4);
+    
+    log.info("Modifying {} to use autoAddReplicas", COLLECTION);
     new CollectionAdminRequest.AsyncCollectionAdminRequest(CollectionParams.CollectionAction.MODIFYCOLLECTION) {
       @Override
       public SolrParams getParams() {
         ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
-        params.set("collection", COLLECTION2);
+        params.set("collection", COLLECTION);
         params.set("autoAddReplicas", true);
         return params;
       }
     }.process(cluster.getSolrClient());
 
-    lostNodeName = jetty2.getNodeName();
-    replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION2, zkStateReader, lostNodeName);
+    JettySolrRunner lostJetty = random().nextBoolean() ? jetty1 : jetty2;
+    String lostNodeName = lostJetty.getNodeName();
+    List<Replica> replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName);
+
+    log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.stop();
     
-    jetty2.stop();
-    
-    cluster.waitForJettyToStop(jetty2);
+    cluster.waitForJettyToStop(lostJetty);
     
     waitForNodeLeave(lostNodeName);
-    waitForState("Waiting for collection " + COLLECTION1, COLLECTION1, clusterShape(2, 4), 45, TimeUnit.SECONDS);
-    waitForState("Waiting for collection " + COLLECTION2, COLLECTION2, clusterShape(2, 4), 45, TimeUnit.SECONDS);
-    checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION2);
 
-    // overseer failover test..
+    waitForState(COLLECTION + "=(2,4) w/o down replicas",
+                 COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS);
+    checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION);
+    
+    log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.start();
+    
+    waitForNodeLive(lostJetty);
+    
+    assertTrue("Timeout waiting for all live and active",
+               ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000));
   }
 
+  /**
+   * Test a specific sequence of problematic events:
+   * <ul>
+   *  <li>create a collection with autoAddReplicas=<b>false</b></li>
+   *  <li>stop a nodeX in use by the collection</li>
+   *  <li>re-start nodeX</li>
+   *  <li>set autoAddReplicas=<b>true</b></li>
+   *  <li>re-stop nodeX</li>
+   * </ul>
+   */
+  @Test
+  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13811")
+  public void testRapidStopStartStopWithPropChange() throws Exception {
+
+    // This is the collection we'll be focused on in our testing...
+    final String COLLECTION = "test_stoptwice";
+    // This is a collection we'll use as a "marker" to ensure we "wait" for the
+    // autoAddReplicas logic (via NodeLostTrigger) to kick in at least once before proceeding...
+    final String ALT_COLLECTION = "test_dummy";
+    
+    final ZkStateReader zkStateReader = cluster.getSolrClient().getZkStateReader();
+    final JettySolrRunner jetty1 = cluster.getJettySolrRunner(1);
+    final JettySolrRunner jetty2 = cluster.getJettySolrRunner(2);
+
+    log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", COLLECTION,
+             jetty1.getNodeName(), jetty1.getLocalPort(),
+             jetty2.getNodeName(), jetty2.getLocalPort());
+             
+    CollectionAdminRequest.createCollection(COLLECTION, "conf", 2, 2)
+      .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
+      .setAutoAddReplicas(false) // NOTE: false
+      .setMaxShardsPerNode(2)
+      .process(cluster.getSolrClient());
+    
+    log.info("Creating {} using jetty1:{}/{} and jetty2:{}/{}", ALT_COLLECTION,
+             jetty1.getNodeName(), jetty1.getLocalPort(),
+             jetty2.getNodeName(), jetty2.getLocalPort());
+             
+    CollectionAdminRequest.createCollection(ALT_COLLECTION, "conf", 2, 2)
+      .setCreateNodeSet(jetty1.getNodeName()+","+jetty2.getNodeName())
+      .setAutoAddReplicas(true) // NOTE: true
+      .setMaxShardsPerNode(2)
+      .process(cluster.getSolrClient());
+    
+    cluster.waitForActiveCollection(COLLECTION, 2, 4);
+    cluster.waitForActiveCollection(ALT_COLLECTION, 2, 4);
+
+    JettySolrRunner lostJetty = random().nextBoolean() ? jetty1 : jetty2;
+    String lostNodeName = lostJetty.getNodeName();
+    List<Replica> replacedHdfsReplicas = getReplacedSharedFsReplicas(COLLECTION, zkStateReader, lostNodeName);
+
+    log.info("Stopping random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.stop();
+    
+    cluster.waitForJettyToStop(lostJetty);
+    waitForNodeLeave(lostNodeName);
+    
+    // ensure that our marker collection indicates that the autoAddReplicas logic
+    // has detected the down node and done some processing
+    waitForState(ALT_COLLECTION + "=(2,4) w/o down replicas",
+                 ALT_COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS);
+
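+    // COLLECTION still has autoAddReplicas=false, so no replacement replicas should have been created for it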
+    waitForState(COLLECTION + "=(2,2)", COLLECTION, clusterShape(2, 2));
+    
+    log.info("Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.start();
+    // save time, don't bother waiting for lostJetty to start until after updating collection prop...
+    
+    log.info("Modifying {} to use autoAddReplicas", COLLECTION);
+    new CollectionAdminRequest.AsyncCollectionAdminRequest(CollectionParams.CollectionAction.MODIFYCOLLECTION) {
+      @Override
+      public SolrParams getParams() {
+        ModifiableSolrParams params = (ModifiableSolrParams) super.getParams();
+        params.set("collection", COLLECTION);
+        params.set("autoAddReplicas", true);
+        return params;
+      }
+    }.process(cluster.getSolrClient());
+
+    // make sure lostJetty is fully up before stopping again...
+    waitForNodeLive(lostJetty);
+
+    log.info("Re-Stopping (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.stop();
+    
+    cluster.waitForJettyToStop(lostJetty);
+    waitForNodeLeave(lostNodeName);
+
+    // TODO: this is the problematic situation...
+    // whether or not NodeLostTrigger noticed that lostJetty was re-started and shut down *again*,
+    // and whether the autoAddReplicas=true set since the last time lostJetty was shut down is respected
+    waitForState(COLLECTION + "=(2,4) w/o down replicas",
+                 COLLECTION, clusterShapeNoDownReplicas(2,4), 90, TimeUnit.SECONDS);
+    checkSharedFsReplicasMovedCorrectly(replacedHdfsReplicas, zkStateReader, COLLECTION);
+    
+    log.info("Re-Re-starting (same) random node: {} / {}", lostNodeName, lostJetty.getLocalPort());
+    lostJetty.start();
+    
+    waitForNodeLive(lostJetty);
+    
+    assertTrue("Timeout waiting for all live and active",
+               ClusterStateUtil.waitForAllActiveAndLiveReplicas(zkStateReader, 90000));
+  }
+  
   private void disableAutoAddReplicasInCluster() throws SolrServerException, IOException {
     Map m = makeMap(
         "action", CollectionParams.CollectionAction.CLUSTERPROP.toLower(),
@@ -225,13 +392,44 @@
     return replacedHdfsReplicas;
   }
 
-  private void waitForNodeLeave(String lostNodeName) throws InterruptedException {
+  /**
+   * {@link MiniSolrCloudCluster#waitForNode} doesn't check isRunning first, and we don't want to
+   * use {@link MiniSolrCloudCluster#waitForAllNodes} because we don't want to waste cycles checking
+   * nodes we aren't messing with.
+   */
+  private void waitForNodeLive(final JettySolrRunner jetty)
+    throws InterruptedException, TimeoutException, IOException {
+    log.info("waitForNodeLive: {}/{}", jetty.getNodeName(), jetty.getLocalPort());
+    
+    TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);
+    while(!timeout.hasTimedOut()) {
+      if (jetty.isRunning()) {
+        break;
+      }
+      try {
+        Thread.sleep(100);
+      } catch (InterruptedException e) {
+        // ignore
+      }
+    }
+    if (timeout.hasTimedOut()) {
+      throw new TimeoutException("Waiting for Jetty to start timed out");
+    }
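+    // the Jetty instance is running; now wait for the node to register as live in the cluster state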
+    cluster.waitForNode(jetty, 30);
+  }
+    
+  private void waitForNodeLeave(String lostNodeName) throws InterruptedException, TimeoutException {
     log.info("waitForNodeLeave: {}", lostNodeName);
     ZkStateReader reader = cluster.getSolrClient().getZkStateReader();
-    TimeOut timeOut = new TimeOut(20, TimeUnit.SECONDS, TimeSource.NANO_TIME);
-    while (reader.getClusterState().getLiveNodes().contains(lostNodeName)) {
-      Thread.sleep(100);
-      if (timeOut.hasTimedOut()) fail("Wait for " + lostNodeName + " to leave failed!");
-    }
+    reader.waitForLiveNodes(30, TimeUnit.SECONDS, (o, n) -> !n.contains(lostNodeName));
   }
+
+  
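+  /**
+   * Like clusterShape(shards, replicas) but also requires the total replica count to match exactly,
+   * so any lingering "down" replicas cause the predicate to fail.
+   */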
+  private static CollectionStatePredicate clusterShapeNoDownReplicas(final int expectedShards,
+                                                                     final int expectedReplicas) {
+    return (liveNodes, collectionState)
+      -> (clusterShape(expectedShards, expectedReplicas).matches(liveNodes, collectionState)
+          && collectionState.getReplicas().size() == expectedReplicas);
+  }
+  
 }
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java
index c775dca..849c5c8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/NodeMarkersRegistrationTest.java
@@ -20,12 +20,14 @@
 import java.lang.invoke.MethodHandles;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.Set;
 import java.util.SortedSet;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
 import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.solr.client.solrj.SolrRequest;
@@ -41,6 +43,7 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.util.LogLevel;
 import org.apache.solr.util.TimeOut;
 import org.apache.zookeeper.KeeperException;
@@ -50,6 +53,10 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
+
 @LogLevel("org.apache.solr.cloud.autoscaling=DEBUG;org.apache.solr.client.solrj.cloud.autoscaling=DEBUG")
 public class NodeMarkersRegistrationTest extends SolrCloudTestCase {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -81,7 +88,7 @@
     return triggerFiredLatch;
   }
 
-  @AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13376")
+  //@AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-13376")
   @Test
   public void testNodeMarkersRegistration() throws Exception {
     triggerFiredLatch = new CountDownLatch(1);
@@ -135,10 +142,16 @@
     String pathLost = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + overseerLeader;
     
     TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);
+    AtomicBoolean markerInactive = new AtomicBoolean();
     try {
-      timeout.waitFor("zk path to go away", () -> {
+      timeout.waitFor("nodeLost marker to get inactive", () -> {
         try {
-          return !zkClient().exists(pathLost, true);
+          if (!zkClient().exists(pathLost, true)) {
+            throw new RuntimeException("marker " + pathLost + " should exist!");
+          }
+          Map<String, Object> markerData = Utils.getJson(zkClient(), pathLost, true);
+          markerInactive.set(markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE));
+          return markerInactive.get();
         } catch (KeeperException e) {
           throw new RuntimeException(e);
         } catch (InterruptedException e) {
@@ -149,8 +162,8 @@
       // okay
     }
 
-    // verify that a znode does NOT exist - the new overseer cleaned up existing nodeLost markers
-    assertFalse("Path " + pathLost + " exists", zkClient().exists(pathLost, true));
+    // verify that the marker is inactive - the new overseer should deactivate markers once they are processed
+    assertTrue("Marker " + pathLost + " still active!", markerInactive.get());
 
     listener.reset();
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java
index ea645c6..1258c6d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/sim/TestSimTriggerIntegration.java
@@ -40,6 +40,7 @@
 import org.apache.solr.client.solrj.cloud.autoscaling.AutoScalingConfig;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
 import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventProcessorStage;
+import org.apache.solr.client.solrj.cloud.autoscaling.TriggerEventType;
 import org.apache.solr.client.solrj.request.CollectionAdminRequest;
 import org.apache.solr.cloud.CloudTestUtils;
 import org.apache.solr.cloud.CloudUtil;
@@ -62,6 +63,7 @@
 import org.apache.solr.common.cloud.LiveNodesListener;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.util.TimeSource;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.util.LogLevel;
 import org.apache.solr.util.TimeOut;
@@ -74,6 +76,10 @@
 
 import com.google.common.util.concurrent.AtomicDouble;
 
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_ACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_INACTIVE;
+import static org.apache.solr.cloud.autoscaling.OverseerTriggerThread.MARKER_STATE;
+
 /**
  * An end-to-end integration test for triggers
  */
@@ -864,10 +870,6 @@
 
   public static class TestEventMarkerAction extends TriggerActionBase {
 
-    public TestEventMarkerAction() {
-      actionConstructorCalled.countDown();
-    }
-
     @Override
     public void process(TriggerEvent event, ActionContext actionContext) {
       boolean locked = lock.tryLock();
@@ -887,19 +889,29 @@
     }
 
     @Override
-    public void configure(SolrResourceLoader loader, SolrCloudManager cloudManager, Map<String, Object> args) throws TriggerValidationException {
+    public void init() throws Exception {
       log.info("TestEventMarkerAction init");
-      actionInitCalled.countDown();
-      super.configure(loader, cloudManager, args);
+      super.init();
+    }
+  }
+
+  public static class AssertingListener extends TriggerListenerBase {
+    @Override
+    public void onEvent(TriggerEvent event, TriggerEventProcessorStage stage, String actionName, ActionContext context, Throwable error, String message) throws Exception {
+      if (!Thread.currentThread().getName().startsWith("ScheduledTrigger")) {
+        // guard against future changes: this listener must be invoked from the scheduled trigger thread pool
+        throw new IllegalThreadStateException("AssertingListener should have been invoked by a thread from the scheduled trigger thread pool");
+      }
+      log.debug(" --- listener fired for event: {}, stage: {}", event, stage);
+      listenerEventLatch.await();
+      log.debug(" --- listener wait complete for event: {}, stage: {}", event, stage);
     }
   }
 
   @Test
   public void testNodeMarkersRegistration() throws Exception {
-    // for this test we want to create two triggers so we must assert that the actions were created twice
-    actionInitCalled = new CountDownLatch(2);
-    // similarly we want both triggers to fire
-    triggerFiredLatch = new CountDownLatch(2);
+    triggerFiredLatch = new CountDownLatch(1);
+    listenerEventLatch = new CountDownLatch(1);
     TestLiveNodesListener listener = registerLiveNodesListener();
 
     SolrClient solrClient = cluster.simGetSolrClient();
@@ -912,7 +924,7 @@
     assertTrue("cluster onChange listener didn't execute even after await()ing an excessive amount of time",
                listener.onChangeLatch.await(60, TimeUnit.SECONDS));
     assertEquals(1, listener.addedNodes.size());
-    assertEquals(node, listener.addedNodes.iterator().next());
+    assertTrue(listener.addedNodes.toString(), listener.addedNodes.contains(node));
     // verify that a znode doesn't exist (no trigger)
     String pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node;
     assertFalse("Path " + pathAdded + " was created but there are no nodeAdded triggers",
@@ -931,22 +943,28 @@
     assertEquals(0, listener.addedNodes.size());
     // wait until the new overseer is up
     cluster.getTimeSource().sleep(5000);
-    // verify that a znode does NOT exist - there's no nodeLost trigger,
-    // so the new overseer cleaned up existing nodeLost markers
-    
+
     String pathLost = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + overseerLeader;
     
     TimeOut timeout = new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME);
-    timeout.waitFor("Path " + pathLost + " exists", () -> {
+    AtomicBoolean markerInactive = new AtomicBoolean();
+    timeout.waitFor("nodeLost marker to get inactive", () -> {
       try {
-        return !cluster.getDistribStateManager().hasData(pathLost);
+        if (!cluster.getDistribStateManager().hasData(pathLost)) {
+          throw new RuntimeException("marker " + pathLost + " should exist!");
+        }
+        Map<String, Object> markerData = Utils.getJson(cluster.getDistribStateManager(), pathLost);
+        markerInactive.set(markerData.getOrDefault(MARKER_STATE, MARKER_ACTIVE).equals(MARKER_INACTIVE));
+        return markerInactive.get();
+
       } catch (IOException | KeeperException | InterruptedException e) {
         e.printStackTrace();
         throw new RuntimeException(e);
       }
     });
 
-    assertFalse("Path " + pathLost + " exists", cluster.getDistribStateManager().hasData(pathLost));
+    // verify that the marker is inactive - the new overseer should deactivate markers once they are processed
+    assertTrue("Marker " + pathLost + " still active!", markerInactive.get());
 
     listener.reset();
 
@@ -956,7 +974,7 @@
     assertAutoScalingRequest
       ("{" +
        "'set-trigger' : {" +
-       "'name' : 'node_added_trigger'," +
+       "'name' : 'node_added_triggerMR'," +
        "'event' : 'nodeAdded'," +
        "'waitFor' : '1s'," +
        "'enabled' : true," +
@@ -966,14 +984,25 @@
     assertAutoScalingRequest
       ("{" +
         "'set-trigger' : {" +
-        "'name' : 'node_lost_trigger'," +
+        "'name' : 'node_lost_triggerMR'," +
         "'event' : 'nodeLost'," +
         "'waitFor' : '1s'," +
         "'enabled' : true," +
         "'actions' : [{'name':'test','class':'" + TestEventMarkerAction.class.getName() + "'}]" +
        "}}");
 
+    assertAutoScalingRequest(
+        "{\n" +
+            "  \"set-listener\" : {\n" +
+            "    \"name\" : \"listener_node_added_triggerMR\",\n" +
+            "    \"trigger\" : \"node_added_triggerMR\",\n" +
+            "    \"stage\" : \"STARTED\",\n" +
+            "    \"class\" : \"" + AssertingListener.class.getName()  + "\"\n" +
+            "  }\n" +
+            "}"
+    );
     assertAutoscalingUpdateComplete();
+
     overseerLeader = cluster.getSimClusterStateProvider().simGetOverseerLeader();
 
     // create another node
@@ -987,41 +1016,51 @@
     pathAdded = ZkStateReader.SOLR_AUTOSCALING_NODE_ADDED_PATH + "/" + node1;
     assertTrue("Path " + pathAdded + " wasn't created", cluster.getDistribStateManager().hasData(pathAdded));
 
+    listenerEventLatch.countDown(); // let the trigger thread continue
+
+    assertTrue(triggerFiredLatch.await(10, TimeUnit.SECONDS));
+
+    // kill this node
     listener.reset();
     events.clear();
-    // one nodeAdded (not cleared yet) and one nodeLost
-    triggerFiredLatch = new CountDownLatch(2);
+    triggerFiredLatch = new CountDownLatch(1);
+
+    cluster.simRemoveNode(node1, true);
+    if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) {
+      fail("onChange listener didn't execute on cluster change");
+    }
+    assertEquals(1, listener.lostNodes.size());
+    assertEquals(node1, listener.lostNodes.iterator().next());
+    // verify that a znode exists
+    String pathLost2 = ZkStateReader.SOLR_AUTOSCALING_NODE_LOST_PATH + "/" + node1;
+    assertTrue("Path " + pathLost2 + " wasn't created", cluster.getDistribStateManager().hasData(pathLost2));
+
+    listenerEventLatch.countDown(); // let the trigger thread continue
+
+    assertTrue(triggerFiredLatch.await(10, TimeUnit.SECONDS));
+
+    // triggers don't remove markers
+    assertTrue("Path " + pathLost2 + " should still exist", cluster.getDistribStateManager().hasData(pathLost2));
+
+    listener.reset();
+    events.clear();
+    triggerFiredLatch = new CountDownLatch(1);
     // kill overseer again
     log.info("====== KILL OVERSEER 2");
-    cluster.simRestartOverseer(overseerLeader);
-    assertTrue("cluster onChange listener didn't execute even after await()ing an excessive amount of time",
-               listener.onChangeLatch.await(60, TimeUnit.SECONDS));
-
-    assertAutoscalingUpdateComplete();
-
-    assertTrue("trigger did not fire event after await()ing an excessive amount of time",
-               triggerFiredLatch.await(60, TimeUnit.SECONDS));
-    assertEquals(2, events.size());
-    TriggerEvent nodeAdded = null;
-    TriggerEvent nodeLost = null;
-    for (TriggerEvent ev : events) {
-      switch (ev.getEventType()) {
-        case NODEADDED:
-          nodeAdded = ev;
-          break;
-        case NODELOST:
-          nodeLost = ev;
-          break;
-        default:
-          fail("unexpected event type: " + ev);
-      }
+    cluster.simRemoveNode(overseerLeader, true);
+    if (!listener.onChangeLatch.await(10, TimeUnit.SECONDS)) {
+      fail("onChange listener didn't execute on cluster change");
     }
-    assertNotNull("expected nodeAdded event", nodeAdded);
-    assertNotNull("expected nodeLost event", nodeLost);
-    List<String> nodeNames = (List<String>)nodeLost.getProperty(TriggerEvent.NODE_NAMES);
+
+
+    if (!triggerFiredLatch.await(20, TimeUnit.SECONDS)) {
+      fail("Trigger should have fired by now");
+    }
+    assertEquals(1, events.size());
+    TriggerEvent ev = events.iterator().next();
+    List<String> nodeNames = (List<String>) ev.getProperty(TriggerEvent.NODE_NAMES);
     assertTrue(nodeNames.contains(overseerLeader));
-    nodeNames = (List<String>)nodeAdded.getProperty(TriggerEvent.NODE_NAMES);
-    assertTrue(nodeNames.contains(node1));
+    assertEquals(TriggerEventType.NODELOST, ev.getEventType());
   }
 
   static final Map<String, List<CapturedEvent>> listenerEvents = new ConcurrentHashMap<>();
diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
index ce1f68e..4a0f1ba 100644
--- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
+++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
@@ -36,6 +36,7 @@
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.mock;
@@ -62,7 +63,6 @@
   boolean blobFetched = false;
   String blobKey = "";
   String url = null;
-  String sha256 = null;
   ByteBuffer filecontent = null;
   
   @BeforeClass
@@ -92,14 +92,6 @@
       }
 
       @Override
-      BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha256) {
-        if(!Objects.equals(sha256, BlobRepositoryMockingTest.this.sha256)) return null;
-        blobKey = key;
-        blobFetched = true;
-        return new BlobContentRef(new BlobContent(key, filecontent)) ;
-      }
-
-      @Override
       ConcurrentHashMap<String, BlobContent> createMap() {
         return mapMock;
       }
@@ -138,13 +130,21 @@
     when(mockContainer.isZooKeeperAware()).thenReturn(true);
     filecontent = TestDynamicLoading.getFileContent("runtimecode/runtimelibs_v2.jar.bin");
     url = "http://localhost:8080/myjar/location.jar";
-    sha256 = "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4";
     BlobRepository.BlobContentRef ref = repository.getBlobIncRef( "filefoo",null,url,
-        "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4");
+        "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417");
     assertTrue("filefoo".equals(blobKey));
     assertTrue(blobFetched);
     assertNotNull(ref.blob);
     assertEquals(filecontent, ref.blob.get());
+    verify(mockContainer).isZooKeeperAware();
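+    // a mismatched sha512 should be rejected with an exception reporting the expected vs actual hash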
+    try {
+      repository.getBlobIncRef( "filefoo",null,url,
+          "WRONG-SHA512-KEY");
+      fail("expected exception");
+    } catch (Exception e) {
+      assertTrue(e.getMessage().contains(" expected sha512 hash : WRONG-SHA512-KEY , actual :"));
+    }
+
     url = null;
     filecontent = null;
   }
diff --git a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java
index eb88dbb..0c17553 100644
--- a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java
+++ b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java
@@ -34,6 +34,7 @@
 import org.apache.lucene.analysis.ngram.NGramFilterFactory;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.lucene.analysis.util.TokenFilterFactory;
+import org.apache.lucene.analysis.util.TokenizerFactory;
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.handler.admin.LukeRequestHandler;
@@ -42,6 +43,7 @@
 import org.apache.solr.util.plugin.SolrCoreAware;
 
 import static org.apache.solr.core.SolrResourceLoader.assertAwareCompatibility;
+import static org.apache.solr.core.SolrResourceLoader.clearCache;
 import static org.hamcrest.core.Is.is;
 
 public class ResourceLoaderTest extends SolrTestCaseJ4 {
@@ -206,4 +208,21 @@
     // TODO: How to check that a warning was printed to log file?
     loader.close();    
   }
+
+  public void testCacheWrongType() {
+    clearCache();
+
+    SolrResourceLoader loader = new SolrResourceLoader();
+    Class[] params = { Map.class };
+    Map<String,String> args = Map.of("minGramSize", "1", "maxGramSize", "2");
+    final String className = "solr.NGramTokenizerFactory";
+
+    // We could fail here since the class name and expected type don't match, but instead we try to infer what the user actually meant
+    TokenFilterFactory tff = loader.newInstance(className, TokenFilterFactory.class, new String[0], params, new Object[]{new HashMap<>(args)});
+    assertNotNull("Did not load TokenFilter when asking for corresponding Tokenizer", tff);
+
+    // This should work, but won't if the earlier call succeeded by corrupting the cache
+    TokenizerFactory tf = loader.newInstance(className, TokenizerFactory.class, new String[0], params, new Object[]{new HashMap<>(args)});
+    assertNotNull("Did not load Tokenizer after bad call earlier", tf);
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
index 3a8f2e6..22ee299 100644
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
+++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
@@ -48,7 +48,6 @@
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
   //17-Aug-2018 commented @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
   public void testDynamicLoading() throws Exception {
-
     System.setProperty("enable.runtime.lib", "true");
     setupRestTestHarnesses();
 
@@ -98,7 +97,7 @@
 
 
     assertNotNull(map = (Map) map.get("error"));
-    assertTrue("full output " + map, map.get("msg").toString().contains("no such resource available: colltest/1" ));
+    assertTrue("full output " + map, map.get("msg").toString().contains("no such blob or version available: colltest/1" ));
     payload = " {\n" +
         "  'set' : {'watched': {" +
         "                    'x':'X val',\n" +
@@ -129,6 +128,9 @@
     }
     ByteBuffer jar = null;
 
+//     jar = persistZip("/tmp/runtimelibs.jar.bin", TestDynamicLoading.class, RuntimeLibReqHandler.class, RuntimeLibResponseWriter.class, RuntimeLibSearchComponent.class);
+//    if(true) return;
+
     jar = getFileContent("runtimecode/runtimelibs.jar.bin");
     TestBlobHandler.postAndCheck(cloudClient, baseURL, blobName, jar, 1);
 
@@ -282,8 +284,4 @@
     return bos.getByteBuffer();
   }
 
-/*  public static void main(String[] args) throws Exception {
-    persistZip("/tmp/runtimelibs_v3.jar.bin", TestDynamicLoading.class, RuntimeLibReqHandler.class, RuntimeLibResponseWriter.class, RuntimeLibSearchComponent.class);
-    if(true) return;
-  }*/
 }
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java
index 8fec3a4..575cf9e 100644
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java
+++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java
@@ -77,7 +77,7 @@
     try {
       String payload = "{\n" +
           "'add-runtimelib' : { 'name' : 'urljar', url : 'http://localhost:" + port + "/jar1.jar'" +
-          "  'sha256':'e01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" +
+          "  'sha512':'e01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" +
           "}";
       RestTestHarness client = randomRestTestHarness();
       TestSolrConfigHandler.runConfigCommandExpectFailure(client, "/config", payload, "Invalid jar");
@@ -85,7 +85,7 @@
 
       payload = "{\n" +
           "'add-runtimelib' : { 'name' : 'urljar', url : 'http://localhost:" + port + "/jar1.jar'" +
-          "  'sha256':'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}" +
+          "  'sha512':'d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" +
           "}";
       client = randomRestTestHarness();
       TestSolrConfigHandler.runConfigCommand(client, "/config", payload);
@@ -93,8 +93,8 @@
           null,
           "/config/overlay",
           null,
-          Arrays.asList("overlay", "runtimeLib", "urljar", "sha256"),
-          "e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc", 120);
+          Arrays.asList("overlay", "runtimeLib", "urljar", "sha512"),
+          "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420", 120);
 
       payload = "{\n" +
           "'create-requesthandler' : { 'name' : '/runtime', 'class': 'org.apache.solr.core.RuntimeLibReqHandler', 'runtimeLib' : true}" +
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index 5f6a1c2..17494e0 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -41,7 +41,7 @@
 import org.apache.solr.handler.TestSolrConfigHandlerConcurrent;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.search.SolrCacheHolder;
+import org.apache.solr.search.SolrCache;
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestBase;
 import org.apache.solr.util.RestTestHarness;
@@ -543,8 +543,8 @@
         HashMap m = new HashMap();
         rsp.add("caches", m);
         for (String c : caches) {
-          SolrCacheHolder cache = (SolrCacheHolder) req.getSearcher().getCache(c);
-          if(cache != null) m.put(c, cache.get().getClass().getName());
+          SolrCache cache = req.getSearcher().getCache(c);
+          if(cache != null) m.put(c, cache.getClass().getName());
         }
       }
     }
diff --git a/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java
new file mode 100644
index 0000000..a99028a
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/filestore/TestDistribPackageStore.java
@@ -0,0 +1,250 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.filestore;
+
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.nio.ByteBuffer;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.Callable;
+import java.util.function.Predicate;
+
+import com.google.common.collect.ImmutableSet;
+import org.apache.commons.codec.digest.DigestUtils;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient.RemoteExecutionException;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.response.V2Response;
+import org.apache.solr.cloud.MiniSolrCloudCluster;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.NavigableObject;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.util.LogLevel;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.server.ByteBufferInputStream;
+
+import static org.apache.solr.common.util.Utils.JAVABINCONSUMER;
+import static org.apache.solr.core.TestDynamicLoading.getFileContent;
+
+@LogLevel("org.apache.solr.core.PackageStoreAPI=DEBUG;org.apache.solr.core.DistribPackageStore=DEBUG")
+public class TestDistribPackageStore extends SolrCloudTestCase {
+
+  public void testPackageStoreManagement() throws Exception {
+    MiniSolrCloudCluster cluster =
+        configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .addConfig("conf", configset("cloud-minimal"))
+        .configure();
+    try {
+
+      byte[] derFile = readFile("cryptokeys/pub_key512.der");
+      cluster.getZkClient().makePath("/keys/exe", true);
+      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+
+      try {
+        postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+            "/package/mypkg/v1.0/runtimelibs.jar",
+            "j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=="
+        );
+        fail("should have failed because of wrong signature ");
+      } catch (RemoteExecutionException e) {
+        assertTrue(e.getMessage().contains("Signature does not match"));
+      }
+
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+          "/package/mypkg/v1.0/runtimelibs.jar",
+          "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="
+          );
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new V2Request.Builder("/node/files/package/mypkg/v1.0")
+              .withMethod(SolrRequest.METHOD.GET)
+              .build(),
+          Utils.makeMap(
+              ":files:/package/mypkg/v1.0[0]:name", "runtimelibs.jar",
+              ":files:/package/mypkg/v1.0[0]:sha512", "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420",
+              ":files:/package/mypkg/v1.0[0]:sig[0]", "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=="
+          )
+      );
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new V2Request.Builder("/node/files/package/mypkg")
+              .withMethod(SolrRequest.METHOD.GET)
+              .build(),
+          Utils.makeMap(
+              ":files:/package/mypkg[0]:name", "v1.0",
+              ":files:/package/mypkg[0]:dir", "true"
+          )
+      );
+
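+      // helper that fetches a file's metadata directly from one node's v2 API, so we can
+      // verify the file has been replicated to every node in the cluster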
+      class Fetcher implements Callable {
+        String url;
+        JettySolrRunner jetty;
+        Fetcher(String s, JettySolrRunner jettySolrRunner){
+          this.url = s;
+          this.jetty = jettySolrRunner;
+        }
+        @Override
+        public NavigableObject call() throws Exception {
+          try (HttpSolrClient solrClient = (HttpSolrClient) jetty.newClient()) {
+            return (NavigableObject) Utils.executeGET(solrClient.getHttpClient(), this.url, JAVABINCONSUMER);
+          }
+        }
+
+        @Override
+        public String toString() {
+          return url;
+        }
+
+      }
+
+      Map expected = Utils.makeMap(
+          ":files:/package/mypkg/v1.0/runtimelibs.jar:name", "runtimelibs.jar",
+          ":files:/package/mypkg/v1.0[0]:sha512", "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"
+
+      );
+      for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
+        String baseUrl = jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api");
+        String url = baseUrl + "/node/files/package/mypkg/v1.0/runtimelibs.jar?wt=javabin&meta=true";
+
+        assertResponseValues(10, new Fetcher(url, jettySolrRunner), expected);
+
+        try (HttpSolrClient solrClient = (HttpSolrClient) jettySolrRunner.newClient()) {
+          ByteBuffer buf = Utils.executeGET(solrClient.getHttpClient(), baseUrl + "/node/files/package/mypkg/v1.0/runtimelibs.jar",
+              Utils.newBytesConsumer(Integer.MAX_VALUE));
+          assertEquals(
+              "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420",
+              DigestUtils.sha512Hex(new ByteBufferInputStream(buf))
+          );
+
+        }
+
+      }
+
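+      // a file may also be uploaded without a signature (sig == null)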
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
+          "/package/mypkg/v1.0/runtimelibs_v2.jar",
+          null
+      );
+
+      expected = Utils.makeMap(
+          ":files:/package/mypkg/v1.0", (Predicate<Object>) o -> {
+            List l = (List) o;
+            assertEquals(2, l.size());
+            Set expectedKeys = ImmutableSet.of("runtimelibs_v2.jar", "runtimelibs.jar");
+            for (Object file : l) {
+              if(! expectedKeys.contains(Utils.getObjectByPath(file, true, "name"))) return false;
+            }
+
+            return true;
+          }
+      );
+      for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
+        String baseUrl = jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api");
+        String url = baseUrl + "/node/files/package/mypkg/v1.0?wt=javabin";
+
+        assertResponseValues(10, new Fetcher(url, jettySolrRunner), expected);
+
+      }
+
+
+
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  public static NavigableObject assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception {
+    Callable<NavigableObject> callable = () -> req.process(client);
+
+    return assertResponseValues(repeats, callable, vals);
+  }
+
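+  /**
+   * Polls the callable up to {@code repeats} times, sleeping 100ms between attempts; returns as soon
+   * as one of the expected values in {@code vals} matches the response, and fails on the final
+   * attempt if it still does not.
+   */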
+  public static NavigableObject assertResponseValues(int repeats, Callable<NavigableObject> callable, Map vals) throws Exception {
+    NavigableObject rsp = null;
+
+    for (int i = 0; i < repeats; i++) {
+      if (i > 0) {
+        Thread.sleep(100);
+      }
+      try {
+        rsp = callable.call();
+      } catch (Exception e) {
+        if (i >= repeats - 1) throw e;
+        continue;
+      }
+      for (Object e : vals.entrySet()) {
+        Map.Entry entry = (Map.Entry) e;
+        String k = (String) entry.getKey();
+        List<String> key = StrUtils.split(k, '/');
+
+        Object val = entry.getValue();
+        Predicate p = val instanceof Predicate ? (Predicate) val : o -> {
+          String v = o == null ? null : String.valueOf(o);
+          return Objects.equals(val, o);
+        };
+        boolean isPass = p.test(rsp._get(key, null));
+        if (isPass) return rsp;
+        else if (i >= repeats - 1) {
+          fail("req: " + callable.toString() +" . attempt: " + i + " Mismatch for value : '" + key + "' in response , " + Utils.toJSONString(rsp));
+        }
+
+      }
+
+    }
+    return rsp;
+  }
+
+
+
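+  // uploads a file to the package store via a v2 PUT request (optionally with a signature) and
+  // checks that the response echoes the file path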
+  private void postFile(SolrClient client, ByteBuffer buffer, String name, String sig)
+      throws SolrServerException, IOException {
+    String resource = "/cluster/files" + name;
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    params.add("sig", sig);
+    V2Response rsp = new V2Request.Builder(resource)
+        .withMethod(SolrRequest.METHOD.PUT)
+        .withPayload(buffer)
+        .forceV2(true)
+        .withMimeType("application/octet-stream")
+        .withParams(params)
+        .build()
+        .process(client);
+    assertEquals(name, rsp.getResponse().get(CommonParams.FILE));
+  }
+
+  public static byte[] readFile(String fname) throws IOException {
+    byte[] buf = null;
+    try (FileInputStream fis = new FileInputStream(getFile(fname))) {
+      buf = new byte[fis.available()];
+      fis.read(buf);
+    }
+    return buf;
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java b/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java
deleted file mode 100644
index 30e8e20..0000000
--- a/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java
+++ /dev/null
@@ -1,782 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.Reader;
-import java.lang.invoke.MethodHandles;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.function.Predicate;
-
-import com.google.common.collect.ImmutableMap;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.ResponseParser;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.SolrResponse;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
-import org.apache.solr.client.solrj.impl.HttpSolrClient;
-import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.request.GenericSolrRequest;
-import org.apache.solr.client.solrj.request.QueryRequest;
-import org.apache.solr.client.solrj.request.UpdateRequest;
-import org.apache.solr.client.solrj.request.V2Request;
-import org.apache.solr.client.solrj.response.SimpleSolrResponse;
-import org.apache.solr.client.solrj.response.V2Response;
-import org.apache.solr.cloud.ConfigRequest;
-import org.apache.solr.cloud.MiniSolrCloudCluster;
-import org.apache.solr.cloud.SolrCloudTestCase;
-import org.apache.solr.common.cloud.ClusterProperties;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.params.MapSolrParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.common.util.NamedList;
-import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.ConfigOverlay;
-import org.apache.solr.core.MemClassLoader;
-import org.apache.solr.core.RuntimeLib;
-import org.apache.solr.request.SolrRequestHandler;
-import org.apache.solr.util.LogLevel;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.data.Stat;
-import org.eclipse.jetty.server.Server;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import static java.nio.charset.StandardCharsets.UTF_8;
-import static org.apache.solr.cloud.TestCryptoKeys.readFile;
-import static org.apache.solr.common.params.CommonParams.JAVABIN;
-import static org.apache.solr.common.params.CommonParams.WT;
-import static org.apache.solr.common.util.Utils.getObjectByPath;
-import static org.apache.solr.core.TestDynamicLoading.getFileContent;
-import static org.apache.solr.core.TestDynamicLoadingUrl.runHttpServer;
-
-@SolrTestCaseJ4.SuppressSSL
-@LogLevel("org.apache.solr.common.cloud.ZkStateReader=DEBUG;org.apache.solr.handler.admin.CollectionHandlerApi=DEBUG;org.apache.solr.core.PackageManager=DEBUG;org.apache.solr.common.cloud.ClusterProperties=DEBUG")
-public class TestContainerReqHandler extends SolrCloudTestCase {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-
-  @BeforeClass
-  public static void setupCluster() throws Exception {
-    System.setProperty("enable.runtime.lib", "true");
-
-  }
-
-  static SolrResponse assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception {
-    SolrResponse rsp = null;
-
-    for (int i = 0; i < repeats; i++) {
-      if (i > 0) {
-        Thread.sleep(100);
-      }
-      try {
-        rsp = req.process(client);
-      } catch (Exception e) {
-        if (i >= repeats - 1) throw e;
-        continue;
-      }
-      for (Object e : vals.entrySet()) {
-        Map.Entry entry = (Map.Entry) e;
-        String k = (String) entry.getKey();
-        List<String> key = StrUtils.split(k, '/');
-
-        Object val = entry.getValue();
-        Predicate p = val instanceof Predicate ? (Predicate) val : o -> {
-          String v = o == null ? null : String.valueOf(o);
-          return Objects.equals(val, o);
-        };
-        boolean isPass = p.test(rsp._get(key, null));
-        if (isPass) return rsp;
-        else if (i >= repeats - 1) {
-          fail("attempt: " + i + " Mismatch for value : '" + key + "' in response " + Utils.toJSONString(rsp));
-        }
-
-      }
-
-    }
-    return rsp;
-  }
-
-  private static Map<String, Object> assertVersionInSync(SolrZkClient zkClient, SolrClient solrClient) throws SolrServerException, IOException {
-    Stat stat = new Stat();
-    Map<String, Object> map = new ClusterProperties(zkClient).getClusterProperties(stat);
-    assertEquals(String.valueOf(stat.getVersion()), getExtResponse(solrClient)._getStr("metadata/version", null));
-    return map;
-  }
-
-  private static V2Response getExtResponse(SolrClient solrClient) throws SolrServerException, IOException {
-    return new V2Request.Builder("/node/ext")
-        .withMethod(SolrRequest.METHOD.GET)
-        .build().process(solrClient);
-  }
-
-  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13781")
-  @Test
-  public void testPackageAPI() throws Exception {
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    MiniSolrCloudCluster cluster = configureCluster(4).configure();
-    try {
-      String payload = null;
-      try {
-        payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " +
-            "sha256 : 'wrong-sha256'}}";
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-        fail("Expected error");
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("expected sha256 hash :"));
-      }
-
-      try {
-        payload = "{add-package:{name : 'foo', url: 'http://localhost:" + port + "/jar0.jar', " +
-            "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}";
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-        fail("Expected error");
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        assertTrue("Actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("no such resource available: foo"));
-      }
-
-      payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler', package : global}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
-
-
-      V2Request request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", null));
-
-
-      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
-
-
-      payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar3.jar', " +
-          "sha256 : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-
-      request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", "3")
-      );
-
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{delete-requesthandler: 'bar'}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      request = new V2Request.Builder("/node/ext")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(SolrRequestHandler.TYPE,
-          (Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
-      new V2Request.Builder("/cluster")
-          .withPayload("{delete-package : 'global'}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(RuntimeLib.TYPE,
-          (Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
-
-
-      URL baseUrl = cluster.getRandomJetty(random()).getBaseUrl();
-      try(HttpSolrClient client = new HttpSolrClient.Builder(baseUrl.toString()).build()){
-        SimpleSolrResponse rsp = new GenericSolrRequest(SolrRequest.METHOD.GET, "/____v2/node/blob", new ModifiableSolrParams()).process(client);
-        List l = (List) rsp.nl.get("blob");
-        assertTrue(l.contains("e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc"));
-        assertTrue(l.contains("20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3"));
-      }
-    } finally {
-      cluster.shutdown();
-      server.first().stop();
-    }
-  }
-
-  @Test
-  public void testRuntimeLibWithSig2048() throws Exception {
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    MiniSolrCloudCluster cluster = configureCluster(4).configure();
-
-    try {
-
-      byte[] derFile = readFile("cryptokeys/pub_key2048.der");
-      cluster.getZkClient().makePath("/keys/exe", true);
-      cluster.getZkClient().create("/keys/exe/pub_key2048.der", derFile, CreateMode.PERSISTENT, true);
-
-      String signature = "NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCcfNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvdRxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQKJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C6uRCKCeFMrzQ/k5inasXLw==";
-
-      String payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sig : 'EdYkvRpMZbvElN93/xUmyKXcj6xHP16AVk71TlTascEwCb5cFQ2AeKhPIlwYpkLWXEOcLZKfeXoWwOLaV5ZNhg==' ," +
-          "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}";
-      try {
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        //No key matched signature for jar
-        assertTrue(e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("No key matched signature for jar"));
-      }
-
-
-      payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sig : '" + signature + "'," +
-          "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler' package : global}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
-
-
-      V2Request request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", null));
-
-
-      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
-
-      payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar3.jar', " +
-          "sig : 'YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUrnpzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt31oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YXFuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB0TbQtJG0XGfdSmx0VChvcA==' ," +
-          "sha256 : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-
-      request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", "3"));
-
-
-    } finally {
-      server.first().stop();
-      cluster.shutdown();
-    }
-
-  }
-
-  @Test
-  public void testRuntimeLibWithSig512() throws Exception {
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    MiniSolrCloudCluster cluster = configureCluster(4).configure();
-
-    try {
-
-      byte[] derFile = readFile("cryptokeys/pub_key512.der");
-      cluster.getZkClient().makePath("/keys/exe", true);
-      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
-
-      String signature = "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==";
-
-      String payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sig : '" + signature + "'," +
-          "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler' package : global }}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
-
-
-      V2Request request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", null));
-
-
-      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
-
-      payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar3.jar', " +
-          "sig : 'a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q==' ," +
-          "sha256 : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-
-      request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", "3"));
-
-    } finally {
-      server.first().stop();
-      cluster.shutdown();
-    }
-
-  }
-
-  @Test
-  public void testSetClusterReqHandler() throws Exception {
-    MiniSolrCloudCluster cluster = configureCluster(4).configure();
-    try {
-      SolrZkClient zkClient = cluster.getZkClient();
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'foo', class : 'org.apache.solr.handler.DumpRequestHandler'}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-
-      Map<String, Object> map = assertVersionInSync(zkClient, cluster.getSolrClient());
-
-      assertEquals("org.apache.solr.handler.DumpRequestHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "foo", "class")));
-
-      assertVersionInSync(zkClient, cluster.getSolrClient());
-      V2Response rsp = new V2Request.Builder("/node/ext/foo")
-          .withMethod(SolrRequest.METHOD.GET)
-          .withParams(new MapSolrParams((Map) Utils.makeMap("testkey", "testval")))
-          .build().process(cluster.getSolrClient());
-      assertEquals("testval", rsp._getStr("params/testkey", null));
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{delete-requesthandler: 'foo'}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-
-      assertNull(getObjectByPath(map, true, Arrays.asList("requestHandler", "foo")));
-    } finally {
-      cluster.shutdown();
-    }
-
-  }
-
-  public void testPluginFrompackage() throws Exception {
-    String COLLECTION_NAME = "globalLoaderColl";
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    System.setProperty("enable.runtime.lib", "true");
-    MiniSolrCloudCluster cluster = configureCluster(4)
-        .addConfig("conf", configset("cloud-minimal"))
-        .configure();
-    try {
-      CollectionAdminRequest
-          .createCollection(COLLECTION_NAME, "conf", 2, 1)
-          .setMaxShardsPerNode(100)
-          .process(cluster.getSolrClient());
-
-
-      cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2);
-      String payload = "{add-package:{name : 'global', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sha256 : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      String sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256");
-      String url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/url");
-
-      assertEquals(sha256,
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-
-      payload = "{\n" +
-          "'create-requesthandler' : { 'name' : '/runtime', 'class': 'org.apache.solr.core.RuntimeLibReqHandler' , 'package':global }," +
-          "'create-searchcomponent' : { 'name' : 'get', 'class': 'org.apache.solr.core.RuntimeLibSearchComponent' , 'package':global }," +
-          "'create-queryResponseWriter' : { 'name' : 'json1', 'class': 'org.apache.solr.core.RuntimeLibResponseWriter' , 'package':global }" +
-          "}";
-      cluster.getSolrClient().request(new ConfigRequest(payload) {
-        @Override
-        public String getCollection() {
-          return COLLECTION_NAME;
-        }
-      });
-
-      SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/queryResponseWriter/json1", params),
-          Utils.makeMap(
-              "/config/queryResponseWriter/json1/_packageinfo_/url", url,
-              "/config/queryResponseWriter/json1/_meta_/sha256", sha256
-          ));
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/searchComponent/get", params),
-          Utils.makeMap(
-              "config/searchComponent/get/_packageinfo_/url", url,
-              "config/searchComponent/get/_packageinfo_/sha256", sha256
-          ));
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/requestHandler/runtime", params),
-          Utils.makeMap(
-              ":config:requestHandler:/runtime:_packageinfo_:url", url,
-              ":config:requestHandler:/runtime:_packageinfo_:sha256", sha256
-          ));
-
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN));
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/overlay", params),
-          Utils.makeMap(
-              "overlay/queryResponseWriter/json1/class", "org.apache.solr.core.RuntimeLibResponseWriter",
-              "overlay/searchComponent/get/class", "org.apache.solr.core.RuntimeLibSearchComponent"
-          ));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/runtime", params),
-          Utils.makeMap("class", "org.apache.solr.core.RuntimeLibReqHandler",
-              "loader", MemClassLoader.class.getName()));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/get?abc=xyz", params),
-          Utils.makeMap("get", "org.apache.solr.core.RuntimeLibSearchComponent",
-              "loader", MemClassLoader.class.getName()));
-
-      GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/runtime",
-          new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, "json1")));
-      req.setResponseParser(new ResponseParser() {
-        @Override
-        public String getWriterType() {
-          return "json1";
-        }
-
-        @Override
-        public NamedList<Object> processResponse(InputStream body, String encoding) {
-          return new NamedList<>((Map) Utils.fromJSON(body));
-        }
-
-        @Override
-        public NamedList<Object> processResponse(Reader reader) {
-          return new NamedList<>((Map) Utils.fromJSON(reader));
-
-        }
-
-      });
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          req,
-          Utils.makeMap("wt", "org.apache.solr.core.RuntimeLibResponseWriter",
-              "loader", MemClassLoader.class.getName()));
-
-
-      payload = "{update-package:{name : 'global', url: 'http://localhost:" + port + "/jar2.jar', " +
-          "sha256 : '79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256");
-      url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/url");
-
-      assertEquals(sha256,
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/global/sha256"));
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/queryResponseWriter/json1", params),
-          Utils.makeMap(
-              "/config/queryResponseWriter/json1/_packageinfo_/url", url,
-              "/config/queryResponseWriter/json1/_packageinfo_/sha256", sha256
-          ));
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/searchComponent/get", params),
-          Utils.makeMap(
-              "/config/searchComponent/get/_packageinfo_/url", url,
-              "/config/searchComponent/get/_packageinfo_/sha256", sha256
-          ));
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/requestHandler/runtime", params),
-          Utils.makeMap(
-              ":config:requestHandler:/runtime:_packageinfo_:url", url,
-              ":config:requestHandler:/runtime:_packageinfo_:sha256", sha256
-          ));
-
-
-
-      try {
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-        fail("should have failed");
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("Trying to update a jar with the same sha256"));
-      }
-
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/get?abc=xyz", params),
-          Utils.makeMap("get", "org.apache.solr.core.RuntimeLibSearchComponent",
-              "loader", MemClassLoader.class.getName(),
-              "Version", "2"));
-    } finally {
-      cluster.deleteAllCollections();
-      cluster.shutdown();
-      server.first().stop();
-    }
-
-  }
-
-//  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13650")
-  public void testCacheLoadFromPackage() throws Exception {
-    String COLLECTION_NAME = "globalCacheColl";
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/cache.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/cache_v2.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-
-    String overlay = "{" +
-        "    \"props\":{\"query\":{\"documentCache\":{\n" +
-        "          \"class\":\"org.apache.solr.core.MyDocCache\",\n" +
-        "          \"size\":\"512\",\n" +
-        "          \"initialSize\":\"512\" , \"package\":\"cache_pkg\"}}}}";
-    MiniSolrCloudCluster cluster = configureCluster(4)
-        .addConfig("conf", configset("cloud-minimal"),
-            Collections.singletonMap(ConfigOverlay.RESOURCE_NAME, overlay.getBytes(UTF_8)))
-        .configure();
-    try {
-      String payload = "{add-package:{name : 'cache_pkg', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sha256 : '32e8b5b2a95ea306538b52017f0954aa1b0f8a8b2d0acbc498fd0e66a223f7bd'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/cache_pkg/sha256"));
-
-      CollectionAdminRequest
-          .createCollection(COLLECTION_NAME, "conf", 2, 1)
-          .setMaxShardsPerNode(100)
-          .process(cluster.getSolrClient());
-
-
-      cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2);
-      SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN));
-
-      NamedList<Object> rsp = cluster.getSolrClient().request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/overlay", params));
-      assertEquals("org.apache.solr.core.MyDocCache", rsp._getStr("overlay/props/query/documentCache/class", null));
-
-      String sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/sha256");
-      String url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add-package/url");
-
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/query/documentCache", params),
-          Utils.makeMap(
-              "/config/query/documentCache/_packageinfo_/url", url,
-              "/config/query/documentCache/_packageinfo_/sha256", sha256
-          ));
-
-
-      UpdateRequest req = new UpdateRequest();
-
-      req.add("id", "1", "desc_s", "document 1")
-          .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true)
-          .setWaitSearcher(true);
-      cluster.getSolrClient().request(req, COLLECTION_NAME);
-
-      SolrQuery solrQuery = new SolrQuery("q", "id:1", "collection", COLLECTION_NAME);
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new QueryRequest(solrQuery),
-          Utils.makeMap("/response[0]/my_synthetic_fld_s", "version_1"));
-
-
-      payload = "{update-package:{name : 'cache_pkg', url: 'http://localhost:" + port + "/jar2.jar', " +
-          "sha256 : '0f670f6dcc2b00f9a448a7ebd457d4ff985ab702c85cdb3608dcae9889e8d702'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      sha256 = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256");
-      url = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update-package/url");
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-package/sha256"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "package/cache_pkg/sha256"));
-
-      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
-          WT, JAVABIN,
-          "meta","true"));
-
-      assertResponseValues(10,
-          cluster.getSolrClient(),
-          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/query/documentCache", params),
-          Utils.makeMap(
-              "/config/query/documentCache/_packageinfo_/url", url,
-              "/config/query/documentCache/_packageinfo_/sha256", sha256
-          ));
-      req = new UpdateRequest();
-      req.add("id", "2", "desc_s", "document 1")
-          .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true)
-          .setWaitSearcher(true);
-      cluster.getSolrClient().request(req, COLLECTION_NAME);
-
-
-      solrQuery = new SolrQuery("q", "id:2", "collection", COLLECTION_NAME);
-      SolrResponse result = assertResponseValues(10,
-          cluster.getSolrClient(),
-          new QueryRequest(solrQuery),
-          Utils.makeMap("response[0]/my_synthetic_fld_s", "version_2"));
-
-    } finally {
-      cluster.deleteAllCollections();
-      cluster.shutdown();
-      server.first().stop();
-    }
-  }
-
-}
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index 356e865..a6dbd9e 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -53,12 +53,12 @@
   @AfterClass
   public static void cleanupMetrics() throws Exception {
     if (null != h) {
-      h.getCoreContainer().getMetricManager().registry("solr.jvm").remove("solrtest_foo");
+      h.getCoreContainer().getMetricManager().registry("solr.jvm"  ).remove("solrtest_foo");
       h.getCoreContainer().getMetricManager().registry("solr.jetty").remove("solrtest_foo");
       h.getCoreContainer().getMetricManager().registry("solr.jetty").remove("solrtest_foo:bar");
     }
   }
-
+  
   @Test
   public void test() throws Exception {
     MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
@@ -145,7 +145,7 @@
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
     assertEquals(1, values.size());
-    assertEquals(13, ((NamedList) values.get("solr.node")).size());
+    assertEquals(13, ((NamedList)values.get("solr.node")).size());
     assertNotNull(values.get("solr.node"));
     values = (NamedList) values.get("solr.node");
     assertNotNull(values.get("CONTAINER.cores.lazy")); // this is a gauge node
@@ -171,7 +171,7 @@
     assertNotNull(values.get("solr.core.collection1"));
     values = (NamedList) values.get("solr.core.collection1");
     assertEquals(1, values.size());
-    Map m = (Map) values.get("CACHE.core.fieldCache");
+    Map m = (Map)values.get("CACHE.core.fieldCache");
     assertNotNull(m);
     assertNotNull(m.get("entries_count"));
 
@@ -223,7 +223,7 @@
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
       assertTrue(v instanceof Map);
-      Map map = (Map) v;
+      Map map = (Map)v;
       assertTrue(map.size() > 2);
     });
 
@@ -238,7 +238,7 @@
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
       assertTrue(v instanceof Map);
-      Map map = (Map) v;
+      Map map = (Map)v;
       assertEquals(2, map.size());
       assertNotNull(map.get("inserts"));
       assertNotNull(map.get("size"));
@@ -257,7 +257,7 @@
     Object val = values.findRecursive("metrics", key1);
     assertNotNull(val);
     assertTrue(val instanceof Map);
-    assertTrue(((Map) val).size() >= 2);
+    assertTrue(((Map)val).size() >= 2);
 
     String key2 = "solr.core.collection1:CACHE.core.fieldCache:entries_count";
     resp = new SolrQueryResponse();
@@ -276,7 +276,7 @@
     val = values.findRecursive("metrics", key3);
     assertNotNull(val);
     assertTrue(val instanceof Number);
-    assertEquals(3, ((Number) val).intValue());
+    assertEquals(3, ((Number)val).intValue());
 
     // test multiple keys
     resp = new SolrQueryResponse();
@@ -306,7 +306,7 @@
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "foo", MetricsHandler.KEY_PARAM, "foo:bar:baz:xyz"), resp);
     values = resp.getValues();
-    NamedList metrics = (NamedList) values.get("metrics");
+    NamedList metrics = (NamedList)values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "foo"));
     assertNotNull(values.findRecursive("errors", "foo:bar:baz:xyz"));
@@ -316,7 +316,7 @@
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "foo:bar:baz"), resp);
     values = resp.getValues();
-    metrics = (NamedList) values.get("metrics");
+    metrics = (NamedList)values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "foo:bar:baz"));
 
@@ -325,7 +325,7 @@
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "solr.jetty:unknown:baz"), resp);
     values = resp.getValues();
-    metrics = (NamedList) values.get("metrics");
+    metrics = (NamedList)values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "solr.jetty:unknown:baz"));
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
index 1762ec6..933b862 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
@@ -17,6 +17,9 @@
 
 package org.apache.solr.handler.admin;
 
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.io.InputStream;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -24,14 +27,23 @@
 import java.util.Map;
 import java.util.Set;
 
+import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.api.AnnotatedApi;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
+import org.apache.solr.api.Command;
+import org.apache.solr.api.EndPoint;
 import org.apache.solr.api.V2HttpCall;
 import org.apache.solr.api.V2HttpCall.CompositeApi;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.ContentStreamBase;
+import org.apache.solr.common.util.JsonSchemaValidator;
 import org.apache.solr.common.util.PathTrie;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
@@ -43,11 +55,15 @@
 import org.apache.solr.handler.SolrConfigHandler;
 import org.apache.solr.request.LocalSolrQueryRequest;
 import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.request.SolrQueryRequestBase;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.PermissionNameProvider;
 
+import static java.nio.charset.StandardCharsets.UTF_8;
 import static org.apache.solr.api.ApiBag.EMPTY_SPEC;
 import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
+import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
 import static org.apache.solr.common.params.CommonParams.COLLECTIONS_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.CONFIGSETS_HANDLER_PATH;
 import static org.apache.solr.common.params.CommonParams.CORES_HANDLER_PATH;
@@ -152,6 +168,124 @@
 
   }
 
+  public void testPayload() throws IOException {
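+    // Registers an annotated API, checks the spec generated from its annotations, then invokes it
+    // with a JSON payload and verifies that the payload is mapped onto the AddVersion bean below.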
+    String json = "{package:pkg1, version: '0.1', files  :[a.jar, b.jar]}";
+    Utils.fromJSONString(json);
+
+    ApiBag apiBag = new ApiBag(false);
+    AnnotatedApi api = new AnnotatedApi(new ApiTest());
+    apiBag.register(api, Collections.emptyMap());
+
+    ValidatingJsonMap spec = api.getSpec();
+
+    assertEquals("POST", spec._getStr("/methods[0]",null) );
+    assertEquals("POST", spec._getStr("/methods[0]",null) );
+    assertEquals("/cluster/package", spec._getStr("/url/paths[0]",null) );
+    assertEquals("string", spec._getStr("/commands/add/properties/package/type",null) );
+    assertEquals("array", spec._getStr("/commands/add/properties/files/type",null) );
+    assertEquals("string", spec._getStr("/commands/add/properties/files/items/type",null) );
+    assertEquals("string", spec._getStr("/commands/delete/items/type",null) );
+    SolrQueryResponse rsp = v2ApiInvoke(apiBag, "/cluster/package", "POST", new ModifiableSolrParams(),
+        new ByteArrayInputStream("{add:{package:mypkg, version: '1.0', files : [a.jar, b.jar]}}".getBytes(UTF_8)));
+
+
+    AddVersion addversion = (AddVersion) rsp.getValues().get("add");
+    assertEquals("mypkg", addversion.pkg);
+    assertEquals("1.0", addversion.version);
+    assertEquals("a.jar", addversion.files.get(0));
+    assertEquals("b.jar", addversion.files.get(1));
+
+  }
+
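+  // Endpoint registered by testPayload: each @Command simply echoes its parsed argument back into
+  // the response so the test can assert on the mapped values.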
+  @EndPoint(method = POST, path = "/cluster/package", permission = PermissionNameProvider.Name.ALL)
+  public static class ApiTest {
+    @Command(name = "add")
+    public void add(SolrQueryRequest req, SolrQueryResponse rsp, AddVersion addVersion) {
+      rsp.add("add", addVersion);
+
+    }
+
+    @Command(name = "delete")
+    public void del(SolrQueryRequest req, SolrQueryResponse rsp, List<String> names) {
+      rsp.add("delete",names);
+
+    }
+
+  }
+
+  public static class AddVersion {
+    @JsonProperty(value = "package", required = true)
+    public String pkg;
+    @JsonProperty(value = "version", required = true)
+    public String version;
+    @JsonProperty(value = "files", required = true)
+    public List<String> files;
+  }
+
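+  // A GET to /node/filestore/package/mypkg/jar1.jar must be routed to the wildcard endpoint below,
+  // which echoes back the trailing path segment captured in the "*" template value.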
+  public void testAnnotatedApi() {
+    ApiBag apiBag = new ApiBag(false);
+    apiBag.register(new AnnotatedApi(new DummyTest()), Collections.emptyMap());
+    SolrQueryResponse rsp = v2ApiInvoke(apiBag, "/node/filestore/package/mypkg/jar1.jar", "GET",
+        new ModifiableSolrParams(), null);
+    assertEquals("/package/mypkg/jar1.jar", rsp.getValues().get("path"));
+  }
+
+  @EndPoint(
+      path = "/node/filestore/*",
+      method = SolrRequest.METHOD.GET,
+      permission = PermissionNameProvider.Name.ALL)
+  public class DummyTest {
+    @Command
+    public void read(SolrQueryRequest req, SolrQueryResponse rsp) {
+      rsp.add("FSRead.called", "true");
+      rsp.add("path", req.getPathTemplateValues().get("*"));
+    }
+  }
+
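+  // Test-only helper: wraps the payload in a minimal SolrQueryRequestBase (path-template values,
+  // command validators and content stream wired by hand), resolves the Api from the ApiBag and
+  // calls it directly, so v2 endpoints can be exercised without a running server.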
+  private static SolrQueryResponse v2ApiInvoke(ApiBag bag, String uri, String method, SolrParams params, InputStream payload) {
+    if (params == null) params = new ModifiableSolrParams();
+    SolrQueryResponse rsp = new SolrQueryResponse();
+    HashMap<String, String> templateVals = new HashMap<>();
+    Api[] currentApi = new Api[1];
+
+    SolrQueryRequestBase req = new SolrQueryRequestBase(null, params) {
+
+      @Override
+      public Map<String, String> getPathTemplateValues() {
+        return templateVals;
+      }
+
+      @Override
+      protected Map<String, JsonSchemaValidator> getValidators() {
+        return currentApi[0] == null?
+            Collections.emptyMap():
+            currentApi[0].getCommandSchema();
+      }
+
+      @Override
+      public Iterable<ContentStream> getContentStreams() {
+        return Collections.singletonList(new ContentStreamBase() {
+          @Override
+          public InputStream getStream() throws IOException {
+            return payload;
+          }
+        });
+
+      }
+    };
+    Api api = bag.lookup(uri, method, templateVals);
+    currentApi[0] = api;
+
+
+    api.call(req, rsp);
+    return rsp;
+
+  }
+
   public void testTrailingTemplatePaths() {
     PathTrie<Api> registry = new PathTrie<>();
     Api api = new Api(EMPTY_SPEC) {
@@ -204,7 +338,7 @@
   }
 
 
-  private void assertConditions(Map root, Map conditions) {
+  public static void assertConditions(Map root, Map conditions) {
     for (Object o : conditions.entrySet()) {
       Map.Entry e = (Map.Entry) o;
       String path = (String) e.getKey();
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java b/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java
index 130f1ef..0062fcf 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DebugComponentTest.java
@@ -185,9 +185,11 @@
       //if the request has debugQuery=true or debug=track, the sreq should get debug=track always
       assertTrue(Arrays.asList(sreq.params.getParams(CommonParams.DEBUG)).contains(CommonParams.TRACK));
       //the purpose must be added as readable param to be included in the shard logs
-      assertEquals("GET_FIELDS,GET_DEBUG", sreq.params.get(CommonParams.REQUEST_PURPOSE));
+      assertEquals("GET_FIELDS,GET_DEBUG,SET_TERM_STATS", sreq.params.get(CommonParams.REQUEST_PURPOSE));
       //the rid must be added to be included in the shard logs
       assertEquals("123456-my_rid", sreq.params.get(CommonParams.REQUEST_ID));
+      // close requests - this method obtains a searcher in order to access its StatsCache
+      req.close();
     }
     
   }
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
index e40db98..d35f8d1 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestBulkSchemaAPI.java
@@ -20,6 +20,7 @@
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
@@ -32,9 +33,12 @@
 import org.apache.lucene.search.similarities.DFISimilarity;
 import org.apache.lucene.search.similarities.PerFieldSimilarityWrapper;
 import org.apache.lucene.search.similarities.Similarity;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.request.schema.SchemaRequest;
 import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.SimilarityFactory;
 import org.apache.solr.search.similarities.SchemaSimilarityFactory;
 import org.apache.solr.util.RESTfulServerProvider;
@@ -42,6 +46,7 @@
 import org.apache.solr.util.RestTestHarness;
 import org.junit.After;
 import org.junit.Before;
+import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -188,7 +193,7 @@
     assertNotNull(map);
     Map analyzer = (Map)map.get("analyzer");
     assertEquals("org.apache.lucene.analysis.core.WhitespaceAnalyzer", String.valueOf(analyzer.get("class")));
-    assertEquals("5.0.0", String.valueOf(analyzer.get("luceneMatchVersion")));
+    assertEquals("5.0.0", String.valueOf(analyzer.get(IndexSchema.LUCENE_MATCH_VERSION_PARAM)));
   }
 
   public void testAnalyzerByName() throws Exception {
@@ -954,6 +959,24 @@
     
   }
   
+  @Test
+  public void testAddNewFieldAndQuery() throws Exception {
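+    // Index a document, add a brand-new string field via the Schema API, then facet on that field
+    // to confirm the updated schema is immediately usable for queries.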
+    getSolrClient().add(Arrays.asList(
+        sdoc("id", "1", "term_s", "tux")));
+
+    getSolrClient().commit(true, true);
+    Map<String,Object> attrs = new HashMap<>();
+    attrs.put("name", "newstringtestfield");
+    attrs.put("type", "string");
+
+    new SchemaRequest.AddField(attrs).process(getSolrClient());
+
+    SolrQuery query = new SolrQuery("*:*");
+    query.addFacetField("newstringtestfield");
+    int size = getSolrClient().query(query).getResults().size();
+    assertEquals(1, size);
+  }
+  
   public void testSimilarityParser() throws Exception {
     RestTestHarness harness = restTestHarness;
 
diff --git a/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java
new file mode 100644
index 0000000..ae7a762
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/search/TestCaffeineCache.java
@@ -0,0 +1,285 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.search;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicReference;
+
+import com.github.benmanes.caffeine.cache.RemovalCause;
+import org.apache.lucene.util.Accountable;
+import org.apache.lucene.util.TestUtil;
+import org.apache.solr.SolrTestCase;
+import org.apache.solr.metrics.SolrMetricManager;
+import org.junit.Test;
+
+import com.github.benmanes.caffeine.cache.Cache;
+import com.github.benmanes.caffeine.cache.Caffeine;
+
+/**
+ * Test for {@link CaffeineCache}.
+ */
+public class TestCaffeineCache extends SolrTestCase {
+
+  SolrMetricManager metricManager = new SolrMetricManager();
+  String registry = TestUtil.randomSimpleString(random(), 2, 10);
+  String scope = TestUtil.randomSimpleString(random(), 2, 10);
+
+  @Test
+  public void testSimple() throws IOException {
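+    // Basic put/get behaviour, the lookups/hits/inserts metrics, and autowarming into a second cache.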
+    CaffeineCache<Integer, String> lfuCache = new CaffeineCache<>();
+    lfuCache.initializeMetrics(metricManager, registry, "foo", scope + "-1");
+
+    CaffeineCache<Integer, String> newLFUCache = new CaffeineCache<>();
+    newLFUCache.initializeMetrics(metricManager, registry, "foo2", scope + "-2");
+
+    Map<String, String> params = new HashMap<>();
+    params.put("size", "100");
+    params.put("initialSize", "10");
+    params.put("autowarmCount", "25");
+
+    NoOpRegenerator regenerator = new NoOpRegenerator();
+    Object initObj = lfuCache.init(params, null, regenerator);
+    lfuCache.setState(SolrCache.State.LIVE);
+    for (int i = 0; i < 101; i++) {
+      lfuCache.put(i + 1, Integer.toString(i + 1));
+    }
+    assertEquals("15", lfuCache.get(15));
+    assertEquals("75", lfuCache.get(75));
+    assertEquals(null, lfuCache.get(110));
+    Map<String, Object> nl = lfuCache.getMetricsMap().getValue();
+    assertEquals(3L, nl.get("lookups"));
+    assertEquals(2L, nl.get("hits"));
+    assertEquals(101L, nl.get("inserts"));
+
+    assertEquals(null, lfuCache.get(1));  // first item put in should be the first out
+
+    // Test autowarming
+    newLFUCache.init(params, initObj, regenerator);
+    newLFUCache.warm(null, lfuCache);
+    newLFUCache.setState(SolrCache.State.LIVE);
+
+    newLFUCache.put(103, "103");
+    assertEquals("15", newLFUCache.get(15));
+    assertEquals("75", newLFUCache.get(75));
+    assertEquals(null, newLFUCache.get(50));
+    nl = newLFUCache.getMetricsMap().getValue();
+    assertEquals(3L, nl.get("lookups"));
+    assertEquals(2L, nl.get("hits"));
+    assertEquals(1L, nl.get("inserts"));
+    assertEquals(0L, nl.get("evictions"));
+
+    assertEquals(7L, nl.get("cumulative_lookups"));
+    assertEquals(4L, nl.get("cumulative_hits"));
+    assertEquals(102L, nl.get("cumulative_inserts"));
+  }
+
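+  // Entries whose access counts were boosted (keys 13-17) must remain among the hottest entries
+  // while their colder neighbours are pushed out as new entries are inserted.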
+  @Test
+  public void testTimeDecay() {
+    Cache<Integer, String> cacheDecay = Caffeine.newBuilder()
+        .executor(Runnable::run)
+        .maximumSize(20)
+        .build();
+    for (int i = 1; i < 21; i++) {
+      cacheDecay.put(i, Integer.toString(i));
+    }
+    Map<Integer, String> itemsDecay;
+
+    // Now increase the freq count for 5 items
+    for (int i = 0; i < 5; ++i) {
+      for (int j = 0; j < 10; ++j) {
+        cacheDecay.getIfPresent(i + 13);
+      }
+    }
+    // OK, 13 - 17 should have larger counts and should stick past next few collections
+    cacheDecay.put(22, "22");
+    cacheDecay.put(23, "23");
+    cacheDecay.put(24, "24");
+    cacheDecay.put(25, "25");
+    itemsDecay = cacheDecay.policy().eviction().get().hottest(10);
+    // 13 - 17 should be in cache, but 11 and 18 (among others) should not. Testing that elements before and
+    // after the ones with increased counts are removed, and all the increased count ones are still in the cache
+    assertNull(itemsDecay.get(11));
+    assertNull(itemsDecay.get(18));
+    assertNotNull(itemsDecay.get(13));
+    assertNotNull(itemsDecay.get(14));
+    assertNotNull(itemsDecay.get(15));
+    assertNotNull(itemsDecay.get(16));
+    assertNotNull(itemsDecay.get(17));
+
+
+    // Testing that all the elements in front of the ones with increased counts are gone
+    for (int idx = 26; idx < 32; ++idx) {
+      cacheDecay.put(idx, Integer.toString(idx));
+    }
+    //Surplus count should be at 0
+    itemsDecay = cacheDecay.policy().eviction().get().hottest(10);
+    assertNull(itemsDecay.get(20));
+    assertNull(itemsDecay.get(24));
+    assertNotNull(itemsDecay.get(13));
+    assertNotNull(itemsDecay.get(14));
+    assertNotNull(itemsDecay.get(15));
+    assertNotNull(itemsDecay.get(16));
+    assertNotNull(itemsDecay.get(17));
+  }
+
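+  // An entry idle for longer than maxIdleTime must be removed with RemovalCause.EXPIRED; the expiry
+  // check only runs as a side effect of a later put(), hence the extra insert after the sleep.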
+  @Test
+  public void testMaxIdleTime() throws Exception {
+    int IDLE_TIME_SEC = 5;
+    CountDownLatch removed = new CountDownLatch(1);
+    AtomicReference<RemovalCause> removalCause = new AtomicReference<>();
+    CaffeineCache<String, String> cache = new CaffeineCache<>() {
+      @Override
+      public void onRemoval(String key, String value, RemovalCause cause) {
+        super.onRemoval(key, value, cause);
+        removalCause.set(cause);
+        removed.countDown();
+      }
+    };
+    Map<String, String> params = new HashMap<>();
+    params.put("size", "6");
+    params.put("maxIdleTime", "" + IDLE_TIME_SEC);
+    cache.init(params, null, new NoOpRegenerator());
+
+    cache.put("foo", "bar");
+    assertEquals("bar", cache.get("foo"));
+    // sleep for at least the idle time before inserting other entries
+    // the eviction is piggy-backed on put()
+    Thread.sleep(TimeUnit.SECONDS.toMillis(IDLE_TIME_SEC * 2));
+    cache.put("abc", "xyz");
+    boolean await = removed.await(30, TimeUnit.SECONDS);
+    assertTrue("did not expire entry in in time", await);
+    assertEquals(RemovalCause.EXPIRED, removalCause.get());
+    assertNull(cache.get("foo"));
+  }
+
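+  // Shrinking a live cache via setMaxSize()/setMaxRamMB() must evict entries, and every eviction
+  // must be reported through onRemoval() with RemovalCause.SIZE.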
+  @Test
+  public void testSetLimits() throws Exception {
+    AtomicReference<CountDownLatch> removed = new AtomicReference<>(new CountDownLatch(2));
+    List<RemovalCause> removalCauses = new ArrayList<>();
+    List<String> removedKeys = new ArrayList<>();
+    Set<String> allKeys = new HashSet<>();
+    CaffeineCache<String, Accountable> cache = new CaffeineCache<>() {
+      @Override
+      public Accountable put(String key, Accountable val) {
+        allKeys.add(key);
+        return super.put(key, val);
+      }
+
+      @Override
+      public void onRemoval(String key, Accountable value, RemovalCause cause) {
+        super.onRemoval(key, value, cause);
+        removalCauses.add(cause);
+        removedKeys.add(key);
+        removed.get().countDown();
+      }
+    };
+    Map<String, String> params = new HashMap<>();
+    params.put("size", "5");
+    cache.init(params, null, new NoOpRegenerator());
+
+    for (int i = 0; i < 5; i++) {
+      cache.put("foo-" + i, new Accountable() {
+        @Override
+        public long ramBytesUsed() {
+          return 1024 * 1024;
+        }
+      });
+    }
+    assertEquals(5, cache.size());
+    // no evictions yet
+    assertEquals(2, removed.get().getCount());
+
+    cache.put("abc1", new Accountable() {
+      @Override
+      public long ramBytesUsed() {
+        return 1;
+      }
+    });
+    cache.put("abc2", new Accountable() {
+      @Override
+      public long ramBytesUsed() {
+        return 2;
+      }
+    });
+    boolean await = removed.get().await(30, TimeUnit.SECONDS);
+    assertTrue("did not evict entries in in time", await);
+    assertEquals(5, cache.size());
+    assertEquals(2, cache.get("abc2").ramBytesUsed());
+    for (String key : removedKeys) {
+      assertNull("key " + key + " still present!", cache.get(key));
+      allKeys.remove(key);
+    }
+    for (RemovalCause cause : removalCauses) {
+      assertEquals(RemovalCause.SIZE, cause);
+    }
+
+    removed.set(new CountDownLatch(2));
+    removalCauses.clear();
+    removedKeys.clear();
+    // trim down by item count
+    cache.setMaxSize(3);
+    cache.put("abc3",  new Accountable() {
+      @Override
+      public long ramBytesUsed() {
+        return 3;
+      }
+    });
+    await = removed.get().await(30, TimeUnit.SECONDS);
+    assertTrue("did not evict entries in in time", await);
+    assertEquals(3, cache.size());
+    for (String key : removedKeys) {
+      assertNull("key " + key + " still present!", cache.get(key));
+      allKeys.remove(key);
+    }
+    for (RemovalCause cause : removalCauses) {
+      assertEquals(RemovalCause.SIZE, cause);
+    }
+
+    // at least one item has to go
+    removed.set(new CountDownLatch(1));
+    removalCauses.clear();
+    removedKeys.clear();
+    // trim down by ram size
+    cache.setMaxRamMB(1);
+    await = removed.get().await(30, TimeUnit.SECONDS);
+    assertTrue("did not evict entries in in time", await);
+    for (String key : removedKeys) {
+      assertNull("key " + key + " still present!", cache.get(key));
+      allKeys.remove(key);
+    }
+    for (RemovalCause cause : removalCauses) {
+      assertEquals(RemovalCause.SIZE, cause);
+    }
+    // check total size of remaining items
+    long total = 0;
+    for (String key : allKeys) {
+      Accountable a = cache.get(key);
+      assertNotNull("missing value for key " + key, a);
+      total += a.ramBytesUsed();
+    }
+    assertTrue("total ram bytes should be greater than 0", total > 0);
+    assertTrue("total ram bytes exceeded limit", total < 1024 * 1024);
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
index 5844dac..271e9a9 100644
--- a/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestFastLRUCache.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.search;
 
+import org.apache.commons.math3.stat.descriptive.SummaryStatistics;
 import org.apache.lucene.index.Term;
 import org.apache.lucene.search.Query;
 import org.apache.lucene.search.WildcardQuery;
@@ -33,6 +34,7 @@
 import java.util.HashMap;
 import java.util.Map;
 import java.util.Random;
+import java.util.TreeMap;
 import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicInteger;
@@ -52,14 +54,14 @@
 
   public void testPercentageAutowarm() throws IOException {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
-    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
-    MetricsMap metrics = fastCache.getMetricsMap();
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
     params.put("initialSize", "10");
     params.put("autowarmCount", "100%");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = fastCache.init(params, null, cr);
+    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
+    MetricsMap metrics = fastCache.getMetricsMap();
     fastCache.setState(SolrCache.State.LIVE);
     for (int i = 0; i < 101; i++) {
       fastCache.put(i + 1, "" + (i + 1));
@@ -72,9 +74,9 @@
     assertEquals(101L, nl.get("inserts"));
     assertEquals(null, fastCache.get(1));  // first item put in should be the first out
     FastLRUCache<Object, Object> fastCacheNew = new FastLRUCache<>();
+    fastCacheNew.init(params, o, cr);
     fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope);
     metrics = fastCacheNew.getMetricsMap();
-    fastCacheNew.init(params, o, cr);
     fastCacheNew.warm(null, fastCache);
     fastCacheNew.setState(SolrCache.State.LIVE);
     fastCache.close();
@@ -102,21 +104,21 @@
   
   private void doTestPercentageAutowarm(int limit, int percentage, int[] hits, int[]misses) {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
-    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", String.valueOf(limit));
     params.put("initialSize", "10");
     params.put("autowarmCount", percentage + "%");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = fastCache.init(params, null, cr);
+    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
     fastCache.setState(SolrCache.State.LIVE);
     for (int i = 1; i <= limit; i++) {
       fastCache.put(i, "" + i);//adds numbers from 1 to 100
     }
 
     FastLRUCache<Object, Object> fastCacheNew = new FastLRUCache<>();
-    fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope);
     fastCacheNew.init(params, o, cr);
+    fastCacheNew.initializeMetrics(metricManager, registry, "foo", scope);
     fastCacheNew.warm(null, fastCache);
     fastCacheNew.setState(SolrCache.State.LIVE);
     fastCache.close();
@@ -136,12 +138,12 @@
   
   public void testNoAutowarm() throws IOException {
     FastLRUCache<Object, Object> fastCache = new FastLRUCache<>();
-    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", "100");
     params.put("initialSize", "10");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = fastCache.init(params, null, cr);
+    fastCache.initializeMetrics(metricManager, registry, "foo", scope);
     fastCache.setState(SolrCache.State.LIVE);
     for (int i = 0; i < 101; i++) {
       fastCache.put(i + 1, "" + (i + 1));
@@ -196,13 +198,13 @@
   
   public void testSimple() throws IOException {
     FastLRUCache sc = new FastLRUCache();
-    sc.initializeMetrics(metricManager, registry, "foo", scope);
     Map l = new HashMap();
     l.put("size", "100");
     l.put("initialSize", "10");
     l.put("autowarmCount", "25");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = sc.init(l, null, cr);
+    sc.initializeMetrics(metricManager, registry, "foo", scope);
     sc.setState(SolrCache.State.LIVE);
     for (int i = 0; i < 101; i++) {
       sc.put(i + 1, "" + (i + 1));
@@ -219,8 +221,8 @@
 
 
     FastLRUCache scNew = new FastLRUCache();
-    scNew.initializeMetrics(metricManager, registry, "foo", scope);
     scNew.init(l, o, cr);
+    scNew.initializeMetrics(metricManager, registry, "foo", scope);
     scNew.warm(null, sc);
     scNew.setState(SolrCache.State.LIVE);
     sc.close();
@@ -305,13 +307,13 @@
   public void testAccountable() {
     FastLRUCache<Query, DocSet> sc = new FastLRUCache<>();
     try {
-      sc.initializeMetrics(metricManager, registry, "foo", scope);
       Map l = new HashMap();
       l.put("size", "100");
       l.put("initialSize", "10");
       l.put("autowarmCount", "25");
       CacheRegenerator cr = new NoOpRegenerator();
       Object o = sc.init(l, null, cr);
+      sc.initializeMetrics(metricManager, registry, "foo", scope);
       sc.setState(SolrCache.State.LIVE);
       long initialBytes = sc.ramBytesUsed();
       WildcardQuery q = new WildcardQuery(new Term("foo", "bar"));
@@ -332,12 +334,12 @@
 
   public void testSetLimits() throws Exception {
     FastLRUCache<String, Accountable> cache = new FastLRUCache<>();
-    cache.initializeMetrics(metricManager, registry, "foo", scope);
     Map<String, String> params = new HashMap<>();
     params.put("size", "6");
     params.put("maxRamMB", "8");
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = cache.init(params, null, cr);
+    cache.initializeMetrics(metricManager, registry, "foo", scope);
     for (int i = 0; i < 6; i++) {
       cache.put("" + i, new Accountable() {
         @Override
@@ -471,7 +473,7 @@
   }
 
 
-  void cachePerfTest(final SolrCache sc, final int nThreads, final int numGets, int cacheSize, final int maxKey) {
+  double[] cachePerfTest(final SolrCache sc, final int nThreads, final int numGets, int cacheSize, final int maxKey) {
     Map l = new HashMap();
     l.put("size", ""+cacheSize);
     l.put("initialSize", ""+cacheSize);
@@ -512,37 +514,73 @@
       }
     }
 
-    System.out.println("time=" + timer.getTime() + " impl=" +sc.getClass().getSimpleName()
-                       +" nThreads= " + nThreads + " size="+cacheSize+" maxKey="+maxKey+" gets="+numGets
-                       +" hitRatio="+(1-(((double)puts.get())/numGets)));
+    double time = timer.getTime();
+    double hitRatio = (1-(((double)puts.get())/numGets));
+//    System.out.println("time=" + time + " impl=" +sc.getClass().getSimpleName()
+//                       +" nThreads= " + nThreads + " size="+cacheSize+" maxKey="+maxKey+" gets="+numGets
+//                       +" hitRatio="+(1-(((double)puts.get())/numGets)));
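+    // return {elapsed time, hit ratio} so callers can aggregate results across runs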
+    return new double[]{time, hitRatio};
   }
 
-  void perfTestBoth(int nThreads, int numGets, int cacheSize, int maxKey) {
-    cachePerfTest(new LRUCache(), nThreads, numGets, cacheSize, maxKey);
-    cachePerfTest(new FastLRUCache(), nThreads, numGets, cacheSize, maxKey);
+  private int NUM_RUNS = 5;
+  void perfTestBoth(int maxThreads, int numGets, int cacheSize, int maxKey,
+                    Map<String, Map<String, SummaryStatistics>> timeStats,
+                    Map<String, Map<String, SummaryStatistics>> hitStats) {
+    for (int nThreads = 1 ; nThreads <= maxThreads; nThreads++) {
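+      // for this thread count, run each cache implementation NUM_RUNS times and
+      // accumulate elapsed time and hit ratio per (test configuration, cache class) pair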
+      String testKey = "threads=" + nThreads + ",gets=" + numGets + ",size=" + cacheSize + ",maxKey=" + maxKey;
+      System.err.println(testKey);
+      for (int i = 0; i < NUM_RUNS; i++) {
+        double[] data = cachePerfTest(new LRUCache(), nThreads, numGets, cacheSize, maxKey);
+        timeStats.computeIfAbsent(testKey, k -> new TreeMap<>())
+            .computeIfAbsent("LRUCache", k -> new SummaryStatistics())
+            .addValue(data[0]);
+        hitStats.computeIfAbsent(testKey, k -> new TreeMap<>())
+            .computeIfAbsent("LRUCache", k -> new SummaryStatistics())
+            .addValue(data[1]);
+        data = cachePerfTest(new CaffeineCache(), nThreads, numGets, cacheSize, maxKey);
+        timeStats.computeIfAbsent(testKey, k -> new TreeMap<>())
+            .computeIfAbsent("CaffeineCache", k -> new SummaryStatistics())
+            .addValue(data[0]);
+        hitStats.computeIfAbsent(testKey, k -> new TreeMap<>())
+            .computeIfAbsent("CaffeineCache", k -> new SummaryStatistics())
+            .addValue(data[1]);
+        data = cachePerfTest(new FastLRUCache(), nThreads, numGets, cacheSize, maxKey);
+        timeStats.computeIfAbsent(testKey, k -> new TreeMap<>())
+            .computeIfAbsent("FastLRUCache", k -> new SummaryStatistics())
+            .addValue(data[0]);
+        hitStats.computeIfAbsent(testKey, k -> new TreeMap<>())
+            .computeIfAbsent("FastLRUCache", k -> new SummaryStatistics())
+            .addValue(data[1]);
+      }
+    }
   }
 
+  int NUM_THREADS = 4;
   /***
       public void testCachePerf() {
+        Map<String, Map<String, SummaryStatistics>> timeStats = new TreeMap<>();
+        Map<String, Map<String, SummaryStatistics>> hitStats = new TreeMap<>();
       // warmup
-      perfTestBoth(2, 100000, 100000, 120000);
-      perfTestBoth(1, 2000000, 100000, 100000); // big cache, 100% hit ratio
-      perfTestBoth(2, 2000000, 100000, 100000); // big cache, 100% hit ratio
-      perfTestBoth(1, 2000000, 100000, 120000); // big cache, bigger hit ratio
-      perfTestBoth(2, 2000000, 100000, 120000); // big cache, bigger hit ratio
-      perfTestBoth(1, 2000000, 100000, 200000); // big cache, ~50% hit ratio
-      perfTestBoth(2, 2000000, 100000, 200000); // big cache, ~50% hit ratio
-      perfTestBoth(1, 2000000, 100000, 1000000); // big cache, ~10% hit ratio
-      perfTestBoth(2, 2000000, 100000, 1000000); // big cache, ~10% hit ratio
+      perfTestBoth(NUM_THREADS, 100000, 100000, 120000, new HashMap<>(), new HashMap<>());
 
-      perfTestBoth(1, 2000000, 1000, 1000); // small cache, ~100% hit ratio
-      perfTestBoth(2, 2000000, 1000, 1000); // small cache, ~100% hit ratio
-      perfTestBoth(1, 2000000, 1000, 1200); // small cache, bigger hit ratio
-      perfTestBoth(2, 2000000, 1000, 1200); // small cache, bigger hit ratio
-      perfTestBoth(1, 2000000, 1000, 2000); // small cache, ~50% hit ratio
-      perfTestBoth(2, 2000000, 1000, 2000); // small cache, ~50% hit ratio
-      perfTestBoth(1, 2000000, 1000, 10000); // small cache, ~10% hit ratio
-      perfTestBoth(2, 2000000, 1000, 10000); // small cache, ~10% hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 100000, 100000, timeStats, hitStats); // big cache, 100% hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 100000, 120000, timeStats, hitStats); // big cache, bigger hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 100000, 200000, timeStats, hitStats); // big cache, ~50% hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 100000, 1000000, timeStats, hitStats); // big cache, ~10% hit ratio
+
+      perfTestBoth(NUM_THREADS, 2000000, 1000, 1000, timeStats, hitStats); // small cache, ~100% hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 1000, 1200, timeStats, hitStats); // small cache, bigger hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 1000, 2000, timeStats, hitStats); // small cache, ~50% hit ratio
+      perfTestBoth(NUM_THREADS, 2000000, 1000, 10000, timeStats, hitStats); // small cache, ~10% hit ratio
+
+        System.out.println("\n=====================\n");
+        timeStats.forEach((testKey, map) -> {
+          Map<String, SummaryStatistics> hits = hitStats.get(testKey);
+          System.out.println("* " + testKey);
+          map.forEach((type, summary) -> {
+            System.out.println("\t" + String.format("%14s", type) + "\ttime " + summary.getMean() + "\thitRatio " + hits.get(type).getMean());
+          });
+        });
       }
   ***/
 
diff --git a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
index 05e7557..7989d8e 100644
--- a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
@@ -63,7 +63,7 @@
   @Test
   public void testTimeDecayParams() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      LFUCache cacheDecayTrue = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayTrue")).get();
+      LFUCache cacheDecayTrue = (LFUCache) searcher.getCache("lfuCacheDecayTrue");
       assertNotNull(cacheDecayTrue);
       Map<String,Object> stats = cacheDecayTrue.getMetricsMap().getValue();
       assertTrue((Boolean) stats.get("timeDecay"));
@@ -74,7 +74,7 @@
       addCache(cacheDecayTrue, 11, 12, 13, 14, 15);
       assertCache(cacheDecayTrue, 1, 2, 3, 4, 5, 12, 13, 14, 15);
 
-      LFUCache cacheDecayDefault = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayDefault")).get();
+      LFUCache cacheDecayDefault = (LFUCache) searcher.getCache("lfuCacheDecayDefault");
       assertNotNull(cacheDecayDefault);
       stats = cacheDecayDefault.getMetricsMap().getValue();
       assertTrue((Boolean) stats.get("timeDecay"));
@@ -88,7 +88,7 @@
       addCache(cacheDecayDefault, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21);
       assertCache(cacheDecayDefault, 1, 2, 3, 4, 5, 17, 18, 19, 20, 21);
 
-      LFUCache cacheDecayFalse = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayFalse")).get();
+      LFUCache cacheDecayFalse = (LFUCache) searcher.getCache("lfuCacheDecayFalse");
       assertNotNull(cacheDecayFalse);
       stats = cacheDecayFalse.getMetricsMap().getValue();
       assertFalse((Boolean) stats.get("timeDecay"));
diff --git a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
index b11ff3e..77663df 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/RangeFacetCloudTest.java
@@ -48,7 +48,7 @@
  * Builds a random index of a few simple fields, maintaining an in-memory model of the expected
  * doc counts so that we can verify the results of range facets w/ nested field facets that need refinement.
  *
- * The focus here is on stressing the casees where the document values fall directonly on the 
+ * The focus here is on stressing the cases where the document values fall directly on the
  * range boundaries, and how the various "include" options affects refinement.
  */
 public class RangeFacetCloudTest extends SolrCloudTestCase {
@@ -63,8 +63,7 @@
   private static final int NUM_RANGE_VALUES = 6;
   private static final int TERM_VALUES_RANDOMIZER = 100;
 
-  // TODO: add 'count asc' once SOLR-12343 is fixed
-  private static final List<String> SORTS = Arrays.asList("count desc", "index asc", "index desc");
+  private static final List<String> SORTS = Arrays.asList("count desc", "count asc", "index asc", "index desc");
   
   private static final List<EnumSet<FacetRangeOther>> OTHERS = buildListOfFacetRangeOtherOptions();
   private static final List<FacetRangeOther> BEFORE_AFTER_BETWEEN
@@ -136,20 +135,20 @@
             ("q", "*:*", "rows", "0", "json.facet",
              // exclude a single low value from our ranges
              "{ foo:{ type:range, field:"+INT_FIELD+" start:1, end:5, gap:1"+otherStr+include+subFacet+" } }");
-        
+
           final QueryResponse rsp = cluster.getSolrClient().query(solrQuery);
           try {
             final NamedList<Object> foo = ((NamedList<NamedList<Object>>)rsp.getResponse().get("facets")).get("foo");
             final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets");
-            
+
             assertEquals("num buckets", 4, buckets.size());
             for (int i = 0; i < 4; i++) {
               int expectedVal = i+1;
               assertBucket("bucket#" + i, expectedVal, modelVals(expectedVal), subFacetLimit, buckets.get(i));
             }
-            
+
             assertBeforeAfterBetween(other, modelVals(0), modelVals(5), modelVals(1,4), subFacetLimit, foo);
-            
+
           } catch (AssertionError|RuntimeException ae) {
             throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
           }
@@ -157,7 +156,7 @@
       }
     }
   }
-  
+
   public void testInclude_Lower_Gap2() throws Exception {
     for (boolean doSubFacet : Arrays.asList(false, true)) {
       final Integer subFacetLimit = pickSubFacetLimit(doSubFacet);
@@ -538,10 +537,6 @@
           } catch (AssertionError|RuntimeException ae) {
             throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
           }
-
-
-
-          
         }
       }
     }
@@ -582,6 +577,137 @@
     }
   }
 
+  public void testRangeWithInterval() throws Exception {
+    for (boolean doSubFacet : Arrays.asList(false, true)) {
+      final Integer subFacetLimit = pickSubFacetLimit(doSubFacet);
+      final CharSequence subFacet = makeSubFacet(subFacetLimit);
+      for (boolean incUpper : Arrays.asList(false, true)) {
+        String incUpperStr = ",inclusive_to:"+incUpper;
+        final SolrQuery solrQuery = new SolrQuery
+            ("q", "*:*", "rows", "0", "json.facet",
+                "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:2"+ incUpperStr+ "}," +
+                    "{from:2, to:3"+ incUpperStr +"},{from:3, to:4"+ incUpperStr +"},{from:4, to:5"+ incUpperStr+"}]"
+                    + subFacet + " } }");
+
+        final QueryResponse rsp = cluster.getSolrClient().query(solrQuery);
+        try {
+          final NamedList<Object> foo = ((NamedList<NamedList<Object>>) rsp.getResponse().get("facets")).get("foo");
+          final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets");
+
+          assertEquals("num buckets", 4, buckets.size());
+          for (int i = 0; i < 4; i++) {
+            String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper? "]": ")");
+            ModelRange modelVals = incUpper? modelVals(i+1, i+2) : modelVals(i+1);
+            assertBucket("bucket#" + i, expectedVal, modelVals, subFacetLimit, buckets.get(i));
+          }
+        } catch (AssertionError | RuntimeException ae) {
+          throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
+        }
+      }
+    }
+  }
+
+  public void testRangeWithOldIntervalFormat() throws Exception {
+    for (boolean doSubFacet : Arrays.asList(false, true)) {
+      final Integer subFacetLimit = pickSubFacetLimit(doSubFacet);
+      final CharSequence subFacet = makeSubFacet(subFacetLimit);
+      for (boolean incUpper : Arrays.asList(false, true)) {
+        String incUpperStr = incUpper? "]\"":")\"";
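+        // same ranges as testRangeWithInterval, expressed in the old single-string syntax, e.g. range:"[1,2]" or range:"[1,2)"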
+        final SolrQuery solrQuery = new SolrQuery
+            ("q", "*:*", "rows", "0", "json.facet",
+                "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{range:\"[1,2"+ incUpperStr+ "}," +
+                    "{range:\"[2,3"+ incUpperStr +"},{range:\"[3,4"+ incUpperStr +"},{range:\"[4,5"+ incUpperStr+"}]"
+                    + subFacet + " } }");
+
+        final QueryResponse rsp = cluster.getSolrClient().query(solrQuery);
+        try {
+          final NamedList<Object> foo = ((NamedList<NamedList<Object>>) rsp.getResponse().get("facets")).get("foo");
+          final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets");
+
+          assertEquals("num buckets", 4, buckets.size());
+          for (int i = 0; i < 4; i++) {
+            String expectedVal = "[" + (i + 1) + "," + (i + 2) + (incUpper? "]": ")");
+            ModelRange modelVals = incUpper? modelVals(i+1, i+2) : modelVals(i+1);
+            assertBucket("bucket#" + i, expectedVal, modelVals, subFacetLimit, buckets.get(i));
+          }
+        } catch (AssertionError | RuntimeException ae) {
+          throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
+        }
+      }
+    }
+  }
+
+  public void testIntervalWithMincount() throws Exception {
+    for (boolean doSubFacet : Arrays.asList(false, true)) {
+      final Integer subFacetLimit = pickSubFacetLimit(doSubFacet);
+      final CharSequence subFacet = makeSubFacet(subFacetLimit);
+
+      long mincount_to_use = -1;
+      Object expected_mincount_bucket_val = null;
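+      // first query without mincount to learn both bucket counts, then re-query with a mincount
+      // just above the smaller count so that only the larger bucket (if any) survives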
+
+      // without mincount
+      SolrQuery solrQuery = new SolrQuery(
+          "q", "*:*", "rows", "0", "json.facet",
+          "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:3},{from:3, to:5}]" +
+              subFacet + " } }"
+      );
+
+      QueryResponse rsp = cluster.getSolrClient().query(solrQuery);
+      try {
+        final NamedList<Object> foo = ((NamedList<NamedList<Object>>)rsp.getResponse().get("facets")).get("foo");
+        final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets");
+
+        assertEquals("num buckets", 2, buckets.size());
+
+        // upper is not included
+        assertBucket("bucket#0", "[1,3)", modelVals(1,2), subFacetLimit, buckets.get(0));
+        assertBucket("bucket#1", "[3,5)", modelVals(3,4), subFacetLimit, buckets.get(1));
+
+        // if we've made it this far, then our buckets match the model
+        // now use our buckets to pick a mincount to use based on the MIN(+1) count seen
+        long count0 = ((Number)buckets.get(0).get("count")).longValue();
+        long count1 = ((Number)buckets.get(1).get("count")).longValue();
+
+        mincount_to_use = 1 + Math.min(count0, count1);
+        if (count0 > count1) {
+          expected_mincount_bucket_val = buckets.get(0).get("val");
+        } else if (count1 > count0) {
+          expected_mincount_bucket_val = buckets.get(1).get("val");
+        }
+
+      } catch (AssertionError|RuntimeException ae) {
+        throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
+      }
+
+      // with mincount
+      solrQuery = new SolrQuery(
+          "q", "*:*", "rows", "0", "json.facet",
+          "{ foo:{ type:range, field:" + INT_FIELD + " ranges:[{from:1, to:3},{from:3, to:5}]" +
+              ",mincount:" + mincount_to_use + subFacet + " } }"
+      );
+
+      rsp = cluster.getSolrClient().query(solrQuery);
+      try {
+        final NamedList<Object> foo = ((NamedList<NamedList<Object>>)rsp.getResponse().get("facets")).get("foo");
+        final List<NamedList<Object>> buckets = (List<NamedList<Object>>) foo.get("buckets");
+
+        if (null == expected_mincount_bucket_val) {
+          assertEquals("num buckets", 0, buckets.size());
+        } else {
+          assertEquals("num buckets", 1, buckets.size());
+          final Object actualBucket = buckets.get(0);
+          if (expected_mincount_bucket_val.equals("[1,3)")) {
+            assertBucket("bucket#0(0)", "[1,3)", modelVals(1,2), subFacetLimit, actualBucket);
+          } else {
+            assertBucket("bucket#0(1)", "[3,5)", modelVals(3,4), subFacetLimit, actualBucket);
+          }
+        }
+      } catch (AssertionError|RuntimeException ae) {
+        throw new AssertionError(solrQuery.toString() + " -> " + rsp.toString() + " ===> " + ae.getMessage(), ae);
+      }
+    }
+  }
+
   /**
    * Helper method for validating a single 'bucket' from a Range facet.
    *
@@ -592,7 +718,7 @@
    * @param actualBucket the actual bucket returned from a query for all assertions to be conducted against.
    */
   private static void assertBucket(final String label,
-                                   final Integer expectedVal,
+                                   final Object expectedVal,
                                    final ModelRange expectedRangeValues,
                                    final Integer subFacetLimitUsed,
                                    final Object actualBucket) {
@@ -614,7 +740,7 @@
         expectedCount += RANGE_MODEL[i];
         toMerge.add(TERM_MODEL[i]);
       }
-      
+
       assertEqualsHACK("count", expectedCount, bucket.get("count"));
       
       // merge the maps of our range values by summing the (int) values on key collisions
@@ -650,7 +776,7 @@
   }
   
   /**
-   * A convinience method for calling {@link #assertBucket} on the before/after/between buckets 
+   * A convenience method for calling {@link #assertBucket} on the before/after/between buckets
    * of a facet result, based on the {@link FacetRangeOther} specified for this facet.
    * 
    * @see #assertBucket
@@ -686,7 +812,7 @@
   private static final class ModelRange {
     public final int lower;
     public final int upper;
-    /** Don't use, use the convinience methods */
+    /** Don't use, use the convenience methods */
     public ModelRange(int lower, int upper) {
       if (lower < 0 || upper < 0) {
         assert(lower < 0 && upper < lower);
@@ -771,13 +897,13 @@
     String val = other.toString();
     if (random().nextBoolean()) {
       // two valid syntaxes to randomize between:
-      // - a JSON list of items (conviniently the default toString of EnumSet),
-      // - a single quoted string containing the comma seperated list
+      // - a JSON list of items (conveniently the default toString of EnumSet),
+      // - a single quoted string containing the comma separated list
       val = val.replaceAll("\\[|\\]","'");
 
       // HACK: work around SOLR-12539...
       //
-      // when sending a single string containing a comma seperated list of values, JSON Facets 'other'
+      // when sending a single string containing a comma separated list of values, JSON Facets 'other'
       // parsing can't handle any leading (or trailing?) whitespace
       val = val.replaceAll("\\s","");
     }
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
index 40dabea..461611c 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacetRefinement.java
@@ -211,7 +211,7 @@
         null,
         null
     );
-    
+
     // same test, but nested in a terms facet
     doTestRefine("{top:{type:terms, field:Afield, facet:{x : {type:terms, field:X, limit:2, refine:true} } } }",
         "{top: {buckets:[{val:'A', count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}], more:true} } ] } }",
@@ -290,7 +290,39 @@
                  // refinement...
                  null,
                  null);
-    
+
+    // same test, but nested in range facet with ranges
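+    // (only one shard returned x1, so the merger asks the other shard to refine x1 under the "[0,1)" bucket;
+    // _s carries per-bucket sub-facet refinement requests and _l lists the bucket values to refine)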
+    doTestRefine("{top:{type:range, field:R, ranges:[{from:0, to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }",
+        "{top: {buckets:[{val:\"[0,1)\", count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }",
+        "{top: {buckets:[{val:\"[0,1)\", count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }",
+        null,
+        "=={top: {" +
+            "_s:[  [\"[0,1)\" , {x:{_l:[x1]}} ]  ]" +
+            "    }  " +
+            "}"
+    );
+
+    doTestRefine("{top:{type:range, field:R, ranges:[{from:\"*\", to:1}], facet:{x : {type:terms, field:X, limit:2, refine:true} } } }",
+        "{top: {buckets:[{val:\"[*,1)\", count:2, x:{buckets:[{val:x1, count:5},{val:x2, count:3}],more:true} } ] } }",
+        "{top: {buckets:[{val:\"[*,1)\", count:1, x:{buckets:[{val:x2, count:4},{val:x3, count:2}],more:true} } ] } }",
+        null,
+        "=={top: {" +
+            "_s:[  [\"[*,1)\" , {x:{_l:[x1]}} ]  ]" +
+            "    }  " +
+            "}"
+    );
+
+    // a range facet w/o any sub facets shouldn't require any refinement
+    // other and include ignored for ranges
+    doTestRefine("{top:{type:range, other:all, field:R, ranges:[{from:0, to:2},{from:2, to:3}] } }" +
+            // phase #1
+            "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:2}]," +
+            "       } }",
+        "{top: {buckets:[{val:\"[0,2)\", count:2}, {val:\"[2,3)\", count:19}]," +
+            "       } }",
+        // refinement...
+        null,
+        null);
 
     // for testing partial _p, we need a partial facet within a partial facet
     doTestRefine("{top:{type:terms, field:Afield, refine:true, limit:1, facet:{x : {type:terms, field:X, limit:1, refine:true} } } }",
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index b70c8dd..b3586ee 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -295,7 +295,7 @@
                );
     }
   }
-  
+
   /**
    * whitebox sanity checks that a shard request range facet that returns "between" or "after"
    * will cause the correct "actual_end" to be returned
@@ -3208,6 +3208,256 @@
   }
 
   @Test
+  public void testRangeFacetWithRanges() throws Exception {
+    Client client = Client.localClient();
+    client.deleteByQuery("*:*", null);
+    indexSimple(client);
+
+    final SolrParams p = params("q", "*:*", "rows", "0");
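+    // the range strings use interval notation: '[' / ']' include the endpoint, '(' / ')' exclude it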
+    // with lower and upper include
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i, ranges:[{range:\"  [-5,7] \"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}");
+
+    // with lower include and upper exclude
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"[-5,7)\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,7)\",count:4}]}}");
+
+    // with lower exclude and upper include
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7]\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}");
+
+    // with lower and upper exclude
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}");
+
+    // 'other' and 'include' are not supported together with ranges,
+    // but they don't cause an error because the params are simply never consumed
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"(-5,7)\"}],include:\"lower\",other:[\"after\"]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}");
+
+    // with mincount>0
+    client.testJQ(
+        params(p, "json.facet", "{price:{type : range,field : num_i,mincount:3," +
+            "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}"
+        ),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}");
+
+    // with multiple ranges
+    client.testJQ(
+        params(p, "json.facet", "{price:{type : range,field : num_i," +
+            "ranges:[{range:\"(-5,7)\"},{range:\"(-5,7]\"}]}}"
+        ),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2},{val:\"(-5,7]\",count:3}]}}");
+
+    // with * as one of the values
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"(*,10]\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(*,10]\",count:5}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"[-5,*)\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{range:\"[*,*]\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[*,*]\",count:5}]}}");
+  }
+
+  @Test
+  public void testRangeFacetWithRangesInNewFormat() throws Exception {
+    Client client = Client.localClient();
+    client.deleteByQuery("*:*", null);
+    indexSimple(client);
+    SolrParams p = params("q", "*:*", "rows", "0");
+
+    //case without inclusive params
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,7)\",count:4}]}}");
+
+    //case without key param and to included
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}");
+
+    //case with all params
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:true ,inclusive_to:true}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,7]\",count:5}]}}");
+
+    // from and to excluded
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7)\",count:2}]}}");
+
+    // from excluded and to included
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}");
+
+    // multiple ranges
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,include:[\"lower\"], outer:\"before\"," +
+            "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3},{val:\"(-5,7)\",count:2}]}}");
+
+    // with mincount>0
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,mincount:3" +
+            "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{from:-5, to:7,inclusive_from:false ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3}]}}");
+
+    // mix of old and new formats
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i," +
+            "ranges:[{from:-5, to:7,inclusive_from:false ,inclusive_to:true},{range:\"(-5,7)\"}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,7]\",count:3},{val:\"(-5,7)\",count:2}]}}");
+
+    // from==to
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:true}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,-5]\",count:0}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:false ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(-5,-5)\",count:0}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,-5)\",count:0}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:-5,inclusive_from:true ,inclusive_to:true}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,-5]\",count:2}]}}");
+
+    // with * as one of the values
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:10,inclusive_from:false ,inclusive_to:true}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"(*,10]\",count:5}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5, to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:-5,inclusive_from:true ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[-5,*)\",count:5}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{from:\"*\", to:\"*\",inclusive_from:true ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[*,*)\",count:5}]}}");
+    client.testJQ(params(p, "json.facet"
+        , "{price:{type : range,field : num_i,ranges:[{inclusive_from:true ,inclusive_to:false}]}}"),
+        "facets=={count:6, price:{buckets:[{val:\"[*,*)\",count:5}]}}");
+  }
+
+  @Test
+  public void testRangeFacetsErrorCases() throws Exception {
+    Client client = Client.localClient();
+    client.deleteByQuery("*:*", null);
+    indexSimple(client);
+
+    SolrParams params = params("q", "*:*", "rows", "0");
+
+    // invalid format for ranges
+    SolrException ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i,start:-10,end:10,gap:2," +
+            "ranges:[{key:\"0-200\", to:200}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("Cannot set gap/start/end and ranges params together", ex.getMessage());
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:bleh}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("Expected List for ranges but got String"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[bleh]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("Expected Map for range but got String"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{from:0, to:200, inclusive_to:bleh}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("Expected boolean type for param 'inclusive_to' but got String"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{from:0, to:200, inclusive_from:bleh}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("Expected boolean type for param 'inclusive_from' but got String"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{from:bleh, to:200}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("Can't parse value bleh for field: num_i", ex.getMessage());
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{from:0, to:bleh}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("Can't parse value bleh for field: num_i", ex.getMessage());
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{from:200, to:0}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("'from' is higher than 'to' in range for key: [200,0)", ex.getMessage());
+
+    // with old format
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{range:\"\"}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("empty facet range"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{range:\"bl\"}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("Invalid start character b in facet range bl"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{range:\"(bl\"}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertTrue(ex.getMessage().contains("Invalid end character l in facet range (bl"));
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{range:\"(bleh,12)\"}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("Can't parse value bleh for field: num_i", ex.getMessage());
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{range:\"(12,bleh)\"}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("Can't parse value bleh for field: num_i", ex.getMessage());
+
+    ex = expectThrows(SolrException.class,
+        () -> h.query(req(params, "json.facet", "{price:{type :range, field : num_i," +
+            "ranges:[{range:\"(200,12)\"}]}}"))
+    );
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+    assertEquals("'start' is higher than 'end' in range for key: (200,12)", ex.getMessage());
+  }
+
+  @Test
   public void testOtherErrorCases() throws Exception {
     Client client = Client.localClient();
     client.deleteByQuery("*:*", null);
@@ -3273,6 +3523,39 @@
         "Expected boolean type for param 'perSeg' but got Long = 2 , path=facet/cat_s",
         req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,perSeg:2}}"),
         SolrException.ErrorCode.BAD_REQUEST);
+
+    assertQEx("Should fail as sort is invalid",
+        "Invalid sort option 'bleh' for field 'cat_s'",
+        req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh}}"),
+        SolrException.ErrorCode.BAD_REQUEST);
+
+    assertQEx("Should fail as sort order is invalid",
+        "Unknown Sort direction 'bleh'",
+        req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:{count: bleh}}}"),
+        SolrException.ErrorCode.BAD_REQUEST);
+
+    // test for prelim_sort
+    assertQEx("Should fail as prelim_sort is invalid",
+        "Invalid prelim_sort option 'bleh' for field 'cat_s'",
+        req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,prelim_sort:bleh}}"),
+        SolrException.ErrorCode.BAD_REQUEST);
+
+    assertQEx("Should fail as prelim_sort map is invalid",
+        "Invalid prelim_sort option '{bleh=desc}' for field 'cat_s'",
+        req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,prelim_sort:{bleh:desc}}}"),
+        SolrException.ErrorCode.BAD_REQUEST);
+
+    // with nested facet
+    assertQEx("Should fail as prelim_sort is invalid",
+        "Invalid sort option 'bleh' for field 'id'",
+        req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" +
+            "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:bleh}}}}"),
+        SolrException.ErrorCode.BAD_REQUEST);
+
+    assertQ("Should pass as sort is proper",
+        req("q", "*:*", "rows", "0", "json.facet", "{cat_s:{type:terms,field:cat_s,sort:bleh,facet:" +
+            "{bleh:\"unique(cat_s)\",id:{type:terms,field:id,sort:{bleh:desc},facet:{bleh:\"unique(id)\"}}}}}")
+    );
   }
 
 
diff --git a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java
index bcc936d..9f34db0 100644
--- a/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java
+++ b/solr/core/src/test/org/apache/solr/search/json/TestJsonRequest.java
@@ -19,12 +19,13 @@
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.JSONTestUtil;
 import org.apache.solr.SolrTestCaseHS;
-
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
+
 @LuceneTestCase.SuppressCodecs({"Lucene3x","Lucene40","Lucene41","Lucene42","Lucene45","Appending"})
 public class TestJsonRequest extends SolrTestCaseHS {
 
@@ -79,6 +80,15 @@
         , "response/numFound==2"
     );
 
+    // invalid value
+    SolrException ex = expectThrows(SolrException.class, () -> client.testJQ(params("q", "*:*", "json", "5")));
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, ex.code());
+
+    // this is to verify other json params are not affected
+    client.testJQ( params("q", "cat_s:A", "json.limit", "1"),
+        "response/numFound==2"
+    );
+
     // test multiple json params
     client.testJQ( params("json","{query:'cat_s:A'}", "json","{filter:'where_s:NY'}")
         , "response/numFound==1"
diff --git a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
index e96fe29..9b848d1 100644
--- a/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
+++ b/solr/core/src/test/org/apache/solr/search/stats/TestDefaultStatsCache.java
@@ -41,6 +41,7 @@
   @Test 
   public void test() throws Exception {
     del("*:*");
+    commit();
     String aDocId=null;
     for (int i = 0; i < clients.size(); i++) {
       int shard = i + 1;
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 96c82ab..39c5e1c 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -138,9 +138,7 @@
 
       final SolrRequest genericReq;
       if (isUseV2Api) {
-        genericReq = new V2Request.Builder("/cluster/security/authentication")
-            .withPayload(command)
-            .withMethod(SolrRequest.METHOD.POST).build();
+        genericReq = new V2Request.Builder("/cluster/security/authentication").withMethod(SolrRequest.METHOD.POST).build();
       } else {
         genericReq = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams());
         ((GenericSolrRequest)genericReq).setContentWriter(new StringPayloadContentWriter(command, CommonParams.JSON_MIME));
diff --git a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
index 3a44673..2ea3bf0 100644
--- a/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
+++ b/solr/core/src/test/org/apache/solr/store/blockcache/BlockCacheTest.java
@@ -44,10 +44,10 @@
   public void testBlockCache() {
     int blocksInTest = 2000000;
     int blockSize = 1024;
-    
+
     int slabSize = blockSize * 4096;
     long totalMemory = 2 * slabSize;
-    
+
     BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize, blockSize);
     byte[] buffer = new byte[1024];
     Random random = random();
@@ -82,7 +82,7 @@
       long t3 = System.nanoTime();
       if (blockCache.fetch(blockCacheKey, buffer)) {
         fetchTime += (System.nanoTime() - t3);
-        assertTrue(Arrays.equals(testData, buffer));
+        assertTrue("buffer content differs", Arrays.equals(testData, buffer));
       }
     }
     System.out.println("Cache Hits    = " + hitsInCache.get());
@@ -101,7 +101,7 @@
   // always returns the same thing so we don't actually have to store the bytes redundantly to check them.
   private static byte getByte(long pos) {
     // knuth multiplicative hash method, then take top 8 bits
-    return (byte) ((((int)pos) * (int)(2654435761L)) >> 24);
+    return (byte) ((((int) pos) * (int) (2654435761L)) >> 24);
 
     // just the lower bits of the block number, to aid in debugging...
     // return (byte)(pos>>10);
@@ -117,17 +117,17 @@
     final long totalMemory = 2 * slabSize;  // 2 slabs of memory, so only half of what is needed for all blocks
 
     /***
-    final int blocksInTest = 16384;  // pick something bigger than 256, since that would lead to a slab size of 64 blocks and the bitset locks would consist of a single word.
-    final int blockSize = 1024;
-    final int slabSize = blocksInTest * blockSize / 4;
-    final long totalMemory = 2 * slabSize;  // 2 slabs of memory, so only half of what is needed for all blocks
-    ***/
+     final int blocksInTest = 16384;  // pick something bigger than 256, since that would lead to a slab size of 64 blocks and the bitset locks would consist of a single word.
+     final int blockSize = 1024;
+     final int slabSize = blocksInTest * blockSize / 4;
+     final long totalMemory = 2 * slabSize;  // 2 slabs of memory, so only half of what is needed for all blocks
+     ***/
 
-    final int nThreads=64;
-    final int nReads=1000000;
-    final int readsPerThread=nReads/nThreads;
-    final int readLastBlockOdds=10; // odds (1 in N) of the next block operation being on the same block as the previous operation... helps flush concurrency issues
-    final int showErrors=50; // show first 50 validation failures
+    final int nThreads = 64;
+    final int nReads = 1000000;
+    final int readsPerThread = nReads / nThreads;
+    final int readLastBlockOdds = 10; // odds (1 in N) of the next block operation being on the same block as the previous operation... helps flush concurrency issues
+    final int showErrors = 50; // show first 50 validation failures
 
     final BlockCache blockCache = new BlockCache(new Metrics(), true, totalMemory, slabSize, blockSize);
 
@@ -142,7 +142,7 @@
 
 
     Thread[] threads = new Thread[nThreads];
-    for (int i=0; i<threads.length; i++) {
+    for (int i = 0; i < threads.length; i++) {
       final int threadnum = i;
       final long seed = rnd.nextLong();
 
@@ -168,14 +168,15 @@
         }
 
         public void test(int iter) {
-          for (int i=0; i<iter; i++) {
+          for (int i = 0; i < iter; i++) {
             test();
           }
         }
 
         public void test() {
           long block = r.nextInt(blocksInTest);
-          if (r.nextInt(readLastBlockOdds) == 0) block = lastBlock.get();  // some percent of the time, try to read the last block another thread was just reading/writing
+          if (r.nextInt(readLastBlockOdds) == 0)
+            block = lastBlock.get();  // some percent of the time, try to read the last block another thread was just reading/writing
           lastBlock.set(block);
 
 
@@ -192,7 +193,8 @@
               long globalPos = globalOffset + i;
               if (buffer[i] != getByte(globalPos)) {
                 failed.set(true);
-                if (validateFails.incrementAndGet() <= showErrors) System.out.println("ERROR: read was " + "block=" + block + " blockOffset=" + blockOffset + " len=" + len + " globalPos=" + globalPos + " localReadOffset=" + i + " got=" + buffer[i] + " expected=" + getByte(globalPos));
+                if (validateFails.incrementAndGet() <= showErrors)
+                  System.out.println("ERROR: read was " + "block=" + block + " blockOffset=" + blockOffset + " len=" + len + " globalPos=" + globalPos + " localReadOffset=" + i + " got=" + buffer[i] + " expected=" + getByte(globalPos));
                 break;
               }
             }
@@ -229,7 +231,7 @@
     System.out.println("Cache Store Fails = " + storeFails.get());
     System.out.println("Blocks with Errors = " + validateFails.get());
 
-    assertFalse( failed.get() );
+    assertFalse("cached bytes differ from expected", failed.get());
   }
 
 
@@ -245,12 +247,12 @@
 
     // TODO: introduce more randomness in cache size, hit rate, etc
     final int blocksInTest = 400;
-    final int maxEntries = blocksInTest/2;
+    final int maxEntries = blocksInTest / 2;
 
-    final int nThreads=64;
-    final int nReads=1000000;
-    final int readsPerThread=nReads/nThreads;
-    final int readLastBlockOdds=10; // odds (1 in N) of the next block operation being on the same block as the previous operation... helps flush concurrency issues
+    final int nThreads = 64;
+    final int nReads = 1000000;
+    final int readsPerThread = nReads / nThreads;
+    final int readLastBlockOdds = 10; // odds (1 in N) of the next block operation being on the same block as the previous operation... helps flush concurrency issues
     final int updateAnywayOdds = 3; // sometimes insert a new entry for the key even if one was found
     final int invalidateOdds = 20; // sometimes invalidate an entry
 
@@ -258,17 +260,24 @@
     final AtomicLong removals = new AtomicLong();
     final AtomicLong inserts = new AtomicLong();
 
-    RemovalListener<Long,Val> listener = (k, v, removalCause) -> {
-      assert v.key == k;
+    RemovalListener<Long, Val> listener = (k, v, removalCause) -> {
+      removals.incrementAndGet();
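+      // a null value means the entry was reclaimed before the listener ran;
+      // only RemovalCause.COLLECTED is acceptable in that case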
+      if (v == null) {
+        if (removalCause != RemovalCause.COLLECTED) {
+          throw new RuntimeException("Null value for key " + k + ", removalCause=" + removalCause);
+        } else {
+          return;
+        }
+      }
+      assertEquals("cache key differs from value's key", (Long) k, (Long) v.key);
       if (!v.live.compareAndSet(true, false)) {
         throw new RuntimeException("listener called more than once! k=" + k + " v=" + v + " removalCause=" + removalCause);
         // return;  // use this variant if listeners may be called more than once
       }
-      removals.incrementAndGet();
     };
 
 
-    com.github.benmanes.caffeine.cache.Cache<Long,Val> cache = Caffeine.newBuilder()
+    com.github.benmanes.caffeine.cache.Cache<Long, Val> cache = Caffeine.newBuilder()
         .removalListener(listener)
         .maximumSize(maxEntries)
         .executor(Runnable::run)
@@ -279,11 +288,12 @@
     final AtomicLong maxObservedSize = new AtomicLong();
 
     Thread[] threads = new Thread[nThreads];
-    for (int i=0; i<threads.length; i++) {
+    for (int i = 0; i < threads.length; i++) {
       final long seed = rnd.nextLong();
 
       threads[i] = new Thread() {
         Random r;
+
         @Override
         public void run() {
           try {
@@ -296,13 +306,13 @@
         }
 
         public void test(int iter) {
-          for (int i=0; i<iter; i++) {
+          for (int i = 0; i < iter; i++) {
             test();
           }
         }
 
         boolean odds(int odds) {
-          return odds > 0 && r.nextInt(odds)==0;
+          return odds > 0 && r.nextInt(odds) == 0;
         }
 
         long getBlock() {
@@ -329,7 +339,7 @@
           Val v = cache.getIfPresent(k);
           if (v != null) {
             hits.incrementAndGet();
-            assert k.equals(v.key);
+            assertEquals("cache key differs from value's key", (Long) k, (Long) v.key);
           }
 
           if (v == null || odds(updateAnywayOdds)) {
@@ -358,13 +368,10 @@
 
     // Thread.sleep(1000); // need to wait if executor is used for listener?
     long cacheSize = cache.estimatedSize();
-    System.out.println("Done! # of Elements = " + cacheSize + " inserts=" + inserts.get() + " removals=" + removals.get() + " hits=" + hits.get() +  " maxObservedSize=" + maxObservedSize);
-    assert inserts.get() - removals.get() == cacheSize;
-    assertFalse( failed.get() );
+    System.out.println("Done! # of Elements = " + cacheSize + " inserts=" + inserts.get() + " removals=" + removals.get() + " hits=" + hits.get() + " maxObservedSize=" + maxObservedSize);
+    assertEquals("cache size different from (inserts - removal)", cacheSize,  inserts.get() - removals.get());
+    assertFalse(failed.get());
   }
 
 
-
-
-
 }
diff --git a/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java b/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java
new file mode 100644
index 0000000..05fd0e7
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/update/processor/AbstractAtomicUpdatesMultivalueTestBase.java
@@ -0,0 +1,428 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.update.processor;
+
+import static org.hamcrest.CoreMatchers.hasItems;
+import static org.hamcrest.CoreMatchers.not;
+
+import java.io.IOException;
+import java.time.ZonedDateTime;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Date;
+import java.util.Optional;
+import java.util.UUID;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+import com.google.common.collect.Lists;
+import org.apache.solr.EmbeddedSolrServerTestBase;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer.RequestWriterSupplier;
+import org.apache.solr.common.util.ByteArrayUtf8CharSequence;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import com.google.common.collect.ImmutableMap;
+
+public abstract class AbstractAtomicUpdatesMultivalueTestBase extends EmbeddedSolrServerTestBase {
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    System.setProperty("enable.update.log","true");
+    initCore("solrconfig.xml", "schema.xml");
+  }
+
+  @Before
+  public void before() throws SolrServerException, IOException {
+    getSolrClient().deleteByQuery("*:*");
+  }
+
+  abstract RequestWriterSupplier getRequestWriterSupplier();
+
+  @Override
+  public synchronized EmbeddedSolrServer getSolrClient() {
+    return new EmbeddedSolrServer(h.getCoreContainer(), DEFAULT_CORE_NAME, getRequestWriterSupplier()) {
+
+      @Override
+      public void close() {
+        // do not close core container
+      }
+    };
+  }
+
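+  // asserts that a simple fieldName:queryValue query matches exactly numFound documents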
+  private static void assertQR(final String fieldName, final String queryValue, final int numFound) {
+    assertQ(req("q", fieldName + ":" + queryValue, "indent", "true"), "//result[@numFound = '" + numFound + "']");
+  }
+
+  private void runTestForField(final String fieldName, final Object[] values, final String[] queries,
+      final Optional<Function<Object,Object>> valueConverter)
+      throws SolrServerException, IOException {
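+    // index two docs that overlap on values[1] and values[2] (doc 20000 also has values[0], doc 20001 also has values[3]),
+    // then exercise atomic "remove", combined "add"/"remove", and "set" updates, verifying stored values and query counts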
+
+    final Function<Object,Object> vc = valueConverter.orElse(o -> o);
+
+    getSolrClient().add(Arrays.asList(
+        sdoc("id", "20000", fieldName, Arrays.asList(values[0], values[1], values[2])),
+        sdoc("id", "20001", fieldName, Arrays.asList(values[1], values[2], values[3]))));
+    getSolrClient().commit(true, true);
+
+    if (queries != null) {
+      assertQR(fieldName, queries[0], 1);
+      assertQR(fieldName, queries[1], 2);
+      assertQR(fieldName, queries[2], 2);
+      assertQR(fieldName, queries[3], 1);
+    }
+
+    Collection<Object> fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(3, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[3]))));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(3, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[0]))));
+
+    getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove",
+        Lists.newArrayList(values[0]))));
+    getSolrClient().commit(true, true);
+
+    if (queries != null) {
+      assertQR(fieldName, queries[0], 0);
+      assertQR(fieldName, queries[1], 2);
+      assertQR(fieldName, queries[2], 2);
+      assertQR(fieldName, queries[3], 1);
+    }
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[3]))));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(3, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[0]))));
+
+    getSolrClient().add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove",
+        Lists.newArrayList(values[0], values[1], values[2]))));
+    getSolrClient().commit(true, true);
+
+    if (queries != null) {
+      assertQR(fieldName, queries[0], 0);
+      assertQR(fieldName, queries[1], 1);
+      assertQR(fieldName, queries[2], 1);
+      assertQR(fieldName, queries[3], 1);
+    }
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[1]), vc.apply(values[2])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[3]))));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(1, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[3])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]))));
+
+    getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("add",
+        Lists.newArrayList(values[0]), "remove", Lists.newArrayList(values[1], values[2]))),
+        sdoc("id", "20001", fieldName,
+            ImmutableMap.of("add", Lists.newArrayList(values[0]), "remove", Lists.newArrayList(values[3])))));
+    getSolrClient().commit(true, true);
+
+    if (queries != null) {
+      assertQR(fieldName, queries[0], 2);
+      assertQR(fieldName, queries[1], 0);
+      assertQR(fieldName, queries[2], 0);
+      assertQR(fieldName, queries[3], 0);
+    }
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(1, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[0])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(1, fieldValues.size());
+    assertThat(fieldValues, hasItems(vc.apply(values[0])));
+    assertThat(fieldValues, not(hasItems(vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3]))));
+
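+    // atomic "set" replaces all existing values on both documents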
+    getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("set",
+        Lists.newArrayList(values[0], values[1], values[2], values[3]))), sdoc("id", "20001", fieldName,
+            ImmutableMap.of("set",
+                Lists.newArrayList(values[0], values[1], values[2], values[3])))));
+    getSolrClient().commit(true, true);
+
+    if (queries != null) {
+      assertQR(fieldName, queries[0], 2);
+      assertQR(fieldName, queries[1], 2);
+      assertQR(fieldName, queries[2], 2);
+      assertQR(fieldName, queries[3], 2);
+    }
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(4, fieldValues.size());
+    assertThat(fieldValues,
+        hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(4, fieldValues.size());
+    assertThat(fieldValues,
+        hasItems(vc.apply(values[0]), vc.apply(values[1]), vc.apply(values[2]), vc.apply(values[3])));
+  }
+
+  private String[] toStringArray(final Object[] values) {
+    return Arrays.stream(values).map(Object::toString).toArray(String[]::new);
+  }
+
+  private void runTestForFieldWithQuery(final String fieldName, final Object[] values)
+      throws SolrServerException, IOException {
+    runTestForField(fieldName, values, toStringArray(values), Optional.empty());
+  }
+
+  private void runTestForFieldWithQuery(final String fieldName, final Object[] values, final String[] queries)
+      throws SolrServerException, IOException {
+    runTestForField(fieldName, values, queries, Optional.empty());
+  }
+
+  private void runTestForFieldWithQuery(final String fieldName, final Object[] values, final String[] queries,
+      final Function<Object,Object> valueConverter)
+      throws SolrServerException, IOException {
+    runTestForField(fieldName, values, queries, Optional.of(valueConverter));
+  }
+
+  private void runTestForFieldWithoutQuery(final String fieldName, final Object[] values)
+      throws SolrServerException, IOException {
+    runTestForField(fieldName, values, null, Optional.empty());
+  }
+
+  @Test
+  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13762")
+  public void testMultivalueBinaryField() throws SolrServerException, IOException {
+    runTestForFieldWithoutQuery("binaryRemove",
+        new byte[][] {new byte[] {0}, new byte[] {1}, new byte[] {2}, new byte[] {3}});
+  }
+
+  @Test
+  public void testMultivalueBooleanField() throws SolrServerException, IOException {
+
+    final String fieldName = "booleanRemove";
+
+    getSolrClient().add(Arrays.asList(
+        sdoc("id", "20000", fieldName, Lists.newArrayList(true, false)),
+        sdoc("id", "20001", fieldName, Lists.newArrayList(false, true))));
+    getSolrClient().commit(true, true);
+
+    assertQR(fieldName, "true", 2);
+    assertQR(fieldName, "false", 2);
+
+    Collection<Object> fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(true, false));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(true, false));
+
+    getSolrClient().add(sdoc("id", "20000", fieldName, ImmutableMap.of("remove",
+        Lists.newArrayList(false))));
+    getSolrClient().commit(true, true);
+
+    assertQR(fieldName, "true", 2);
+    assertQR(fieldName, "false", 1);
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(1, fieldValues.size());
+    assertThat(fieldValues, hasItems(true));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(true, false));
+
+    getSolrClient().add(sdoc("id", "20001", fieldName, ImmutableMap.of("remove",
+        Lists.newArrayList(true, false))));
+    getSolrClient().commit(true, true);
+
+    assertQR(fieldName, "true", 1);
+    assertQR(fieldName, "false", 0);
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(1, fieldValues.size());
+    assertThat(fieldValues, hasItems(true));
+    assertThat(fieldValues, not(hasItems(false)));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertNull(fieldValues);
+
+    getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("add",
+        Lists.newArrayList(false, false)))));
+    getSolrClient().commit(true, true);
+
+    assertQR(fieldName, "true", 1);
+    assertQR(fieldName, "false", 1);
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(3, fieldValues.size());
+    assertThat(fieldValues, hasItems(true, false));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertNull(fieldValues);
+
+    getSolrClient().add(Arrays.asList(sdoc("id", "20000", fieldName, ImmutableMap.of("set",
+        Lists.newArrayList(true, false))), sdoc("id", "20001", fieldName,
+            ImmutableMap.of("set",
+                Lists.newArrayList(false, true)))));
+    getSolrClient().commit(true, true);
+
+    assertQR(fieldName, "true", 2);
+    assertQR(fieldName, "false", 2);
+
+    fieldValues = getSolrClient().getById("20000").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(true, false));
+    fieldValues = getSolrClient().getById("20001").getFieldValues(fieldName);
+    assertEquals(2, fieldValues.size());
+    assertThat(fieldValues, hasItems(true, false));
+  }
+
+  @Test
+  public void testMultivalueCollationField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("collationRemove", new String[] {"cf1", "cf2", "cf3", "cf4"});
+  }
+
+  @Test
+  public void testMultivalueDatePointField() throws SolrServerException, IOException {
+
+    final String s1 = "1980-01-01T00:00:00Z";
+    final Date d1 = Date.from(ZonedDateTime.parse(s1).toInstant());
+    final String s2 = "1990-01-01T00:00:00Z";
+    final Date d2 = Date.from(ZonedDateTime.parse(s2).toInstant());
+    final String s3 = "2000-01-01T00:00:00Z";
+    final Date d3 = Date.from(ZonedDateTime.parse(s3).toInstant());
+    final String s4 = "2010-01-01T00:00:00Z";
+    final Date d4 = Date.from(ZonedDateTime.parse(s4).toInstant());
+
+    runTestForFieldWithQuery("datePointRemove", new Date[] {d1, d2, d3, d4},
+        new String[] {"\"" + s1 + "\"", "\"" + s2 + "\"", "\"" + s3 + "\"", "\"" + s4 + "\""});
+  }
+
+  @Test
+  public void testMultivalueDateRangeField() throws SolrServerException, IOException {
+
+    final String s1 = "1980-01-01T00:00:00Z";
+    final String s2 = "1990-01-01T00:00:00Z";
+    final String s3 = "2000-01-01T00:00:00Z";
+    final String s4 = "2010-01-01T00:00:00Z";
+
+    runTestForFieldWithQuery("dateRangeRemove", new String[] {s1, s2, s3, s4},
+        new String[] {"\"" + s1 + "\"", "\"" + s2 + "\"", "\"" + s3 + "\"", "\"" + s4 + "\""});
+  }
+
+  @Test
+  public void testMultivalueDoublePointField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("doublePointRemove", new Double[] {1.0d, 2.0d, 3.0d, 4.0d});
+  }
+
+  @Test
+  public void testMultivalueEnumField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("enumRemove_sev_enum", new Object[] {"Low", "Medium", "High", "Critical"});
+  }
+
+  @Test
+  public void testMultivalueEnumFieldWithNumbers() throws SolrServerException, IOException {
+    final Object[] values = new Object[] {"Low", "Medium", "High", 11};
+    runTestForFieldWithQuery("enumRemove_sev_enum", values, toStringArray(values), o -> {
+      if (Integer.valueOf(11).equals(o)) {
+        return "Critical";
+      } else {
+        return o;
+      }
+    });
+  }
+
+  @Test
+  public void testMultivalueExternalFileField() throws SolrServerException, IOException {
+    runTestForFieldWithoutQuery("externalFileRemove",
+        new String[] {"file1.txt", "file2.txt", "file3.txt", "file4.txt"});
+  }
+
+  @Test
+  public void testMultivalueFloatPointField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("floatPointRemove", new Float[] {1.0f, 2.0f, 3.0f, 4.0f});
+  }
+
+  @Test
+  public void testMultivalueICUCollationField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("icuCollationRemove", new String[] {"iuccf1", "icucf2", "icucf3", "icucf4"});
+  }
+
+  @Test
+  public void testMultivalueIntPointField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("intPointRemove", new Integer[] {1, 2, 3, 4});
+  }
+
+  @Test
+  public void testMultivalueLatLonPointSpatialField() throws SolrServerException, IOException {
+    runTestForFieldWithoutQuery("latLonPointSpatialRemove",
+        new String[] {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"});
+  }
+
+  @Test
+  public void testMultivalueLatLonField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("latLonRemove", new String[] {"1.0,-1.0", "2.0,-2.0", "3.0,-3.0", "4.0,-4.0"});
+  }
+
+  @Test
+  public void testMultivalueLongPointField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("longPointRemove", new Long[] {1l, 2l, 3l, 4l});
+  }
+
+  @Test
+  public void testMultivaluePointField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("pointRemove", new String[] {"1,1", "2,2", "3,3", "4,4"});
+  }
+
+  @Test
+  public void testMultivalueRandomSortField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("randomSortRemove", new String[] {"rsf1", "rsf2", "rsf3", "rsf4"});
+  }
+
+  @Test
+  public void testMultivalueSpatialRecursivePrefixTreeFieldType() throws SolrServerException, IOException {
+    runTestForFieldWithoutQuery("spatialRecursivePrefixTreeRemove", new String[] {"1,1", "2,2", "3,3", "4,4"});
+  }
+
+  @Test
+  public void testMultivalueStringField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("stringRemove", new String[] {"str1", "str2", "str3", "str4"});
+  }
+
+  @Test
+  public void testMultivalueStringFieldUsingCharSequence() throws SolrServerException, IOException {
+    final ByteArrayUtf8CharSequence[] values = new ByteArrayUtf8CharSequence[] {new ByteArrayUtf8CharSequence("str1"),
+        new ByteArrayUtf8CharSequence("str2"),
+        new ByteArrayUtf8CharSequence("str3"), new ByteArrayUtf8CharSequence("str4")};
+    runTestForFieldWithQuery("stringRemove", values, toStringArray(values), o -> o.toString());
+  }
+
+  @Test
+  public void testMultivalueTextField() throws SolrServerException, IOException {
+    runTestForFieldWithQuery("textRemove", new String[] {"text1", "text2", "text3", "text4"});
+  }
+
+  @Test
+  public void testMultivalueUUIDField() throws SolrServerException, IOException {
+    final String[] values = new String[] {UUID.randomUUID().toString(), UUID.randomUUID().toString(),
+        UUID.randomUUID().toString(), UUID.randomUUID().toString()};
+    runTestForFieldWithQuery("uuidRemove", values);
+  }
+
+}
diff --git a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
index 1240486..48c76b7 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/AtomicUpdatesTest.java
@@ -75,6 +75,7 @@
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -88,6 +89,7 @@
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "21");
@@ -142,6 +144,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -155,6 +158,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "1021");
@@ -210,6 +214,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -223,6 +228,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "1021");
@@ -274,6 +280,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -287,6 +294,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "1021");
@@ -339,6 +347,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']");
 
     doc = new SolrInputDocument();
     doc.setField("id", "1001");
@@ -351,6 +360,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "1021");
@@ -423,6 +433,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -436,6 +447,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:222", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "intRemove:333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "1021");
@@ -489,6 +501,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "intRemove:33333333", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -502,6 +515,7 @@
 
     assertQ(req("q", "intRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "intRemove:22222222", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "intRemove:33333333", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "1021");
@@ -559,6 +573,7 @@
       assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
     }
     assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']");
 
     doc = new SolrInputDocument();
     doc.setField("id", "10001");
@@ -672,6 +687,7 @@
 
     assertQ(req("q", "dateRemove:*", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "dateRemove:\"2014-09-02T12:00:00Z\"", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "dateRemove:\"2014-09-03T12:00:00Z\"", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "10021");
@@ -794,6 +810,7 @@
 
     assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "floatRemove:\"333.333\"", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -808,6 +825,7 @@
 
     assertQ(req("q", "floatRemove:[* TO *]", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "floatRemove:\"222.222\"", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "floatRemove:\"333.333\"", "indent", "true"), "//result[@numFound = '3']"); // remove only removed first occurrence
 
     doc = new SolrInputDocument();
     doc.setField("id", "10021");
@@ -832,7 +850,7 @@
     assertQ(req("q", "floatRemove:\"111.111\"", "indent", "true"), "//result[@numFound = '3']");
   }
 
- @Test
+  @Test
   public void testRemoveregex() throws Exception {
     SolrInputDocument doc;
 
@@ -862,6 +880,7 @@
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '3']");
+    assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '3']");
 
 
     doc = new SolrInputDocument();
@@ -875,6 +894,7 @@
 
     assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '4']");
     assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '2']");
+    assertQ(req("q", "cat:ccc", "indent", "true"), "//result[@numFound = '2']"); // removeregex does remove all occurrences
 
     doc = new SolrInputDocument();
     doc.setField("id", "21");
@@ -900,6 +920,43 @@
   }
 
   @Test
+  public void testRemoveregexMustMatchWholeValue() throws Exception {
+    SolrInputDocument doc;
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "1");
+    doc.setField("cat", new String[]{"aaa", "bbb", "ccc", "ccc", "ddd"});
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']");
+
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "1");
+    List<String> removeList = new ArrayList<>();
+    removeList.add("bb");
+    doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // Was not removed - regex didn't match whole value
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "1");
+    removeList = new ArrayList<>();
+    removeList.add("bbb");
+    doc.setField("cat", ImmutableMap.of("removeregex", removeList)); //behavior when hitting Solr through ZK
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); // Was removed now - regex matches
+  }
+
+  @Test
   public void testAdd() throws Exception {
     SolrInputDocument doc = new SolrInputDocument();
     doc.setField("id", "3");
@@ -976,6 +1033,55 @@
   }
 
   @Test
+  public void testAddMultiple() throws Exception {
+    SolrInputDocument doc = new SolrInputDocument();
+    doc.setField("id", "3");
+    doc.setField("cat", new String[]{"aaa", "ccc"});
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']");
+
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "3");
+    doc.setField("cat", ImmutableMap.of("add", "bbb"));
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']");
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "3");
+    doc.setField("cat", ImmutableMap.of("add", "bbb"));
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // Should now have 2 occurrences of bbb
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "3");
+    doc.setField("cat", ImmutableMap.of("remove", "bbb"));
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '1']"); // remove only removed first occurrence
+
+    doc = new SolrInputDocument();
+    doc.setField("id", "3");
+    doc.setField("cat", ImmutableMap.of("remove", "bbb"));
+    assertU(adoc(doc));
+    assertU(commit());
+
+    assertQ(req("q", "cat:*", "indent", "true"), "//result[@numFound = '1']");
+    assertQ(req("q", "cat:bbb", "indent", "true"), "//result[@numFound = '0']"); // remove now removed last occurrence
+  }
+
+  @Test
   public void testSet() throws Exception {
     SolrInputDocument doc;
 
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java
similarity index 64%
copy from solr/core/src/test-files/runtimecode/MyDocCache.java
copy to solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java
index 406b950..5f9889e 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/JavaBinAtomicUpdateMultivalueTest.java
@@ -14,22 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.update.processor;
 
-package runtimecode;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer.RequestWriterSupplier;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
+public class JavaBinAtomicUpdateMultivalueTest extends AbstractAtomicUpdatesMultivalueTestBase {
 
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
-
-  static String fld_name= "my_synthetic_fld_s";
   @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
+  RequestWriterSupplier getRequestWriterSupplier() {
+    return RequestWriterSupplier.JavaBin;
   }
+
 }
diff --git a/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java b/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java
index 6cee3d9..889b0bf 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java
@@ -31,7 +31,7 @@
     List<String>  names = new ArrayList<>();
     for (UpdateRequestProcessorFactory p : processorChain.getProcessors()) {
       if (p instanceof UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) {
-        p = ((UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) p).getDelegate();
+        p = ((UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) p).delegate;
       }
       names.add(p.getClass().getSimpleName());
     }
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java
similarity index 64%
copy from solr/core/src/test-files/runtimecode/MyDocCache.java
copy to solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java
index 406b950..1a5f62b 100644
--- a/solr/core/src/test-files/runtimecode/MyDocCache.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/XMLAtomicUpdateMultivalueTest.java
@@ -14,22 +14,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+package org.apache.solr.update.processor;
 
-package runtimecode;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer.RequestWriterSupplier;
 
-import org.apache.lucene.document.Document;
-import org.apache.lucene.document.StoredField;
-import org.apache.solr.search.LRUCache;
+public class XMLAtomicUpdateMultivalueTest extends AbstractAtomicUpdatesMultivalueTestBase {
 
-public  class MyDocCache<K,V> extends LRUCache<K,V> {
-
-  static String fld_name= "my_synthetic_fld_s";
   @Override
-  public V put(K key, V value) {
-    if(value instanceof Document){
-      Document d = (Document) value;
-      d.add(new StoredField(fld_name, "version_2"));
-    }
-    return super.put(key, value);
+  RequestWriterSupplier getRequestWriterSupplier() {
+    return RequestWriterSupplier.XML;
   }
+
 }
diff --git a/solr/core/src/test/org/apache/solr/util/TestExportTool.java b/solr/core/src/test/org/apache/solr/util/TestExportTool.java
index fdfb3c0..9e637f9 100644
--- a/solr/core/src/test/org/apache/solr/util/TestExportTool.java
+++ b/solr/core/src/test/org/apache/solr/util/TestExportTool.java
@@ -36,7 +36,6 @@
 import org.apache.solr.client.solrj.request.JavaBinUpdateRequestCodec;
 import org.apache.solr.client.solrj.request.UpdateRequest;
 import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.cloud.MiniSolrCloudCluster;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Replica;
@@ -49,7 +48,7 @@
 
   public void testBasic() throws Exception {
     String COLLECTION_NAME = "globalLoaderColl";
-    MiniSolrCloudCluster cluster = configureCluster(4)
+    configureCluster(4)
         .addConfig("conf", configset("cloud-minimal"))
         .configure();
 
@@ -122,7 +121,7 @@
   @Nightly
   public void testVeryLargeCluster() throws Exception {
     String COLLECTION_NAME = "veryLargeColl";
-    MiniSolrCloudCluster cluster = configureCluster(4)
+    configureCluster(4)
         .addConfig("conf", configset("cloud-minimal"))
         .configure();
 
diff --git a/solr/licenses/caffeine-2.4.0.jar.sha1 b/solr/licenses/caffeine-2.4.0.jar.sha1
deleted file mode 100644
index 9c317d9..0000000
--- a/solr/licenses/caffeine-2.4.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5aa8bbb851b1ad403cc140094ba4a25998369efe
diff --git a/solr/licenses/caffeine-2.8.0.jar.sha1 b/solr/licenses/caffeine-2.8.0.jar.sha1
new file mode 100644
index 0000000..ce291c4
--- /dev/null
+++ b/solr/licenses/caffeine-2.8.0.jar.sha1
@@ -0,0 +1 @@
+6000774d7f8412ced005a704188ced78beeed2bb
diff --git a/solr/licenses/commons-beanutils-1.9.3.jar.sha1 b/solr/licenses/commons-beanutils-1.9.3.jar.sha1
deleted file mode 100644
index da389e5..0000000
--- a/solr/licenses/commons-beanutils-1.9.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-c845703de334ddc6b4b3cd26835458cb1cba1f3d
diff --git a/solr/licenses/commons-beanutils-LICENSE-ASL.txt b/solr/licenses/commons-beanutils-LICENSE-ASL.txt
deleted file mode 100644
index d645695..0000000
--- a/solr/licenses/commons-beanutils-LICENSE-ASL.txt
+++ /dev/null
@@ -1,202 +0,0 @@
-
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/solr/licenses/commons-beanutils-NOTICE.txt b/solr/licenses/commons-beanutils-NOTICE.txt
deleted file mode 100644
index c6c8ce9..0000000
--- a/solr/licenses/commons-beanutils-NOTICE.txt
+++ /dev/null
@@ -1,5 +0,0 @@
-Apache Commons BeanUtils
-Copyright 2000-2018 The Apache Software Foundation
-
-This product includes software developed at
-The Apache Software Foundation (http://www.apache.org/).
diff --git a/solr/server/etc/jetty-ssl.xml b/solr/server/etc/jetty-ssl.xml
index 9ff5acc..3670641 100644
--- a/solr/server/etc/jetty-ssl.xml
+++ b/solr/server/etc/jetty-ssl.xml
@@ -17,6 +17,7 @@
   <Set name="TrustStorePassword"><Ref refid="trustStorePassword"/></Set>
   <Set name="NeedClientAuth"><Property name="solr.jetty.ssl.needClientAuth" default="false"/></Set>
   <Set name="WantClientAuth"><Property name="solr.jetty.ssl.wantClientAuth" default="false"/></Set>
+  <Set name="EndpointIdentificationAlgorithm"><Property name="solr.jetty.ssl.verifyClientHostName"/></Set>
   <Set name="KeyStoreType"><Property name="solr.jetty.keystore.type" default="JKS"/></Set>
   <Set name="TrustStoreType"><Property name="solr.jetty.truststore.type" default="JKS"/></Set>
 
diff --git a/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc b/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc
index 5738c6f..2f5ed58 100644
--- a/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc
+++ b/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc
@@ -128,11 +128,11 @@
 ----
  curl -o runtimelibs.jar   -LO https://github.com/apache/lucene-solr/blob/master/solr/core/src/test-files/runtimecode/runtimelibs.jar.bin?raw=true
 ----
-Step 2: Get the `sha256` hash of the jar
+Step 2: Get the `sha512` hash of the jar
 
 [source,bash]
 ----
- openssl dgst -sha256 runtimelibs.jar
+ openssl dgst -sha512 runtimelibs.jar
 ----
 
 Step 3 :  Start solr with runtime lib enabled
@@ -154,9 +154,9 @@
 [source,bash]
 ----
  curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{
-    "add-package": { "name" : "my-pkg",
+    "add-runtimelib": { "name" : "testjar",
     "url":"http://localhost:8000/runtimelibs.jar" ,
-    "sha256" : "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"}
+    "sha512" : "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"}
     }'
 ----
 
@@ -166,7 +166,7 @@
 ----
 curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{
     "create-requesthandler": { "name" : "/test",
-    "class": "org.apache.solr.core.RuntimeLibReqHandler", "package" : "my-pkg" }
+    "class": "org.apache.solr.core.RuntimeLibReqHandler", "runtimeLib" : true}
     }'
 ----
 
@@ -198,15 +198,15 @@
 Example:
 
 * Host the new jar to a new url. eg:  http://localhost:8000/runtimelibs_v2.jar
-* get the `sha256` hash of the new jar
+* get the `sha512` hash of the new jar
 * run the update-runtime lib command
 
 [source,bash]
 ----
  curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{
-    "update-package": { "name" : "my-pkg",
+    "update-runtimelib": { "name" : "testjar",
     "url":"http://localhost:8000/runtimelibs_v2.jar" ,
-    "sha256" : "<replace-the-new-sha256-digest-here>"}
+    "sha512" : "<replace-the-new-sha512-digest-here>"}
     }'
 ----
 NOTE: Always upload your jar to a new url as the Solr cluster is still referring to the old jar. If the existing jar is modified it can cause errors as the hash may not match
diff --git a/solr/solr-ref-guide/src/enabling-ssl.adoc b/solr/solr-ref-guide/src/enabling-ssl.adoc
index 5edff5c..2d9e69c 100644
--- a/solr/solr-ref-guide/src/enabling-ssl.adoc
+++ b/solr/solr-ref-guide/src/enabling-ssl.adoc
@@ -90,6 +90,8 @@
 SOLR_SSL_NEED_CLIENT_AUTH=false
 # Enable clients to authenticate (but not require)
 SOLR_SSL_WANT_CLIENT_AUTH=false
+# Verify client's hostname during SSL handshake
+SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false
 # SSL Certificates contain host/ip "peer name" information that is validated by default. Setting
 # this to false can be useful to disable these checks when re-using a certificate on many hosts
 SOLR_SSL_CHECK_PEER_NAME=true
@@ -101,7 +103,7 @@
 When you start Solr, the `bin/solr` script includes the settings in `bin/solr.in.sh` and will pass these SSL-related system properties to the JVM.
 
 .Client Authentication Settings
-WARNING: Enable either SOLR_SSL_NEED_CLIENT_AUTH or SOLR_SSL_WANT_CLIENT_AUTH but not both at the same time. They are mutually exclusive and Jetty will select one of them which may not be what you expect.
+WARNING: Enable either SOLR_SSL_NEED_CLIENT_AUTH or SOLR_SSL_WANT_CLIENT_AUTH but not both at the same time. They are mutually exclusive and Jetty will select one of them, which may not be what you expect. SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION should be set to true if you only want to accept requests from clients whose hostname can be verified.
 
 Similarly, when you start Solr on Windows, the `bin\solr.cmd` script includes the settings in `bin\solr.in.cmd` - uncomment and update the set of properties beginning with `SOLR_SSL_*` to pass these SSL-related system properties to the JVM:
 
@@ -121,6 +123,8 @@
 set SOLR_SSL_NEED_CLIENT_AUTH=false
 REM Enable clients to authenticate (but not require)
 set SOLR_SSL_WANT_CLIENT_AUTH=false
+REM Verify client hostname during SSL handshake
+set SOLR_SSL_CLIENT_HOSTNAME_VERIFICATION=false
 REM SSL Certificates contain host/ip "peer name" information that is validated by default. Setting
 REM this to false can be useful to disable these checks when re-using a certificate on many hosts
 set SOLR_SSL_CHECK_PEER_NAME=true
diff --git a/solr/solr-ref-guide/src/json-facet-api.adoc b/solr/solr-ref-guide/src/json-facet-api.adoc
index bb07c2e..ae4358d 100644
--- a/solr/solr-ref-guide/src/json-facet-api.adoc
+++ b/solr/solr-ref-guide/src/json-facet-api.adoc
@@ -407,8 +407,98 @@
 * "all" shorthand for lower, upper, edge, outer
 
 |facet |Aggregations, metrics, or nested facets that will be calculated for every returned bucket
+|ranges a|A list of arbitrary ranges; when specified, facets are calculated on the given ranges rather than on `start`, `gap` and `end`. With `start`, `end` and `gap` the width of each range (bucket) is always fixed, so if range faceting needs to be computed on ranges of varying width, `ranges` should be used instead.
+
+* Specifying `start`, `end` or `gap` along with `ranges` is disallowed, and such a request will fail.
+* When `ranges` are specified in the range facet, the `hardend`, `include` and `other` parameters are ignored.
+
+Refer to <<Arbitrary Range>>.
 |===
 
+==== Arbitrary Range
+
+An arbitrary range consists of `from` and `to` values over which the range bucket is computed. The range can be specified using either of two syntaxes.
+
+[width="100%",cols="10%,90%",options="header",]
+|===
+|Parameter |Description
+|from |The lower bound of the range. When not specified, defaults to `*`.
+|to |The upper bound of the range. When not specified, defaults to `*`.
+|inclusive_from |A boolean which, when true, includes the lower bound `from`. Defaults to `true`.
+|inclusive_to |A boolean which, when true, includes the upper bound `to`. Defaults to `false`.
+|range a|The range specified as a string. This is semantically similar to `facet.interval`.
+
+* When `range` is specified, all the other parameters (`from`, `to`, etc.) for that range are ignored
+* `range` always starts with `(` or `[` and ends with `)` or `]`
+** `(` - exclude lower bound
+** `[` - include lower bound
+** `)` - exclude upper bound
+** `]` - include upper bound
+
+For example, for the range `(5,10]`, 5 is excluded and 10 is included.
+|===
+
+===== `other` with `ranges`
+
+The `other` parameter is ignored when `ranges` is specified, but the same behavior can be achieved with `ranges` (see the example after this list):
+
+* `before` - equivalent to `[*,some_val)`, or just specifying a `to` value
+* `after` - equivalent to `(some_val,*]`, or just specifying a `from` value
+* `between` - equivalent to specifying `start` and `end` as `from` and `to` respectively
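+
+As a minimal, illustrative sketch (assuming the `techproducts` example collection and its `price` field, and relying on `from` and `to` defaulting to `*`), the three cases above could be emulated with three `ranges` entries:
+
+[source,bash]
+----
+curl http://localhost:8983/solr/techproducts/query -d '
+{
+  "query": "*:*",
+  "facet": {
+    "prices": {
+      "type": "range",
+      "field": "price",
+      "ranges": [
+        { "to": 20 },
+        { "from": 20, "to": 100 },
+        { "from": 100 }
+      ]
+    }
+  }
+}'
+----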
+
+===== include with ranges
+
+The `include` parameter is ignored when `ranges` is specified, but the same behavior can be achieved: `lower`, `upper`, `outer` and `edge` can all be expressed using a combination of `inclusive_from` and `inclusive_to`.
+
+An example range facet using `ranges`:
+
+[source,bash]
+----
+curl http://localhost:8983/solr/techproducts/query -d '
+{
+  "query": "*:*",
+  "facet": {
+    "prices": {
+      "type": "range",
+      "field": "price",
+      "ranges": [
+        {
+          "from": 0,
+          "to": 20,
+          "inclusive_from": true,
+          "inclusive_to": false
+        },
+        {
+          "range": "[40,100)"
+        }
+      ]
+    }
+  }
+}'
+----
+
+The output from the range facet above would look like this:
+
+[source,json]
+----
+{
+  "prices": {
+    "buckets": [
+      {
+        "val": "[0,20)",
+        "count": 5
+      },
+      {
+        "val": "[40,100)",
+        "count": 2
+      }
+    ]
+  }
+}
+----
+
+NOTE: When `range` is specified, its value in the request is used as the key in the response. Otherwise, the key is generated from `from`, `to`, `inclusive_from` and `inclusive_to`. A custom `key` is currently not supported.
+
 === Heatmap Facet
 
 The `heatmap` facet generates a 2D grid of facet counts for documents having spatial data in each grid cell.
diff --git a/solr/solr-ref-guide/src/ping.adoc b/solr/solr-ref-guide/src/ping.adoc
index c1de95c..ed4e7ce 100644
--- a/solr/solr-ref-guide/src/ping.adoc
+++ b/solr/solr-ref-guide/src/ping.adoc
@@ -69,7 +69,7 @@
 
 Both API calls have the same output. A status=OK indicates that the nodes are responding.
 
-*SolrJ Example*
+*SolrJ Example with SolrPing*
 
 [source,java]
 ----
@@ -78,3 +78,12 @@
 rsp = ping.process(solrClient, collectionName);
 int status = rsp.getStatus();
 ----
+
+*SolrJ Example with SolrClient*
+
+[source,java]
+----
+SolrClient client = new HttpSolrClient.Builder(solrUrl).build();
+SolrPingResponse pingResponse = client.ping(collectionName);
+int status = pingResponse.getStatus();
+----
diff --git a/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc b/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc
index 3729e53..4b44cd8 100644
--- a/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc
+++ b/solr/solr-ref-guide/src/query-settings-in-solrconfig.adoc
@@ -33,22 +33,27 @@
 
 When a new searcher is opened, the current searcher continues servicing requests while the new one auto-warms its cache. The new searcher uses the current searcher's cache to pre-populate its own. When the new searcher is ready, it is registered as the current searcher and begins handling all new search requests. The old searcher will be closed once it has finished servicing all its requests.
 
-In Solr, there are three cache implementations: `solr.search.LRUCache`, `solr.search.FastLRUCache,` and `solr.search.LFUCache`.
+=== Cache Implementations
+In Solr, the following cache implementations are available: the recommended `solr.search.CaffeineCache`, and the legacy implementations `solr.search.LRUCache`, `solr.search.FastLRUCache` and `solr.search.LFUCache`.
+
+The `CaffeineCache` is an implementation backed by the https://github.com/ben-manes/caffeine[Caffeine caching library]. By default it uses a Window TinyLFU (W-TinyLFU) eviction policy, which allows eviction based on both frequency and recency of use in O(1) time with a small footprint. This implementation is generally recommended over the legacy caches, as it usually offers a lower memory footprint, a higher hit ratio and better multi-threaded performance.
 
 The acronym LRU stands for Least Recently Used. When an LRU cache fills up, the entry with the oldest last-accessed timestamp is evicted to make room for the new entry. The net effect is that entries that are accessed frequently tend to stay in the cache, while those that are not accessed frequently tend to drop out and will be re-fetched from the index if needed again.
 
 The `FastLRUCache`, which was introduced in Solr 1.4, is designed to be lock-free, so it is well suited for caches which are hit several times in a request.
 
-Both `LRUCache` and `FastLRUCache` use an auto-warm count that supports both integers and percentages which get evaluated relative to the current size of the cache when warming happens.
+`CaffeineCache`, `LRUCache` and `FastLRUCache` use an auto-warm count that supports both integers and percentages, which are evaluated relative to the current size of the cache when warming happens.
 
 The `LFUCache` refers to the Least Frequently Used cache. This works in a way similar to the LRU cache, except that when the cache fills up, the entry that has been used the least is evicted.
 
 The Statistics page in the Solr Admin UI will display information about the performance of all the active caches. This information can help you fine-tune the sizes of the various caches appropriately for your particular application. When a Searcher terminates, a summary of its cache usage is also written to the log.
 
-Each cache has settings to define its initial size (`initialSize`), maximum size (`size`) and number of items to use for during warming (`autowarmCount`). The LRU and FastLRU cache implementations can take a percentage instead of an absolute value for `autowarmCount`.
+Each cache has settings to define its initial size (`initialSize`), maximum size (`size`) and number of items to use during warming (`autowarmCount`). The Caffeine, LRU and FastLRU cache implementations can take a percentage instead of an absolute value for `autowarmCount`.
 
 Each cache implementation also supports a `maxIdleTime` attribute that controls the automatic eviction of entries that haven't been used for a while. This attribute is expressed in seconds, with the default value of `0` meaning no entries are automatically evicted due to exceeded idle time. Smaller values of this attribute will cause older entries to be evicted quickly, which will reduce cache memory usage but may instead cause thrashing due to a repeating eviction-lookup-miss-insertion cycle of the same entries. Larger values will cause entries to stay around longer, waiting to be reused, at the cost of increased memory usage. Reasonable values, depending on the query volume and patterns, may lie somewhere between 60-3600. Please note that this condition is evaluated synchronously and before other eviction conditions on every entry insertion.
 
+`CaffeineCache`, `LRUCache` and `FastLRUCache` support a `maxRamMB` attribute that limits the maximum amount of memory a cache may consume. When both `size` and `maxRamMB` limits are specified the behavior will differ among implementations: in `CaffeineCache` the `maxRamMB` limit will take precedence and the `size` limit will be ignored, while in `LRUCache` and `FastLRUCache` both limits will be observed, with entries being evicted whenever any of the limits is reached.
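+
+As an illustrative sketch (the attribute values are examples, not recommendations), a filter cache configured in `solrconfig.xml` to use `CaffeineCache` with a memory-based limit might look like this; because both `size` and `maxRamMB` are given, `CaffeineCache` honors `maxRamMB` and ignores `size`:
+
+[source,xml]
+----
+<filterCache class="solr.CaffeineCache"
+             size="512"
+             initialSize="512"
+             autowarmCount="128"
+             maxRamMB="256"/>
+----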
+
 `FastLRUCache` and `LFUCache` support `showItems` attribute. This is the number of cache items to display in the stats page for the cache. It is for debugging.
 
 Details of each cache are described below.
diff --git a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc
index f237542..11b4c75 100644
--- a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc
+++ b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc
@@ -16,65 +16,154 @@
 // specific language governing permissions and limitations
 // under the License.
 
-Solr allows configuring roles to control user access to the system.
-
-This is accomplished through rule-based permission definitions which are assigned to users. The roles are fully customizable, and provide the ability to limit access to specific collections, request handlers, request parameters, and request methods.
-
-The roles can be used with any of the authentication plugins or with a custom authentication plugin if you have created one. You will only need to ensure that you configure the role-to-user mappings with the proper user IDs that your authentication system provides.
-
-Once defined through the API, roles are stored in `security.json`.
+Solr's authentication plugins control whether users can access Solr in a binary fashion.  A user is either authenticated, or they aren't.  For more fine-grained access control, Solr's Rule-Based Authorization Plugin (hereafter, "RBAP") can be used.
 
 [CAUTION]
 ====
 Solr's Admin UI interacts with Solr using its regular APIs. When rule-based authorization is in use, logged-in users not authorized to access the full range of these APIs may see some sections of the UI that appear blank or "broken". For best results, the Admin UI should only be accessed by users with full API access.
 ====
 
-== Enable the Authorization Plugin
+== Rule-Based Auth Concepts
 
-The plugin must be enabled in `security.json`. This file and where to put it in your system is described in detail in the section <<authentication-and-authorization-plugins.adoc#enable-plugins-with-security-json,Enable Plugins with security.json>>.
+"Users", "roles" and "permissions" play a central role in configuring authorization correctly.
 
-This file has two parts, the `authentication` part and the `authorization` part. The `authentication` part stores information about the class being used for authentication.
+In Rule-Based Authorization, administrators define a series of roles based on the permissions they want those roles to confer.  Users are then assigned one or more roles.
 
-The `authorization` part is not related to Basic authentication, but is a separate authorization plugin designed to support fine-grained user access control. When creating `security.json` you can add the permissions to the file, or you can use the Authorization API described below to add them as needed.
+=== Users
 
-This example `security.json` shows how the <<basic-authentication-plugin.adoc#basic-authentication-plugin,Basic authentication plugin>> can work with this authorization plugin:
+The users that RBAP sees come from whatever authentication plugin has been configured.  RBAP is compatible with all of the authentication plugins that Solr ships with out of the box.  It is also compatible with any custom authentication plugins users might write, provided that the plugin sets a user principal on the HttpServletRequest it receives.  The user value seen by RBAP in each case depends on the authentication plugin being used: the Kerberos principal if the <<kerberos-authentication-plugin.adoc#kerberos-authentication-plugin,Kerberos Authentication Plugin>> is being used, the "sub" JWT claim if the <<jwt-authentication-plugin.adoc#jwt-authentication-plugin,JWT Authentication Plugin>> is being used, etc.
+
+=== Roles
+
+Roles help bridge the gap between users and permissions. Users are assigned one or more roles, and permissions are then given to each of these roles in `security.json`.
+
+=== Permissions
+
+Permissions control which roles (and consequently, which users) have access to particular chunks of Solr's API.  Each permission has two main components: a description of the APIs this permission applies to, and a list of the roles that should be allowed to access this set of APIs.
+
+Administrators can use permissions from a list of predefined options or define their own custom permissions, and are free to mix and match both.
+
+== Configuring the Rule-Based Authorization Plugin
+
+As with all of Solr's security plugins, configuration for RBAP lives in a file or ZooKeeper node with the name `security.json`.  See <<authentication-and-authorization-plugins.adoc#enable-plugins-with-security-json,here>> for more information on how to set up `security.json` in your cluster.
+
+Solr offers an <<Authorization API>> for making changes to RBAP configuration.  Authorized administrators should use this to make changes under most circumstances.  Users may also edit `security.json` directly if it is stored in ZooKeeper, but this is an expert-level feature and is generally discouraged.  The API simplifies some aspects of configuration, and provides error feedback that isn't available when editing ZooKeeper directly.
+
+=== Configuration Syntax
+
+RBAP configuration consists of a small number of required configuration properties.  Each of these lives under the `authorization` top-level property in `security.json`.
+
+class:: The authorization plugin to use.  For RBAP, this value will always be `solr.RuleBasedAuthorizationPlugin`.
+user-role:: A mapping of individual users to the roles they belong to.  The value of this property is a JSON map, where each property name is a user, and each property value is either the name of a single role or a JSON array of multiple roles that the specified user belongs to.  For example:
++
+[source,json]
+----
+"user-role": {
+  "user1": "role1",
+  "user2": ["role1", "role2"]
+}
+----
+permissions:: A JSON array of permission rules used to restrict access to sections of Solr's API.  For example:
++
+[source,json]
+----
+"permissions": [
+  { "name": "read", "collection": "techproducts", "role": ["admin", "dev"] },
+  { "name": "all", "role": "admin"}
+]
+----
++
+The syntax for individual permissions is more involved and is treated in greater detail <<Permissions,below>>.
+
+=== Complete Example
+
+The example below shows how the configuration properties above can be used to achieve a typical (if simple) RBAP use-case.
 
 [source,json]
 ----
 {
-"authentication":{
-   "class":"solr.BasicAuthPlugin", <1>
-   "blockUnknown": true, <2>
-   "credentials":{"solr":"IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="} <3>
-},
-"authorization":{
-   "class":"solr.RuleBasedAuthorizationPlugin", <4>
-   "permissions":[{"name":"security-edit",
-      "role":"admin"}], <5>
-   "user-role":{"solr":"admin"} <6>
-}}
+  "authentication": {
+    "class": "solr.BasicAuthPlugin", <1>
+    "blockUnknown": true,
+    "credentials": {
+      "admin-user": "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c=",
+      "dev-user": "IV0EHq1OnNrj6gvRCwvFwTrZ1+z1oBbnQdiVC3otuq0= Ndd7LKvVBAaZIF0QAVi1ekCfAJXr1GGfLtRUXhgrF8c="
+    }
+  },
+  "authorization": {
+    "class": "solr.RuleBasedAuthorizationPlugin", <2>
+    "user-role": { <3>
+      "admin-user": "admin",
+      "dev-user": "dev"
+    },
+    "permissions": [ <4>
+      { "name": "dev-private-collection", "collection": "dev-private", "role": "dev"},
+      { "name": "security-read", "role": "admin"},
+      { "name": "security-edit", "role": "admin"}
+    ]
+  }
+}
 ----
 
-There are several things defined in this example:
+<1> Solr is using the Basic Authentication plugin for authentication.  This configuration establishes two users: `admin-user` and `dev-user`.
+<2> The `authorization` property begins the authorization configuration.  Solr will use RBAP for authorization.
+<3> Two roles are defined: `admin` and `dev`.  Each user belongs to one role: `admin-user` is an `admin`, and `dev-user` is a `dev`.
+<4> Three permissions restrict access to Solr.  The first permission (a "custom" permission) indicates that only the `dev` role can read from a special collection with the name `dev-private`.  The last two permissions ("predefined" permissions) indicate that only the `admin` role is permitted to use Solr's security APIs.  See below for more information on permission syntax.
 
-<1> Basic authentication plugin is enabled.
-<2> All requests w/o credentials will be rejected with a 401 error. Set `'blockUnknown'` to false (or remove it altogether) if you wish to let unauthenticated requests to go through. However, if a particular resource is protected by a rule, they are rejected anyway with a 401 error.
-<3> A user named 'solr', with a password has been defined.
-<4> Rule-based authorization plugin is enabled.
-<5> The 'admin' role has been defined, and it has permission to edit security settings.
-<6> The 'solr' user has been defined to the 'admin' role.
+Altogether, this example carves out two restricted areas.  Only `admin-user` can access Solr's Authentication and Authorization APIs, and only `dev-user` can access their `dev-private` collection.  All other APIs are left open, and can be accessed by both users.
 
-== Permission Attributes
+== Permissions
 
-Each role is comprised of one or more permissions which define what the user is allowed to do. Each permission is made up of several attributes that define the allowed activity. There are some pre-defined permissions which cannot be modified.
+Solr's Rule-Based Authorization plugin supports a flexible and powerful permission syntax.  RBAP supports two types of permissions, each with a slightly different syntax.
 
-The permissions are consulted in order they appear in `security.json`. The first permission that matches is applied for each user, so the strictest permissions should be at the top of the list. Permissions order can be controlled with a parameter of the Authorization API, as described below.
+=== Custom Permissions
+
+Administrators can write their own custom permissions that can match requests based on the collection, request handler, HTTP method, particular request parameters, etc.
+
+Each custom permission is a JSON object under the `permissions` property, with one or more of the properties below:
+
+name:: An optional identifier for the permission.  For custom permissions, this is used only as a clue to administrators about what this permission does.  Even so, care must be taken when setting this property to avoid colliding with one of Solr's predefined permissions, whose names are semantically meaningful.  If this name matches a predefined permission, Solr ignores any other properties set and uses the semantics of the predefined permission instead.
+collection:: An optional property identifying which collection(s) this permission applies to.  The value can either be a single collection name, or a JSON array containing multiple collections.  The wildcard `\*` can be used to indicate that this rule applies to all collections.  Similarly the special value "null" can be used to indicate that this permission governs Solr's collection-agnostic APIs.  If not specified, this property defaults to `["*", "null"]`.
++
+[NOTE]
+====
+The collection property can only be used to match _collections_.  It currently cannot be used to match aliases.  Aliases are resolved before Solr's security plugins are invoked; a `collection` property given an alias will never match because RBAP will be comparing an alias name to already-resolved collection names.  Instead, set a `collection` property that contains all collections in the alias concerned (or the `*` wildcard).
+====
+path:: An optional property identifying which request handlers this permission applies to.  The value can either be a single request handler, or a JSON list containing multiple.  The wildcard `\*` can be used to indicate that this permission applies to all request handlers.  If not specified, this property defaults to `*`.
+method:: An optional property identifying which HTTP methods this permission applies to.  Options include `HEAD`, `POST`, `PUT`, `GET`, `DELETE`, and the wildcard `\*`.  Multiple values can also be specified using a JSON array.  If not specified, this property defaults to `*`.
+params:: An optional property identifying which query parameters this permission applies to.  The value is a JSON object containing the names and values of request parameters that must be matched for this permission to apply.
++
+For example, this property could be used to limit the actions a role is allowed to perform with the Collections API. If the role should only be allowed to perform the LIST or CLUSTERSTATUS requests, you would define this as follows:
++
+[source,json]
+----
+"params": {
+   "action": ["LIST", "CLUSTERSTATUS"]
+}
+----
++
+The request parameter value can be a simple string or a regular expression. Use the prefix `REGEX:` to use a regular expression match instead of simple string matching.
++
+To make the LIST and CLUSTERSTATUS commands case insensitive, the example above can be written as follows:
++
+[source,json]
+----
+"params": {
+   "action": ["REGEX:(?i)LIST", "REGEX:(?i)CLUSTERSTATUS"]
+}
+----
++
+If not specified, the permission is independent of any parameters.
+role:: A required property identifying which role (or roles) are allowed access to the APIs controlled by this permission.  Multiple values can be specified using a JSON array.  The wildcard `*` can be used to indicate that all roles can access the described functionality.
+
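+As an illustrative sketch (the permission, collection, and role names below are hypothetical), a custom permission combining several of these properties might look like this:
+
+[source,json]
+----
+{
+  "name": "techproducts-update",
+  "collection": "techproducts",
+  "path": "/update/*",
+  "method": ["POST", "PUT"],
+  "role": "dev"
+}
+----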
 
 === Predefined Permissions
 
-There are several permissions that are pre-defined. These have fixed default values, which cannot be modified, and new attributes cannot be added. To use these attributes, simply define a role that includes this permission, and then assign a user to that role.
+Custom permissions give administrators flexibility in configuring fine-grained access control.  But in an effort to make configuration as simple as possible, RBAP also offers a handful of predefined permissions, which cover many common use-cases.
 
-The pre-defined permissions are:
+Administrators invoke a predefined permission by choosing a `name` property that matches one of Solr's predefined permission options (listed below).  Solr has its own definition for each of these permissions, and uses this information when checking whether a predefined permission matches an incoming request.  This trades flexibility for simplicity: predefined permissions do not support the `path`, `params`, or `method` properties which custom permissions allow.
+
+The predefined permission names (and their effects) are:
 
 * *security-edit:* this permission is allowed to edit the security configuration, meaning any update action that modifies `security.json` through the APIs will be allowed.
 * *security-read*: this permission is allowed to read the security configuration, meaning any action that reads `security.json` settings through the APIs will be allowed.
@@ -129,6 +218,42 @@
 * *read*: this permission is allowed to perform any read action on any collection. This includes querying using search handlers (using <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#searchhandlers,request handlers>>) such as `/select`, `/get`, `/browse`, `/tvrh`, `/terms`, `/clustering`, `/elevate`, `/export`, `/spell`, `/clustering`, and `/sql`. This applies to all collections by default ( `collection:"*"` ).
 * *all*: Any requests coming to Solr.
 
+=== Permission Ordering and Resolution
+
+The permission syntax discussed above doesn't do anything to prevent multiple permissions from overlapping and applying to the same Solr APIs.  In cases where multiple permissions match an incoming request, Solr chooses the first matching permission and ignores all others - even if those other permissions would match the incoming request!
+
+Since Solr only uses the first matching permission it finds, it's important for administrators to understand what ordering Solr uses when processing the permission list.
+
+The ordering Solr uses is complex.  Solr checks the permissions most specific and relevant to the incoming request first, moving on to more general permissions only if none of the more-specific ones match.  In effect, this means that different requests may check the same permissions in very different orders.
+
+If the incoming request is collection-agnostic (doesn't apply to a particular collection), Solr checks permissions in the following order:
+
+. Permissions with a `collection` value of `null` and a `path` value matching the request's request handler
+. Permissions with a `collection` value of `null` and a `path` value of `*`
+
+If the incoming request is to a collection, Solr checks permissions in the following order:
+
+. Permissions with `collection` and `path` values matching the request specifically (not a wildcard match)
+. Permissions with `collection` matching the request specifically, and a `path` value of `*`
+. Permissions with `path` matching the request specifically, and a `collection` value of `*`
+. Permissions with both `collection` and `path` values of `*`.
+
+As an example, consider the permissions below:
+
+[source,json]
+----
+{"name": "read", "role": "dev"}, <1>
+{"name": "coll-read", "path": "/select", "role": "*"}, <2>
+{"name": "techproducts-read", "collection": "techproducts", "role": "other", "path": "/select"}, <3>
+{"name": "all", "role": "admin"} <4>
+----
+
+All of the permissions in this list match `/select` queries.  But different permissions will be used depending on the collection being queried.
+
+For a query to the `techproducts` collection, permission 3 will be used because it specifically targets `techproducts`.  Only users with the `other` role will be authorized.
+
+For a query to a collection called `collection1`, on the other hand, the most specific permission present is permission 2, so _all_ roles are given access.
+
 == Authorization API
 
 === Authorization API Endpoint
@@ -143,53 +268,7 @@
 * `update-permission`: update some attributes of an existing permission definition.
 * `delete-permission`: remove a permission definition.
 
-Permissions need to be created if they are not on the list of pre-defined permissions above.
-
-Several properties can be used to define your custom permission.
-
-`name`::
-The name of the permission. This is required only if it is a predefined permission.
-
-`collection`::
-The collection or collections the permission will apply to.
-+
-When the path that will be allowed is collection-specific, such as when setting permissions to allow use of the Schema API, omitting the collection property will allow the defined path and/or method for all collections. However, when the path is one that is non-collection-specific, such as the Collections API, the collection value must be `null`. The default value is `*`, or all collections.
-
-`path`::
-A request handler name, such as `/update` or `/select`. A wild card is supported, to allow for all paths as appropriate (such as, `/update/*`).
-
-`method`:: HTTP methods that are allowed for this permission. You could allow only GET requests, or have a role that allows PUT and POST requests. The method values that are allowed for this property are GET, POST, PUT,DELETE and HEAD.
-
-`params`::
-The names and values of request parameters. This property can be omitted if all request parameters are to be matched, but will restrict access only to the values provided if defined.
-+
-For example, this property could be used to limit the actions a role is allowed to perform with the Collections API. If the role should only be allowed to perform the LIST or CLUSTERSTATUS requests, you would define this as follows:
-+
-[source,json]
-----
-{"params": {
-   "action": ["LIST", "CLUSTERSTATUS"]
-  }
-}
-----
-+
-The value of the parameter can be a simple string or it could be a regular expression. Use the prefix `REGEX:` to use a regular expression match instead of a string identity match
-+
-If the commands LIST and CLUSTERSTATUS are case insensitive, the above example should be as follows
-+
-[source,json]
-----
-{"params": {
-   "action": ["REGEX:(?i)LIST", "REGEX:(?i)CLUSTERSTATUS"]
-  }
-}
-----
-
-`before`::
-This property allows ordering of permissions. The value of this property is the index of the permission that this new permission should be placed before in `security.json`. The index is automatically assigned in the order they are created.
-
-`role`::
-The name of the role(s) to give this permission. This name will be used to map user IDs to the role to grant these permissions. The value can be wildcard such as (`*`), which means that any user is OK, but no user is NOT OK.
+Created permissions can be either custom or predefined.  In addition to the permission syntax discussed above, these commands also allow permissions to have a `before` property, whose value is the index of the permission that this new permission should be placed before in `security.json`.
 
 The following creates a new permission named "collection-mgr" that is allowed to create and list collections. The permission will be placed before the "read" permission. Note also that we have defined "collection as `null`, this is because requests to the Collections API are never collection-specific.
 
diff --git a/solr/solr-ref-guide/src/shard-management.adoc b/solr/solr-ref-guide/src/shard-management.adoc
index 1a18d04..089df7d 100644
--- a/solr/solr-ref-guide/src/shard-management.adoc
+++ b/solr/solr-ref-guide/src/shard-management.adoc
@@ -93,6 +93,35 @@
 `async`::
 Request ID to track this action which will be <<collections-api.adoc#asynchronous-calls,processed asynchronously>>
 
+`splitByPrefix`::
+If `true`, the split point will be selected by taking into account the distribution of compositeId values in the shard.
+A compositeId has the form `<prefix>!<suffix>`, where all documents with the same prefix are colocated in the hash space.
+If there are multiple prefixes in the shard being split, then the split point will be selected to divide the prefixes into shards as equal in size as possible without splitting any prefix.
+If there is only a single prefix in a shard, the range of the prefix will be divided in half.
++
+The id field is usually scanned to determine the number of documents with each prefix.
+As an optimization, if an optional field called `id_prefix` exists and has the document prefix indexed (including the !) for each document,
+then that will be used to generate the counts.
++
+One simple way to populate `id_prefix` is a copyField in the schema:
+[source,xml]
+----
+  <!-- OPTIONAL, for optimization used by splitByPrefix if it exists -->
+  <field name="id_prefix" type="composite_id_prefix" indexed="true" stored="false"/>
+  <copyField source="id" dest="id_prefix"/>
+  <fieldtype name="composite_id_prefix" class="solr.TextField">
+    <analyzer>
+      <tokenizer class="solr.PatternTokenizerFactory" pattern=".*!" group="0"/>
+    </analyzer>
+  </fieldtype>
+----
+
+Current implementation details and limitations:
+
+* Prefix size is calculated using the number of documents with the prefix.
+* Only two-level compositeIds are supported.
+* The shard can only be split into two.
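+
+For example, assuming a hypothetical collection named `mycollection`, a prefix-aware split of `shard1` could be requested as follows:
+
+[source,text]
+----
+http://localhost:8983/solr/admin/collections?action=SPLITSHARD&collection=mycollection&shard=shard1&splitByPrefix=true
+----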
+
 === SPLITSHARD Response
 
 The output will include the status of the request and the new shard names, which will use the original shard as their basis, adding an underscore and a number. For example, "shard1" will become "shard1_0" and "shard1_1". If the status is anything other than "success", an error message will explain why the request failed.
diff --git a/solr/solr-ref-guide/src/solr-system-requirements.adoc b/solr/solr-ref-guide/src/solr-system-requirements.adoc
index 6a4bdd4..6e48726 100644
--- a/solr/solr-ref-guide/src/solr-system-requirements.adoc
+++ b/solr/solr-ref-guide/src/solr-system-requirements.adoc
@@ -39,7 +39,7 @@
 
 The exact output will vary, but you need to make sure you meet the minimum version requirement. We also recommend choosing a version that is not end-of-life from its vendor. Oracle/OpenJDK are the most tested JREs and are preferred. It's also preferred to use the latest available official release.
 
-Some versions of Java VM have bugs that may impact your implementation. To be sure, check the page https://wiki.apache.org/lucene-java/JavaBugs[Lucene Java Bugs].
+Some versions of Java VM have bugs that may impact your implementation. To be sure, check the page https://wiki.apache.org/confluence/display/LUCENEJAVA/JavaBugs[Lucene Java Bugs].
 
 === Sources for Java
 
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc
index 484f514..bf0953e 100644
--- a/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc
+++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc
@@ -522,6 +522,7 @@
 }
 ----
 
+[[scheduledtrigger]]
 === Scheduled Trigger
 
 The Scheduled trigger generates events according to a fixed rate schedule.
@@ -563,3 +564,46 @@
 Solr randomizes the order in which the triggers are resumed after the cooldown period to mitigate this problem. However, it is recommended that scheduled triggers
 are not used with low `every` values and an external scheduling process such as cron be used for such cases instead.
 ====
+
+== Default Triggers
+A fresh installation of SolrCloud always creates some default triggers. If these triggers are missing (e.g., they were
+deleted) they are re-created on any autoscaling configuration change or Overseer restart. These triggers can be
+suspended if their functionality somehow interferes with other configuration, but they can't be permanently deleted.
+
+=== Auto-add Replicas Trigger
+The default configuration and functionality of this trigger is described in detail in the
+section titled <<solrcloud-autoscaling-auto-add-replicas.adoc#solrcloud-autoscaling-auto-add-replicas,Automatically Adding Replicas>>.
+
+=== Scheduled Maintenance Trigger
+This is a <<scheduledtrigger>> named `.scheduled_maintenance` and it's configured to run once per day.
+It executes the following actions:
+
+==== `solr.InactiveShardPlanAction`
+This action checks existing collections for any shards in `INACTIVE` state, which indicates that they
+are the original parent shards remaining after a successful `SPLITSHARD` operation.
+
+These shards are not immediately deleted because shard splitting is a complex operation that may fail in
+non-obvious ways, so keeping the original parent shard gives users a chance to recover from potential failures.
+
+However, keeping these shards indefinitely doesn't make sense either because they still use system
+resources (their Solr cores are still being loaded, and their indexes still occupy disk space).
+This scheduled action is responsible for removing such inactive parent shards after their
+time-to-live expires. By default the TTL is set to 48 hours after the shard state was set to
+`INACTIVE`. When this TTL elapses, this scheduled action requests that the shard be deleted, which is then
+executed by the `solr.ExecutePlanAction` configured for this trigger.
+
+==== `solr.InactiveMarkersPlanAction`
+When a node is lost or added, an event is generated - but if the lost node was the one running the
+Overseer leader, that event may not be properly processed by the triggers (which run in the Overseer leader context).
+For this reason a special marker is created in ZooKeeper so that when the next Overseer leader is elected the
+triggers will be able to learn about and process these past events.
+
+Triggers don't delete these markers once they are done processing them (several triggers may need them, and e.g.
+scheduled triggers may run at arbitrary times with arbitrary delays), so Solr needs a mechanism to clean up
+old markers for such events so that they don't accumulate over time. This trigger action performs the clean-up:
+it deletes markers older than the configured time-to-live (48 hours by default).
+
+==== `solr.ExecutePlanAction`
+This action simply executes any collection admin requests generated by other
+actions - in particular, in the default configuration it executes `DELETESHARD` requests produced by
+`solr.InactiveShardPlanAction`, as described above.
\ No newline at end of file
diff --git a/solr/solr-ref-guide/src/taking-solr-to-production.adoc b/solr/solr-ref-guide/src/taking-solr-to-production.adoc
index 9e3595d..954c4dd 100644
--- a/solr/solr-ref-guide/src/taking-solr-to-production.adoc
+++ b/solr/solr-ref-guide/src/taking-solr-to-production.adoc
@@ -273,26 +273,33 @@
 SOLR_OPTS="$SOLR_OPTS -Dsolr.autoSoftCommit.maxTime=10000"
 ----
 
-=== File Handles and Processes (ulimit settings)
+=== Ulimit settings (*nix operating systems)
 
-Two common settings that result in errors on *nix systems are file handles and user processes.
-
-It is common for the default limits for number of processes and file handles to default to values that are too low for a large Solr installation. The required number of each of these will increase based on a combination of the number of replicas hosted per node and the number of segments in the index for each replica.
-
-The usual recommendation is to make processes and file handles at least 65,000 each, unlimited if possible. On most *nix systems, this command will show the currently-defined limits:
+There are several settings that should be monitored and set as high as possible, "unlimited" by preference. On most "*nix" operating systems, you can see the current values by typing the following at a command prompt.
 
 [source,bash]
 ----
 ulimit -a
 ----
 
-It is strongly recommended that file handle and process limits be permanently raised as above. The exact form of the command will vary per operating system, and some systems require editing configuration files and restarting your server. Consult your system administrators for guidance in your particular environment.
+These settings in particular are important to have set very high, unlimited if possible.
+
+ * max processes (ulimit -u): 65,000 is the recommended _minimum_
+ * file handles (ulimit -n): 65,000 is the recommended _minimum_. All the files used by all replicas have their file handles open at once so this can grow quite large.
+ * virtual memory (ulimit -v): Set to unlimited. This is used by MMap when memory-mapping the indexes.
+ * max memory size (ulimit -m): Also used by MMap, set to unlimited.
+ * If your system supports it, `sysctl vm.max_map_count` should be set to unlimited as well.
+
+We strongly recommend that these settings be permanently raised. The exact process to permanently raise them will vary per operating system. Some systems require editing configuration files and restarting your server. Consult your system administrators for guidance in your particular environment.
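+
+As one illustration (assuming a Linux system that uses PAM limits, with Solr running as a user named `solr`), the process and file handle limits can often be raised permanently by adding entries to `/etc/security/limits.conf`:
+
+[source,text]
+----
+solr  soft  nproc   65000
+solr  hard  nproc   65000
+solr  soft  nofile  65000
+solr  hard  nofile  65000
+----
+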
+[WARNING]
+====
+Check these limits every time you upgrade your kernel or operating system. These operations can reset the limits to their defaults.
+====
 
 [WARNING]
 ====
 If these limits are exceeded, the problems reported by Solr vary depending on the specific operation responsible for exceeding the limit. Errors such as "too many open files", "connection error", and "max processes exceeded" have been reported, as well as SolrCloud recovery failures.
 
-Since exceeding these limits can result in such varied symptoms it is _strongly_ recommended that these limits be permanently raised as recommended above.
 ====
 
 == Running Multiple Solr Nodes per Host
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
index 885edc9..0bbdc1a 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrClient.java
@@ -959,6 +959,21 @@
   }
 
   /**
+   * Issues a ping request to check if the collection's replicas are alive
+   *
+   * @param collection collection to ping
+   *
+   * @return a {@link org.apache.solr.client.solrj.response.SolrPingResponse} containing the response
+   *         from the server
+   *
+   * @throws IOException If there is a low-level I/O error.
+   * @throws SolrServerException if there is an error on the server
+   */
+  public SolrPingResponse ping(String collection) throws SolrServerException, IOException {
+    return new SolrPing().process(this, collection);
+  }
+
+  /**
    * Issues a ping request to check if the server is alive
    *
    * @return a {@link org.apache.solr.client.solrj.response.SolrPingResponse} containing the response
@@ -971,6 +986,7 @@
     return new SolrPing().process(this, null);
   }
 
+
   /**
    * Performs a query to the Solr server
    *
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java
index 8f1af8c..c527327 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpSolrClient.java
@@ -374,7 +374,7 @@
 
     if (request instanceof V2Request) {
       if (System.getProperty("solr.v2RealPath") == null || ((V2Request) request).isForceV2()) {
-        basePath = changeV2RequestEndpoint(basePath);
+        basePath = baseUrl.replace("/solr", "/api");
       } else {
         basePath = baseUrl + "/____v2";
       }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
index eed6b87..bd3710f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
@@ -207,7 +207,7 @@
         .withFunctionName("ttest", TTestEvaluator.class)
         .withFunctionName("pairedTtest", PairedTTestEvaluator.class)
         .withFunctionName("multiVariateNormalDistribution", MultiVariateNormalDistributionEvaluator.class)
-        .withFunctionName("integrate", IntegrateEvaluator.class)
+        .withFunctionName("integral", IntegrateEvaluator.class)
         .withFunctionName("density", DensityEvaluator.class)
         .withFunctionName("mannWhitney", MannWhitneyUEvaluator.class)
         .withFunctionName("sumSq", SumSqEvaluator.class)
@@ -300,6 +300,8 @@
         .withFunctionName("upper", UpperEvaluator.class)
         .withFunctionName("split", SplitEvaluator.class)
         .withFunctionName("trim", TrimEvaluator.class)
+        .withFunctionName("cosine", CosineDistanceEvaluator.class)
+        .withFunctionName("trunc", TruncEvaluator.class)
 
         // Boolean Stream Evaluators
 
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java
index ac6f2e2..c8c72f4 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CorrelationEvaluator.java
@@ -17,6 +17,7 @@
 package org.apache.solr.client.solrj.io.eval;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
@@ -26,6 +27,7 @@
 import org.apache.commons.math3.stat.correlation.KendallsCorrelation;
 import org.apache.commons.math3.stat.correlation.SpearmansCorrelation;
 
+import org.apache.solr.client.solrj.io.stream.ZplotStream;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
@@ -109,6 +111,9 @@
           double[][] corrMatrixData = corrMatrix.getData();
           Matrix realMatrix = new Matrix(corrMatrixData);
           realMatrix.setAttribute("corr", pearsonsCorrelation);
+          List<String> labels = getColumnLabels(matrix.getColumnLabels(), corrMatrixData.length);
+          realMatrix.setColumnLabels(labels);
+          realMatrix.setRowLabels(labels);
           return realMatrix;
         } else if (type.equals(CorrelationType.kendalls)) {
           KendallsCorrelation kendallsCorrelation = new KendallsCorrelation(data);
@@ -116,6 +121,9 @@
           double[][] corrMatrixData = corrMatrix.getData();
           Matrix realMatrix =  new Matrix(corrMatrixData);
           realMatrix.setAttribute("corr", kendallsCorrelation);
+          List<String> labels = getColumnLabels(matrix.getColumnLabels(), corrMatrixData.length);
+          realMatrix.setColumnLabels(labels);
+          realMatrix.setRowLabels(labels);
           return realMatrix;
         } else if (type.equals(CorrelationType.spearmans)) {
           SpearmansCorrelation spearmansCorrelation = new SpearmansCorrelation(new Array2DRowRealMatrix(data, false));
@@ -123,6 +131,9 @@
           double[][] corrMatrixData = corrMatrix.getData();
           Matrix realMatrix =  new Matrix(corrMatrixData);
           realMatrix.setAttribute("corr", spearmansCorrelation.getRankCorrelation());
+          List<String> labels = getColumnLabels(matrix.getColumnLabels(), corrMatrixData.length);
+          realMatrix.setColumnLabels(labels);
+          realMatrix.setRowLabels(labels);
           return realMatrix;
         } else {
           return null;
@@ -134,4 +145,18 @@
       throw new IOException("corr function operates on either two numeric arrays or a single matrix as parameters.");
     }
   }
+
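+  // If the source matrix supplied column labels, reuse them; otherwise generate
+  // placeholder labels of the form "colN", padded via ZplotStream.pad so that the
+  // labels stay in numeric order when there are many columns.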
+  public static List<String> getColumnLabels(List<String> labels, int length) {
+    if(labels != null) {
+      return labels;
+    } else {
+      List<String> l = new ArrayList<>();
+      for(int i=0; i<length; i++) {
+        String label = "col"+ ZplotStream.pad(Integer.toString(i), length);
+        l.add(label);
+      }
+
+      return l;
+    }
+  }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineDistanceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineDistanceEvaluator.java
new file mode 100644
index 0000000..564c734
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineDistanceEvaluator.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.commons.math3.exception.DimensionMismatchException;
+import org.apache.commons.math3.ml.distance.DistanceMeasure;
+import org.apache.commons.math3.util.Precision;
+import org.apache.solr.client.solrj.io.Tuple;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class CosineDistanceEvaluator extends RecursiveEvaluator {
+  protected static final long serialVersionUID = 1L;
+
+  public CosineDistanceEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{
+    super(expression, factory);
+  }
+
+  public CosineDistanceEvaluator(StreamExpression expression, StreamFactory factory, List<String> ignoredNamedParameters) throws IOException{
+    super(expression, factory, ignoredNamedParameters);
+  }
+
+  @Override
+  public Object evaluate(Tuple tuple) throws IOException {
+    return new CosineDistance();
+  }
+
+  @Override
+  public Object doWork(Object... values) throws IOException {
+    // Nothing to do here
+    throw new IOException("This call should never occur");
+  }
+
+  public static class CosineDistance implements DistanceMeasure {
+
+    private static final long serialVersionUID = -9108154600539125566L;
+
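+    // The distance is defined as 1 minus the absolute cosine similarity, rounded to
+    // 8 decimal places, so parallel and anti-parallel vectors both yield a distance of 0.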
+    public double compute(double[] v1, double[] v2) throws DimensionMismatchException {
+      return Precision.round(1-Math.abs(CosineSimilarityEvaluator.cosineSimilarity(v1, v2)), 8);
+    }
+  }
+}
\ No newline at end of file
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java
index 2b21ac8..07823c0 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CosineSimilarityEvaluator.java
@@ -20,6 +20,7 @@
 import java.util.List;
 import java.util.Locale;
 
+import org.apache.commons.math3.util.Precision;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
@@ -51,7 +52,7 @@
     return cosineSimilarity(d1, d2);
   }
 
-  private double cosineSimilarity(double[] vectorA, double[] vectorB) {
+  public static double cosineSimilarity(double[] vectorA, double[] vectorB) {
     double dotProduct = 0.0;
     double normA = 0.0;
     double normB = 0.0;
@@ -60,7 +61,8 @@
       normA += Math.pow(vectorA[i], 2);
       normB += Math.pow(vectorB[i], 2);
     }
-    return dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
+    double d = dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
+    return Precision.round(d, 8);
   }
 
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java
index 8a28951..3cb3161 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/CovarianceEvaluator.java
@@ -49,7 +49,11 @@
       Covariance covariance = new Covariance(data, true);
       RealMatrix coMatrix = covariance.getCovarianceMatrix();
       double[][] coData = coMatrix.getData();
-      return new Matrix(coData);
+      Matrix realMatrix = new Matrix(coData);
+      List<String> labels = CorrelationEvaluator.getColumnLabels(matrix.getColumnLabels(), coData.length);
+      realMatrix.setColumnLabels(labels);
+      realMatrix.setRowLabels(labels);
+      return realMatrix;
     } else {
       throw new IOException("The cov function expects either two numeric arrays or a matrix as parameters.");
     }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java
index 183a47b..895d3b5 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DerivativeEvaluator.java
@@ -21,6 +21,7 @@
 
 import org.apache.commons.math3.analysis.DifferentiableUnivariateFunction;
 import org.apache.commons.math3.analysis.UnivariateFunction;
+import org.apache.commons.math3.analysis.interpolation.AkimaSplineInterpolator;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
 
@@ -42,12 +43,17 @@
     }
 
     VectorFunction vectorFunction = (VectorFunction) value;
+
+    DifferentiableUnivariateFunction func = null;
+    double[] x = (double[])vectorFunction.getFromContext("x");
+
     if(!(vectorFunction.getFunction() instanceof DifferentiableUnivariateFunction)) {
-      throw new IOException("Cannot evaluate derivative from parameter.");
+      double[] y = (double[])vectorFunction.getFromContext("y");
+      func = new AkimaSplineInterpolator().interpolate(x, y);
+    } else {
+      func = (DifferentiableUnivariateFunction) vectorFunction.getFunction();
     }
 
-    DifferentiableUnivariateFunction func = (DifferentiableUnivariateFunction)vectorFunction.getFunction();
-    double[] x = (double[])vectorFunction.getFromContext("x");
     UnivariateFunction derfunc = func.derivative();
     double[] dvalues = new double[x.length];
     for(int i=0; i<x.length; i++) {
@@ -56,7 +62,7 @@
 
     VectorFunction vf = new VectorFunction(derfunc, dvalues);
     vf.addToContext("x", x);
-    vf.addToContext("y", vectorFunction.getFromContext("y"));
+    vf.addToContext("y", dvalues);
 
     return vf;
   }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java
index 6b956b6..888e145 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/DistanceEvaluator.java
@@ -123,6 +123,10 @@
         distanceMatrix[i][j] = dist;
       }
     }
-    return new Matrix(distanceMatrix);
+    Matrix m = new Matrix(distanceMatrix);
+    List<String> labels = CorrelationEvaluator.getColumnLabels(matrix.getColumnLabels(), data.length);
+    m.setColumnLabels(labels);
+    m.setRowLabels(labels);
+    return m;
   }
 }
\ No newline at end of file
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java
index 62a3444..fbd5561 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/FuzzyKmeansEvaluator.java
@@ -27,6 +27,7 @@
 import org.apache.commons.math3.ml.clustering.CentroidCluster;
 import org.apache.commons.math3.ml.distance.EuclideanDistance;
 import org.apache.commons.math3.ml.clustering.FuzzyKMeansClusterer;
+import org.apache.solr.client.solrj.io.stream.ZplotStream;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
 import org.apache.solr.client.solrj.io.stream.expr.StreamExpressionNamedParameter;
 import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
@@ -100,6 +101,11 @@
     double[][] mmData = realMatrix.getData();
     Matrix mmMatrix = new Matrix(mmData);
     mmMatrix.setRowLabels(matrix.getRowLabels());
+    List<String> clusterCols = new ArrayList<>();
+    for(int i=0; i<clusters.size(); i++) {
+      clusterCols.add("cluster"+ ZplotStream.pad(Integer.toString(i), clusters.size()));
+    }
+    mmMatrix.setColumnLabels(clusterCols);
     return new KmeansEvaluator.ClusterTuple(fields, clusters, matrix.getColumnLabels(),mmMatrix);
   }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IntegrateEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IntegrateEvaluator.java
index 277748c..fe41c76 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IntegrateEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/IntegrateEvaluator.java
@@ -17,6 +17,7 @@
 package org.apache.solr.client.solrj.io.eval;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.Locale;
 
 import org.apache.commons.math3.analysis.UnivariateFunction;
@@ -34,8 +35,8 @@
   @Override
   public Object doWork(Object... values) throws IOException {
 
-    if(values.length != 3) {
-      throw new IOException("The integrate function requires 3 parameters");
+    if(values.length > 3) {
+      throw new IOException("The integrate function requires at most 3 parameters");
     }
 
     if (!(values[0] instanceof VectorFunction)) {
@@ -43,28 +44,45 @@
     }
 
     VectorFunction vectorFunction = (VectorFunction) values[0];
-    if(!(vectorFunction.getFunction() instanceof UnivariateFunction)) {
+    if (!(vectorFunction.getFunction() instanceof UnivariateFunction)) {
       throw new IOException("Cannot evaluate integral from parameter.");
     }
 
-    Number min = null;
-    Number max = null;
+    UnivariateFunction func = (UnivariateFunction) vectorFunction.getFunction();
 
-    if(values[1] instanceof Number) {
-      min = (Number) values[1];
+    if(values.length == 3) {
+
+
+      Number min = null;
+      Number max = null;
+
+      if (values[1] instanceof Number) {
+        min = (Number) values[1];
+      } else {
+        throw new IOException("The second parameter of the integrate function must be a number");
+      }
+
+      if (values[2] instanceof Number) {
+        max = (Number) values[2];
+      } else {
+        throw new IOException("The third parameter of the integrate function must be a number");
+      }
+
+      RombergIntegrator rombergIntegrator = new RombergIntegrator();
+      return rombergIntegrator.integrate(5000, func, min.doubleValue(), max.doubleValue());
     } else {
-      throw new IOException("The second parameter of the integrate function must be a number");
+      RombergIntegrator integrator = new RombergIntegrator();
+
+      double[] x = (double[])vectorFunction.getFromContext("x");
+      double[] y = (double[])vectorFunction.getFromContext("y");
+      ArrayList<Number> out = new ArrayList<>();
+      out.add(0);
+      for(int i=1; i<x.length; i++) {
+        out.add(integrator.integrate(5000, func, x[0], x[i]));
+      }
+
+      return out;
+
     }
-
-    if(values[2] instanceof Number ) {
-      max = (Number) values[2];
-    } else {
-      throw new IOException("The third parameter of the integrate function must be a number");
-    }
-
-    UnivariateFunction func = (UnivariateFunction)vectorFunction.getFunction();
-
-    RombergIntegrator rombergIntegrator = new RombergIntegrator();
-    return rombergIntegrator.integrate(5000, func, min.doubleValue(), max.doubleValue());
   }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java
index 22cbfc9..1971c15 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeEvaluator.java
@@ -53,7 +53,10 @@
         double[] row = data[i];
         standardized[i] = StatUtils.normalize(row);
       }
-      return new Matrix(standardized);
+      Matrix m = new Matrix(standardized);
+      m.setRowLabels(matrix.getRowLabels());
+      m.setColumnLabels(matrix.getColumnLabels());
+      return m;
     } else {
       return doWork(Arrays.asList((BigDecimal)value));
     }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java
index d300f59..6717909 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/NormalizeSumEvaluator.java
@@ -62,7 +62,10 @@
         unitData[i] = unitRow;
       }
 
-      return new Matrix(unitData);
+      Matrix m = new Matrix(unitData);
+      m.setRowLabels(matrix.getRowLabels());
+      m.setColumnLabels(matrix.getColumnLabels());
+      return m;
     } else if(value instanceof List) {
       List<Number> vals = (List<Number>)value;
       double[] doubles = new double[vals.size()];
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java
index e2100b1..e2dddfb 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TopFeaturesEvaluator.java
@@ -73,9 +73,11 @@
   private List<Integer> getMaxIndexes(double[] values, int k) {
     TreeSet<Pair> set = new TreeSet();
     for(int i=0; i<values.length; i++) {
-      set.add(new Pair(i, values[i]));
-      if(set.size() > k) {
-        set.pollFirst();
+      if(values[i] > 0){
+        set.add(new Pair(i, values[i]));
+        if (set.size() > k) {
+          set.pollFirst();
+        }
       }
     }
 
@@ -89,16 +91,22 @@
 
   public static class Pair implements Comparable<Pair> {
 
-    private int index;
+    private Integer index;
     private Double value;
 
-    public Pair(int index, Number value) {
-      this.index = index;
+    public Pair(int _index, Number value) {
+      this.index = _index;
       this.value = value.doubleValue();
     }
 
     public int compareTo(Pair pair) {
-      return value.compareTo(pair.value);
+
+      int c = value.compareTo(pair.value);
+      if(c==0) {
+        return index.compareTo(pair.index);
+      } else {
+        return c;
+      }
     }
 
     public int getIndex() {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TruncEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TruncEvaluator.java
new file mode 100644
index 0000000..0e4ebac
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/TruncEvaluator.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+import java.util.stream.Collectors;
+
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class TruncEvaluator extends RecursiveObjectEvaluator implements TwoValueWorker {
+  protected static final long serialVersionUID = 1L;
+
+  public TruncEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{
+    super(expression, factory);
+
+    if(2 != containedEvaluators.size()){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting exactly 2 values but found %d",expression,containedEvaluators.size()));
+    }
+  }
+
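+  // Truncates value1 to its first value2 characters; if value1 is a List, each element
+  // is truncated individually. A null value1 returns null.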
+  @Override
+  public Object doWork(Object value1, Object value2){
+    if(null == value1){
+      return null;
+    }
+
+    int endIndex = ((Number)value2).intValue();
+
+    if(value1 instanceof List){
+      return ((List<?>)value1).stream().map(innerValue -> doWork(innerValue, endIndex)).collect(Collectors.toList());
+    }
+    else {
+      return value1.toString().substring(0, endIndex);
+    }
+  }
+}
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java
index 16d72ae..f6463cd 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/UnitEvaluator.java
@@ -55,7 +55,7 @@
 
       Matrix m = new Matrix(unitData);
       m.setRowLabels(matrix.getRowLabels());
-      m.setColumnLabels(matrix.getRowLabels());
+      m.setColumnLabels(matrix.getColumnLabels());
       return m;
     } else if(value instanceof List) {
       List<Number> values = (List<Number>)value;
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java
index f99e4f8..cac1129 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/stream/ZplotStream.java
@@ -36,6 +36,7 @@
 import org.apache.solr.client.solrj.io.comp.StreamComparator;
 import org.apache.solr.client.solrj.io.eval.KmeansEvaluator;
 import org.apache.solr.client.solrj.io.eval.StreamEvaluator;
+import org.apache.solr.client.solrj.io.eval.Matrix;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation;
 import org.apache.solr.client.solrj.io.stream.expr.Explanation.ExpressionType;
 import org.apache.solr.client.solrj.io.stream.expr.Expressible;
@@ -129,6 +130,7 @@
     boolean table = false;
     boolean distribution = false;
     boolean clusters = false;
+    boolean heat = false;
     for(Map.Entry<String, Object> entry : entries) {
       ++columns;
 
@@ -139,6 +141,9 @@
         distribution = true;
       } else if(name.equals("clusters")) {
         clusters = true;
+      } else if(name.equals("heat")) {
+        heat = true;
       }
 
       Object o = entry.getValue();
@@ -176,6 +181,8 @@
           evaluated.put(name, l);
         } else if(eval instanceof Tuple) {
           evaluated.put(name, eval);
+        } else if(eval instanceof Matrix) {
+          evaluated.put(name, eval);
         }
       }
     }
@@ -186,7 +193,7 @@
     //Load the values into tuples
 
     List<Tuple> outTuples = new ArrayList();
-    if(!table && !distribution && !clusters) {
+    if(!table && !distribution && !clusters && !heat) {
       //Handle the vectors
       for (int i = 0; i < numTuples; i++) {
         Tuple tuple = new Tuple(new HashMap());
@@ -303,20 +310,96 @@
           }
         }
       }
-    } else if(table){
+    } else if(table) {
       //Handle the Tuple and List of Tuples
       Object o = evaluated.get("table");
-      if(o instanceof List) {
-        List<Tuple> tuples = (List<Tuple>)o;
-        outTuples.addAll(tuples);
-      } else if(o instanceof Tuple) {
-        outTuples.add((Tuple)o);
+      if (o instanceof Matrix) {
+        Matrix m = (Matrix) o;
+        List<String> rowLabels = m.getRowLabels();
+        List<String> colLabels = m.getColumnLabels();
+        double[][] data = m.getData();
+        for (int i = 0; i < data.length; i++) {
+          String rowLabel = null;
+          if (rowLabels != null) {
+            rowLabel = rowLabels.get(i);
+          } else {
+            rowLabel = Integer.toString(i);
+          }
+          Tuple tuple = new Tuple(new HashMap());
+          tuple.put("rowLabel", rowLabel);
+          double[] row = data[i];
+          for (int j = 0; j < row.length; j++) {
+            String colLabel = null;
+            if (colLabels != null) {
+              colLabel = colLabels.get(j);
+            } else {
+              colLabel = "col" + Integer.toString(j);
+            }
+
+            tuple.put(colLabel, data[i][j]);
+          }
+          outTuples.add(tuple);
+        }
+      }
+    } else if (heat) {
+      //Handle the heat map Matrix
+      Object o = evaluated.get("heat");
+      if(o instanceof Matrix) {
+        Matrix m = (Matrix) o;
+        List<String> rowLabels = m.getRowLabels();
+        List<String> colLabels = m.getColumnLabels();
+        double[][] data = m.getData();
+        for (int i = 0; i < data.length; i++) {
+          String rowLabel = null;
+          if (rowLabels != null) {
+            rowLabel = rowLabels.get(i);
+          } else {
+            rowLabel = "row"+pad(Integer.toString(i), data.length);
+          }
+
+          double[] row = data[i];
+          for (int j = 0; j < row.length; j++) {
+            Tuple tuple = new Tuple(new HashMap());
+            tuple.put("y", rowLabel);
+            String colLabel = null;
+            if (colLabels != null) {
+              colLabel = colLabels.get(j);
+            } else {
+              colLabel = "col" + pad(Integer.toString(j), row.length);
+            }
+            tuple.put("x", colLabel);
+            tuple.put("z", data[i][j]);
+            outTuples.add(tuple);
+          }
+        }
       }
     }
 
     this.out = outTuples.iterator();
   }
 
+  public static String pad(String v, int length) {
+    if(length < 11) {
+      return v;
+    } else if(length < 101) {
+      return prepend(v, 2);
+    } else if (length < 1001) {
+      return prepend(v, 3);
+    } else if(length < 10001){
+      return prepend(v, 4);
+    } else {
+      return prepend(v, 5);
+    }
+  }
+
+  private static String prepend(String v, int length) {
+    while(v.length() < length) {
+      v="0"+v;
+    }
+
+    return v;
+  }
+
   /** Return the stream sort - ie, the order in which records are returned */
   public StreamComparator getStreamSort(){
     return null;
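
For illustration (not part of this patch): when zplot receives a heat Matrix, each cell becomes one tuple with x (column label), y (row label) and z (cell value); unlabeled matrices get generated labels whose numeric suffix is zero-padded by pad() so they sort correctly:

      // A 2x2 Matrix without labels yields four tuples such as
      //   {y: "row0", x: "col0", z: 1.0}, {y: "row0", x: "col1", z: 2.0}, ...
      String wide  = ZplotStream.pad("7", 150);  // "007"  (101-1000 rows/columns -> width 3)
      String small = ZplotStream.pad("7", 5);    // "7"    (10 or fewer -> no padding)
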
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java
index 43ba737..74d0bbc 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java
@@ -207,23 +207,6 @@
         POST,
         null,
         "set-obj-property", null),
-
-    ADD_PACKAGE(CLUSTER_CMD,
-        POST,null,
-        "add-package",null ),
-    UPDATE_PACKAGE(CLUSTER_CMD,
-        POST,null,
-        "update-package",null ),
-    DELETE_RUNTIME_LIB(CLUSTER_CMD,
-        POST,null,
-        "delete-package",null ),
-    ADD_REQ_HANDLER(CLUSTER_CMD,
-        POST,null,
-        "add-requesthandler",null ),
-    DELETE_REQ_HANDLER(CLUSTER_CMD,
-        POST,null,
-        "delete-requesthandler",null ),
-
     UTILIZE_NODE(CLUSTER_CMD,
         POST,
         UTILIZENODE,
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
index 4236177..5334edd 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
@@ -18,11 +18,15 @@
 package org.apache.solr.client.solrj.request;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.client.solrj.ResponseParser;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.response.V2Response;
@@ -42,8 +46,10 @@
   private SolrParams solrParams;
   public final boolean useBinary;
   private String collection;
+  private String mimeType;
   private boolean forceV2 = false;
   private boolean isPerCollectionRequest = false;
+  private ResponseParser parser;
 
   private V2Request(METHOD m, String resource, boolean useBinary) {
     super(m, resource);
@@ -56,7 +62,7 @@
 
   }
 
-  public boolean isForceV2(){
+  public boolean isForceV2() {
     return forceV2;
   }
 
@@ -75,6 +81,15 @@
     return new RequestWriter.ContentWriter() {
       @Override
       public void write(OutputStream os) throws IOException {
+        if (payload instanceof ByteBuffer) {
+          ByteBuffer b = (ByteBuffer) payload;
+          os.write(b.array(), b.arrayOffset(), b.limit());
+          return;
+        }
+        if (payload instanceof InputStream) {
+          IOUtils.copy((InputStream) payload, os);
+          return;
+        }
         if (useBinary) {
           new JavaBinCodec().marshal(payload, os);
         } else {
@@ -84,6 +99,7 @@
 
       @Override
       public String getContentType() {
+        if (mimeType != null) return mimeType;
         return useBinary ? JAVABIN_MIME : JSON_MIME;
       }
     };
@@ -111,6 +127,12 @@
     ew.putIfNotNull("command", payload);
   }
 
+  @Override
+  public ResponseParser getResponseParser() {
+    if (parser != null) return parser;
+    return super.getResponseParser();
+  }
+
   public static class Builder {
     private String resource;
     private METHOD method = METHOD.GET;
@@ -119,6 +141,8 @@
     private boolean useBinary = false;
 
     private boolean forceV2EndPoint = false;
+    private ResponseParser parser;
+    private String mimeType;
 
     /**
      * Create a Builder object based on the provided resource.
@@ -173,11 +197,24 @@
       return this;
     }
 
+    public Builder withResponseParser(ResponseParser parser) {
+      this.parser = parser;
+      return this;
+    }
+
+    public Builder withMimeType(String mimeType) {
+      this.mimeType = mimeType;
+      return this;
+    }
+
     public V2Request build() {
       V2Request v2Request = new V2Request(method, resource, useBinary);
       v2Request.solrParams = params;
       v2Request.payload = payload;
       v2Request.forceV2 = forceV2EndPoint;
+      v2Request.mimeType = mimeType;
+      v2Request.parser = parser;
       return v2Request;
     }
   }
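
A short sketch of the new Builder options (not part of this patch). The resource path is illustrative, and 'bytes' and 'parser' are hypothetical placeholders for a byte[] payload and a ResponseParser instance:

      ByteBuffer payload = ByteBuffer.wrap(bytes);
      V2Request req = new V2Request.Builder("/cluster/files/example.jar")
          .withMethod(SolrRequest.METHOD.PUT)
          .withPayload(payload)                      // ByteBuffer and InputStream payloads are streamed as-is
          .withMimeType("application/octet-stream")  // overrides the default JSON/javabin content type
          .withResponseParser(parser)                // overrides the default response parser
          .forceV2(true)
          .build();
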
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java
index ea7c2fd..c088d45 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/json/TermsFacetMap.java
@@ -55,9 +55,6 @@
    * Defaults to 10 if not specified.
    */
   public TermsFacetMap setLimit(int maximumBuckets) {
-    if (maximumBuckets < 0) {
-      throw new IllegalArgumentException("Parameter 'maximumBuckets' must be non-negative");
-    }
     put("limit", maximumBuckets);
     return this;
   }
@@ -147,8 +144,8 @@
    * Defaults to 1 if not specified.
    */
   public TermsFacetMap setMinCount(int minCount) {
-    if (minCount < 1) {
-      throw new IllegalArgumentException("Parameter 'minCount' must be a positive integer");
+    if (minCount < 0) {
+      throw new IllegalArgumentException("Parameter 'minCount' must be a non-negative integer");
     }
     put("mincount", minCount);
     return this;
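
A small sketch of values the relaxed validation now accepts (not part of this patch); the field name "category" is illustrative:

      TermsFacetMap facet = new TermsFacetMap("category")
          .setLimit(-1)     // negative limits are commonly used to request all buckets
          .setMinCount(0);  // zero is now accepted; only negative values are rejected
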
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java
index b700c2c..52f544e 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/response/json/NestableJsonFacet.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.client.solrj.response.json;
 
+import java.util.Date;
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.Map;
@@ -35,23 +36,25 @@
   private long domainCount;
   private final Map<String, NestableJsonFacet> queryFacetsByName;
   private final Map<String, BucketBasedJsonFacet> bucketBasedFacetByName;
-  private final Map<String, Number> statFacetsByName;
+  private final Map<String, Object> statsByName;
   private final Map<String, HeatmapJsonFacet> heatmapFacetsByName;
 
   public NestableJsonFacet(NamedList<Object> facetNL) {
     queryFacetsByName = new HashMap<>();
     bucketBasedFacetByName = new HashMap<>();
-    statFacetsByName = new HashMap<>();
     heatmapFacetsByName = new HashMap<>();
+    statsByName = new HashMap<>();
 
     for (Map.Entry<String, Object> entry : facetNL) {
       final String key = entry.getKey();
       if (getKeysToSkip().contains(key)) {
         continue;
       } else if ("count".equals(key)) {
-        domainCount = (int) entry.getValue();
-      } else if(entry.getValue() instanceof Number) { // Stat/agg facet value
-        statFacetsByName.put(key, (Number)entry.getValue());
+        domainCount = ((Number) entry.getValue()).longValue();
+      } else if (entry.getValue() instanceof Number || entry.getValue() instanceof String ||
+          entry.getValue() instanceof Date) {
+        // Stat/agg facet value
+        statsByName.put(key, entry.getValue());
       } else if(entry.getValue() instanceof NamedList) { // Either heatmap/query/range/terms facet
         final NamedList<Object> facet = (NamedList<Object>) entry.getValue();
         final boolean isBucketBased = facet.get("buckets") != null;
@@ -103,17 +106,17 @@
   }
 
   /**
-   * Retrieve the value for a stat or agg facet with the provided name
+   * Retrieve the value for a stat or agg with the provided name
    */
-  public Number getStatFacetValue(String name) {
-    return statFacetsByName.get(name);
+  public Object getStatValue(String name) {
+    return statsByName.get(name);
   }
 
   /**
-   * @return the names of any stat or agg facets that are direct descendants of this facet
+   * @return the names of any stats or aggs that are direct descendants of this facet
    */
-  public Set<String> getStatFacetNames() {
-    return statFacetsByName.keySet();
+  public Set<String> getStatNames() {
+    return statsByName.keySet();
   }
 
   /**
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
index fa35e88..96e5371 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
@@ -89,18 +89,14 @@
     return value;
   }
 
-  public Map<String, Object> getClusterProperties() throws IOException {
-    return getClusterProperties(new Stat());
-
-  }
   /**
    * Return the cluster properties
    * @throws IOException if there is an error reading properties from the cluster
    */
   @SuppressWarnings("unchecked")
-  public Map<String, Object> getClusterProperties(Stat stat) throws IOException {
+  public Map<String, Object> getClusterProperties() throws IOException {
     try {
-      Map<String, Object> properties = (Map<String, Object>) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, stat, true));
+      Map<String, Object> properties = (Map<String, Object>) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, new Stat(), true));
       return convertCollectionDefaultsToNestedFormat(properties);
     } catch (KeeperException.NoNodeException e) {
       return Collections.emptyMap();
@@ -109,12 +105,6 @@
     }
   }
 
-  /**This applies the new map over the existing map. it's a merge operation, not an overwrite
-   * This applies the changes atomically over an existing object tree even if multiple nodes are
-   * trying to update this simultaneously
-   *
-   * @param properties The partial Object tree that needs to be applied
-   */
   public void setClusterProperties(Map<String, Object> properties) throws IOException, KeeperException, InterruptedException {
     client.atomicUpdate(ZkStateReader.CLUSTER_PROPS, zkData -> {
       if (zkData == null) return Utils.toJSON(convertCollectionDefaultsToNestedFormat(properties));
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index b9c66cf..dcf7d9e 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -194,7 +194,6 @@
   private final ConcurrentHashMap<String, PropsWatcher> collectionPropsWatchers = new ConcurrentHashMap<>();
 
   private volatile SortedSet<String> liveNodes = emptySortedSet();
-  private volatile int clusterPropsVersion = -1;
 
   private volatile Map<String, Object> clusterProperties = Collections.emptyMap();
 
@@ -494,20 +493,40 @@
     return collection.getZNodeVersion();
   }
 
-  private final Watcher clusterPropertiesWatcher = event -> {
-    // session events are not change events, and do not remove the watcher
-    if (Watcher.Event.EventType.None.equals(event.getType())) {
-      return;
-    }
-    loadClusterProperties();
-  };
+  public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException,
+      InterruptedException {
+    // We need to fetch the current cluster state and the set of live nodes
 
-  public void forceRefreshClusterProps(int expectedVersion) {
-    log.debug("Expected version of clusterprops.json is {} , my version is {}", expectedVersion, clusterPropsVersion);
-    if (expectedVersion > clusterPropsVersion) {
-      log.info("reloading clusterprops.json");
-      loadClusterProperties();
+    log.debug("Updating cluster state from ZooKeeper... ");
+
+    // Sanity check ZK structure.
+    if (!zkClient.exists(CLUSTER_STATE, true)) {
+      throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
+          "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
     }
+
+    // on reconnect of SolrZkClient force refresh and re-add watches.
+    loadClusterProperties();
+    refreshLiveNodes(new LiveNodeWatcher());
+    refreshLegacyClusterState(new LegacyClusterStateWatcher());
+    refreshStateFormat2Collections();
+    refreshCollectionList(new CollectionsChildWatcher());
+    refreshAliases(aliasesManager);
+
+    if (securityNodeListener != null) {
+      addSecurityNodeWatcher(pair -> {
+        ConfigData cd = new ConfigData();
+        cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false);
+        cd.version = pair.second() == null ? -1 : pair.second().getVersion();
+        securityData = cd;
+        securityNodeListener.run();
+      });
+      securityData = getSecurityProps(true);
+    }
+
+    collectionPropsObservers.forEach((k, v) -> {
+      collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
+    });
   }
 
   private void addSecurityNodeWatcher(final Callable<Pair<byte[], Stat>> callback)
@@ -1083,52 +1102,22 @@
     return Collections.unmodifiableMap(clusterProperties);
   }
 
-  public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException,
-      InterruptedException {
-    // We need to fetch the current cluster state and the set of live nodes
-
-    log.debug("Updating cluster state from ZooKeeper... ");
-
-    // Sanity check ZK structure.
-    if (!zkClient.exists(CLUSTER_STATE, true)) {
-      throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
-          "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
+  private final Watcher clusterPropertiesWatcher = event -> {
+    // session events are not change events, and do not remove the watcher
+    if (Watcher.Event.EventType.None.equals(event.getType())) {
+      return;
     }
-
-    // on reconnect of SolrZkClient force refresh and re-add watches.
     loadClusterProperties();
-    refreshLiveNodes(new LiveNodeWatcher());
-    refreshLegacyClusterState(new LegacyClusterStateWatcher());
-    refreshStateFormat2Collections();
-    refreshCollectionList(new CollectionsChildWatcher());
-    refreshAliases(aliasesManager);
-
-    if (securityNodeListener != null) {
-      addSecurityNodeWatcher(pair -> {
-        ConfigData cd = new ConfigData();
-        cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false);
-        cd.version = pair.second() == null ? -1 : pair.second().getVersion();
-        securityData = cd;
-        securityNodeListener.run();
-      });
-      securityData = getSecurityProps(true);
-    }
-
-    collectionPropsObservers.forEach((k, v) -> {
-      collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
-    });
-  }
+  };
 
   @SuppressWarnings("unchecked")
   private void loadClusterProperties() {
     try {
       while (true) {
         try {
-          Stat stat = new Stat();
-          byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, stat, true);
+          byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, new Stat(), true);
           this.clusterProperties = ClusterProperties.convertCollectionDefaultsToNestedFormat((Map<String, Object>) Utils.fromJSON(data));
-          this.clusterPropsVersion = stat.getVersion();
-          log.debug("Loaded cluster properties: {} to version {}", this.clusterProperties, clusterPropsVersion);
+          log.debug("Loaded cluster properties: {}", this.clusterProperties);
 
           for (ClusterPropertiesListener listener : clusterPropertiesListeners) {
             listener.onChange(getClusterProperties());
@@ -1136,7 +1125,6 @@
           return;
         } catch (KeeperException.NoNodeException e) {
           this.clusterProperties = Collections.emptyMap();
-          this.clusterPropsVersion = -1;
           log.debug("Loaded empty cluster properties");
           // set an exists watch, and if the node has been created since the last call,
           // read the data again
@@ -1149,10 +1137,6 @@
     }
   }
 
-  public int getClusterPropsVersion() {
-    return clusterPropsVersion;
-  }
-
   /**
    * Get collection properties for a given collection. If the collection is watched, simply return it from the cache,
    * otherwise fetch it directly from zookeeper. This is a convenience for {@code getCollectionProperties(collection,0)}
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
index d0c85a8..70ea6ed 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
@@ -293,7 +293,8 @@
 
   String JAVABIN_MIME = "application/javabin";
 
-  String PACKAGE = "package";
+  String FILE = "file";
+  String FILES = "files";
 
 }
 
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
index a2f1563..088882a 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/ShardParams.java
@@ -41,7 +41,10 @@
   
   /** The requested URL for this shard */
   String SHARD_URL = "shard.url";
-  
+
+  /** The requested shard name */
+  String SHARD_NAME = "shard.name";
+
   /** The Request Handler for shard requests */
   String SHARDS_QT = "shards.qt";
   
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
index 3804f78..277324a 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
@@ -28,7 +28,6 @@
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
@@ -39,7 +38,7 @@
 import static org.apache.solr.common.util.StrUtils.formatString;
 import static org.apache.solr.common.util.Utils.toJSON;
 
-public class CommandOperation implements MapWriter {
+public class CommandOperation {
   public final String name;
   private Object commandData;//this is most often a map
   private List<String> errors = new ArrayList<>();
@@ -387,10 +386,4 @@
     if (o == null) return null;
     return getInt(name, null);
   }
-
-  @Override
-  public void writeMap(EntryWriter ew) throws IOException {
-    ew.put(name, commandData);
-    ew.putIfNotNull("errors", errors);
-  }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java
index e5bad27..a053a18 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java
@@ -72,7 +72,6 @@
   }
 
   public static void shutdownAndAwaitTermination(ExecutorService pool) {
-    if(pool == null) return;
     pool.shutdown(); // Disable new tasks from being submitted
     awaitTermination(pool);
   }
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java b/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java
index 1d64834..742c59d 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/PathTrie.java
@@ -26,24 +26,25 @@
 
 import static java.util.Collections.emptyList;
 
-/**A utility class to efficiently parse/store/lookup hierarchical paths which are templatized
+/**
+ * A utility class to efficiently parse/store/lookup hierarchical paths which are templatized
  * like /collections/{collection}/shards/{shard}/{replica}
  */
 public class PathTrie<T> {
   private final Set<String> reserved = new HashSet<>();
   Node root = new Node(emptyList(), null);
 
-  public PathTrie() { }
+  public PathTrie() {
+  }
 
   public PathTrie(Set<String> reserved) {
     this.reserved.addAll(reserved);
   }
 
 
-
   public void insert(String path, Map<String, String> replacements, T o) {
     List<String> parts = getPathSegments(path);
-    insert(parts,replacements, o);
+    insert(parts, replacements, o);
   }
 
   public void insert(List<String> parts, Map<String, String> replacements, T o) {
@@ -122,6 +123,9 @@
     private synchronized void insert(List<String> path, T o) {
       String part = path.get(0);
       Node matchedChild = null;
+      if ("*".equals(name)) {
+        return;
+      }
       if (children == null) children = new ConcurrentHashMap<>();
 
       String varName = templateName(part);
@@ -169,9 +173,8 @@
     }
 
     /**
-     *
-     * @param pathSegments pieces in the url /a/b/c has pieces as 'a' , 'b' , 'c'
-     * @param index current index of the pieces that we are looking at in /a/b/c 0='a' and 1='b'
+     * @param pathSegments      pieces in the url /a/b/c has pieces as 'a' , 'b' , 'c'
+     * @param index             current index of the pieces that we are looking at in /a/b/c 0='a' and 1='b'
      * @param templateVariables The mapping of template variable to its value
      * @param availableSubPaths If not null , available sub paths will be returned in this set
      */
@@ -179,13 +182,36 @@
       if (templateName != null) templateVariables.put(templateName, pathSegments.get(index - 1));
       if (pathSegments.size() < index + 1) {
         findAvailableChildren("", availableSubPaths);
+        if (obj == null) {//this is not a leaf node
+          Node n = children.get("*");
+          if (n != null) {
+            return n.obj;
+          }
+
+        }
         return obj;
       }
       String piece = pathSegments.get(index);
-      if (children == null) return null;
+      if (children == null) {
+        return null;
+      }
       Node n = children.get(piece);
       if (n == null && !reserved.contains(piece)) n = children.get("");
-      if (n == null) return null;
+      if (n == null) {
+        n = children.get("*");
+        if (n != null) {
+          StringBuffer sb = new StringBuffer();
+          for (int i = index; i < pathSegments.size(); i++) {
+            sb.append("/").append(pathSegments.get(i));
+          }
+          templateVariables.put("*", sb.toString());
+          return n.obj;
+
+        }
+      }
+      if (n == null) {
+        return null;
+      }
       return n.lookup(pathSegments, index + 1, templateVariables, availableSubPaths);
     }
   }
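
A sketch of the assumed behavior of the new "*" handling (not part of this patch); the paths and payload are illustrative. A trailing wildcard node matches any remaining segments and records them under the "*" template variable:

      PathTrie<String> trie = new PathTrie<>();
      trie.insert("/node/files/*", Collections.emptyMap(), "files-handler");

      Map<String, String> parts = new HashMap<>();
      String handler = trie.lookup("/node/files/a/b.jar", parts);
      // handler == "files-handler"; parts.get("*") == "/a/b.jar"
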
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java
index 9a68c3b..d153657 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java
@@ -30,8 +30,8 @@
  *
  */
 public class StrUtils {
-  public static final char[] HEX_DIGITS = { '0', '1', '2', '3', '4', '5', '6',
-      '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f' };
+  public static final char[] HEX_DIGITS = {'0', '1', '2', '3', '4', '5', '6',
+      '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
 
   public static List<String> splitSmart(String s, char separator) {
     ArrayList<String> lst = new ArrayList<>(4);
@@ -41,17 +41,18 @@
   }
 
   static final String DELIM_CHARS = "/:;.,%#";
-  public static List<String> split(String s, char sep){
-    if(DELIM_CHARS.indexOf(s.charAt(0)) >-1){
-     sep = s.charAt(0);
+
+  public static List<String> split(String s, char sep) {
+    if (DELIM_CHARS.indexOf(s.charAt(0)) > -1) {
+      sep = s.charAt(0);
     }
-    return splitSmart(s,sep, true);
+    return splitSmart(s, sep, true);
 
   }
 
   public static List<String> splitSmart(String s, char separator, boolean trimEmpty) {
     List<String> l = splitSmart(s, separator);
-    if(trimEmpty){
+    if (trimEmpty) {
       if (l.size() > 0 && l.get(0).isEmpty()) l.remove(0);
     }
     return l;
@@ -63,77 +64,88 @@
    * outside strings.
    */
   public static void splitSmart(String s, char separator, List<String> lst) {
-    int pos=0, start=0, end=s.length();
-    char inString=0;
-    char ch=0;
+    int pos = 0, start = 0, end = s.length();
+    char inString = 0;
+    char ch = 0;
     while (pos < end) {
-      char prevChar=ch;
+      char prevChar = ch;
       ch = s.charAt(pos++);
-      if (ch=='\\') {    // skip escaped chars
+      if (ch == '\\') {    // skip escaped chars
         pos++;
-      } else if (inString != 0 && ch==inString) {
-        inString=0;
-      } else if (ch=='\'' || ch=='"') {
+      } else if (inString != 0 && ch == inString) {
+        inString = 0;
+      } else if (ch == '\'' || ch == '"') {
        // If char is directly preceded by a number or letter
         // then don't treat it as the start of a string.
         // Examples: 50" TV, or can't
         if (!Character.isLetterOrDigit(prevChar)) {
-          inString=ch;
+          inString = ch;
         }
-      } else if (ch==separator && inString==0) {
-        lst.add(s.substring(start,pos-1));
-        start=pos;
+      } else if (ch == separator && inString == 0) {
+        lst.add(s.substring(start, pos - 1));
+        start = pos;
       }
     }
     if (start < end) {
-      lst.add(s.substring(start,end));
+      lst.add(s.substring(start, end));
     }
 
     /***
-    if (SolrCore.log.isLoggable(Level.FINEST)) {
-      SolrCore.log.trace("splitCommand=" + lst);
-    }
-    ***/
+     if (SolrCore.log.isLoggable(Level.FINEST)) {
+     SolrCore.log.trace("splitCommand=" + lst);
+     }
+     ***/
 
   }
 
-  /** Splits a backslash escaped string on the separator.
+  /**
+   * Splits a backslash escaped string on the separator.
    * <p>
    * Current backslash escaping supported:
    * <br> \n \t \r \b \f are escaped the same as a Java String
    * <br> Other characters following a backslash are produced verbatim (\c =&gt; c)
    *
-   * @param s  the string to split
+   * @param s         the string to split
    * @param separator the separator to split on
-   * @param decode decode backslash escaping
+   * @param decode    decode backslash escaping
    * @return not null
    */
   public static List<String> splitSmart(String s, String separator, boolean decode) {
     ArrayList<String> lst = new ArrayList<>(2);
     StringBuilder sb = new StringBuilder();
-    int pos=0, end=s.length();
+    int pos = 0, end = s.length();
     while (pos < end) {
-      if (s.startsWith(separator,pos)) {
+      if (s.startsWith(separator, pos)) {
         if (sb.length() > 0) {
           lst.add(sb.toString());
-          sb=new StringBuilder();
+          sb = new StringBuilder();
         }
-        pos+=separator.length();
+        pos += separator.length();
         continue;
       }
 
       char ch = s.charAt(pos++);
-      if (ch=='\\') {
+      if (ch == '\\') {
         if (!decode) sb.append(ch);
-        if (pos>=end) break;  // ERROR, or let it go?
+        if (pos >= end) break;  // ERROR, or let it go?
         ch = s.charAt(pos++);
         if (decode) {
-          switch(ch) {
-            case 'n' : ch='\n'; break;
-            case 't' : ch='\t'; break;
-            case 'r' : ch='\r'; break;
-            case 'b' : ch='\b'; break;
-            case 'f' : ch='\f'; break;
+          switch (ch) {
+            case 'n':
+              ch = '\n';
+              break;
+            case 't':
+              ch = '\t';
+              break;
+            case 'r':
+              ch = '\r';
+              break;
+            case 'b':
+              ch = '\b';
+              break;
+            case 'f':
+              ch = '\f';
+              break;
           }
         }
       }
@@ -157,7 +169,7 @@
    */
   public static List<String> splitFileNames(String fileNames) {
     if (fileNames == null)
-      return Collections.emptyList();
+      return Collections.<String>emptyList();
 
     List<String> result = new ArrayList<>();
     for (String file : fileNames.split("(?<!\\\\),")) {
@@ -167,14 +179,15 @@
     return result;
   }
 
-  /** 
-   * Creates a backslash escaped string, joining all the items. 
+  /**
+   * Creates a backslash escaped string, joining all the items.
+   *
    * @see #escapeTextWithSeparator
    */
   public static String join(Collection<?> items, char separator) {
     if (items == null) return "";
     StringBuilder sb = new StringBuilder(items.size() << 3);
-    boolean first=true;
+    boolean first = true;
     for (Object o : items) {
       String item = String.valueOf(o);
       if (first) {
@@ -188,32 +201,41 @@
   }
 
 
-
   public static List<String> splitWS(String s, boolean decode) {
     ArrayList<String> lst = new ArrayList<>(2);
     StringBuilder sb = new StringBuilder();
-    int pos=0, end=s.length();
+    int pos = 0, end = s.length();
     while (pos < end) {
       char ch = s.charAt(pos++);
       if (Character.isWhitespace(ch)) {
         if (sb.length() > 0) {
           lst.add(sb.toString());
-          sb=new StringBuilder();
+          sb = new StringBuilder();
         }
         continue;
       }
 
-      if (ch=='\\') {
+      if (ch == '\\') {
         if (!decode) sb.append(ch);
-        if (pos>=end) break;  // ERROR, or let it go?
+        if (pos >= end) break;  // ERROR, or let it go?
         ch = s.charAt(pos++);
         if (decode) {
-          switch(ch) {
-            case 'n' : ch='\n'; break;
-            case 't' : ch='\t'; break;
-            case 'r' : ch='\r'; break;
-            case 'b' : ch='\b'; break;
-            case 'f' : ch='\f'; break;
+          switch (ch) {
+            case 'n':
+              ch = '\n';
+              break;
+            case 't':
+              ch = '\t';
+              break;
+            case 'r':
+              ch = '\r';
+              break;
+            case 'b':
+              ch = '\b';
+              break;
+            case 'f':
+              ch = '\f';
+              break;
           }
         }
       }
@@ -237,46 +259,48 @@
   }
 
 
-
-  /** Return if a string starts with '1', 't', or 'T'
-   *  and return false otherwise.
+  /**
+   * Return if a string starts with '1', 't', or 'T'
+   * and return false otherwise.
    */
   public static boolean parseBoolean(String s) {
-    char ch = s.length()>0 ? s.charAt(0) : 0;
-    return (ch=='1' || ch=='t' || ch=='T');
+    char ch = s.length() > 0 ? s.charAt(0) : 0;
+    return (ch == '1' || ch == 't' || ch == 'T');
   }
-  
-  /** how to transform a String into a boolean... more flexible than
+
+  /**
+   * how to transform a String into a boolean... more flexible than
    * Boolean.parseBoolean() to enable easier integration with html forms.
    */
   public static boolean parseBool(String s) {
-    if( s != null ) {
-      if( s.startsWith("true") || s.startsWith("on") || s.startsWith("yes") ) {
+    if (s != null) {
+      if (s.startsWith("true") || s.startsWith("on") || s.startsWith("yes")) {
         return true;
       }
-      if( s.startsWith("false") || s.startsWith("off") || s.equals("no") ) {
+      if (s.startsWith("false") || s.startsWith("off") || s.equals("no")) {
         return false;
       }
     }
-    throw new SolrException( SolrException.ErrorCode.BAD_REQUEST, "invalid boolean value: "+s );
+    throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "invalid boolean value: " + s);
   }
 
   /**
    * {@link NullPointerException} and {@link SolrException} free version of {@link #parseBool(String)}
+   *
    * @return parsed boolean value (or def, if s is null or invalid)
    */
   public static boolean parseBool(String s, boolean def) {
-    if( s != null ) {
-      if( s.startsWith("true") || s.startsWith("on") || s.startsWith("yes") ) {
+    if (s != null) {
+      if (s.startsWith("true") || s.startsWith("on") || s.startsWith("yes")) {
         return true;
       }
-      if( s.startsWith("false") || s.startsWith("off") || s.equals("no") ) {
+      if (s.startsWith("false") || s.startsWith("off") || s.equals("no")) {
         return false;
       }
     }
     return def;
   }
-  
+
   /**
    * URLEncodes a value, replacing only enough chars so that
    * the URL may be unambiguously pasted back into a browser.
@@ -285,7 +309,7 @@
    * &amp;,=,%,+,space are encoded.
    */
   public static void partialURLEncodeVal(Appendable dest, String val) throws IOException {
-    for (int i=0; i<val.length(); i++) {
+    for (int i = 0; i < val.length(); i++) {
       char ch = val.charAt(i);
       if (ch < 32) {
         dest.append('%');
@@ -293,46 +317,60 @@
         dest.append(Integer.toHexString(ch));
       } else {
         switch (ch) {
-          case ' ': dest.append('+'); break;
-          case '&': dest.append("%26"); break;
-          case '%': dest.append("%25"); break;
-          case '=': dest.append("%3D"); break;
-          case '+': dest.append("%2B"); break;
-          default : dest.append(ch); break;
+          case ' ':
+            dest.append('+');
+            break;
+          case '&':
+            dest.append("%26");
+            break;
+          case '%':
+            dest.append("%25");
+            break;
+          case '=':
+            dest.append("%3D");
+            break;
+          case '+':
+            dest.append("%2B");
+            break;
+          default:
+            dest.append(ch);
+            break;
         }
       }
     }
   }
 
-  /** 
+  /**
    * Creates a new copy of the string with the separator backslash escaped.
+   *
    * @see #join
    */
   public static String escapeTextWithSeparator(String item, char separator) {
     StringBuilder sb = new StringBuilder(item.length() * 2);
     appendEscapedTextToBuilder(sb, item, separator);
     return sb.toString();
-  }  
+  }
 
   /**
-   * writes chars from item to out, backslash escaping as needed based on separator -- 
+   * writes chars from item to out, backslash escaping as needed based on separator --
    * but does not append the separator itself
    */
-  public static void appendEscapedTextToBuilder(StringBuilder out, 
-                                                 String item, 
-                                                 char separator) {
+  public static void appendEscapedTextToBuilder(StringBuilder out,
+                                                String item,
+                                                char separator) {
     for (int i = 0; i < item.length(); i++) {
       char ch = item.charAt(i);
-      if (ch == '\\' || ch == separator) { 
+      if (ch == '\\' || ch == separator) {
         out.append('\\');
       }
       out.append(ch);
     }
   }
 
-  /**Format using MesssageFormat but with the ROOT locale
+  /**
+   * Format using MessageFormat but with the ROOT locale
    */
-  public static String formatString(String pattern, Object... args)  {
+  public static String formatString(String pattern, Object... args) {
     return new MessageFormat(pattern, Locale.ROOT).format(args);
   }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index 4eb3a3c..db6ef37 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -28,6 +28,7 @@
 import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.util.AbstractMap;
@@ -53,6 +54,9 @@
 import java.util.regex.Pattern;
 
 import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpGet;
 import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
@@ -100,18 +104,21 @@
     return getDeepCopy(map, maxDepth, mutable, false);
   }
 
-  public static final Function<JSONParser, ObjectBuilder> MAPWRITEROBJBUILDER = jsonParser -> {
-    try {
-      return new ObjectBuilder(jsonParser) {
-        @Override
-        public Object newObject() {
-          return new LinkedHashMapWriter();
-        }
-      };
-    } catch (IOException e) {
-      throw new RuntimeException(e);
+  public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
+    if (map == null) return null;
+    if (maxDepth < 1) return map;
+    Map copy;
+    if (sorted) {
+      copy = new TreeMap();
+    } else {
+      copy = map instanceof LinkedHashMap ? new LinkedHashMap(map.size()) : new HashMap(map.size());
     }
-  };
+    for (Object o : map.entrySet()) {
+      Map.Entry e = (Map.Entry) o;
+      copy.put(e.getKey(), makeDeepCopy(e.getValue(), maxDepth, mutable, sorted));
+    }
+    return mutable ? copy : Collections.unmodifiableMap(copy);
+  }
 
   public static void forEachMapEntry(Object o, String path, BiConsumer fun) {
     Object val = Utils.getObjectByPath(o, false, path);
@@ -141,40 +148,6 @@
       ((Map) o).forEach((k, v) -> fun.accept(k, v));
     }
   }
-  public static final Function<JSONParser, ObjectBuilder> MAPOBJBUILDER = jsonParser -> {
-    try {
-      return new ObjectBuilder(jsonParser) {
-        @Override
-        public Object newObject() {
-          return new HashMap();
-        }
-      };
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  };
-  public static final Pattern ARRAY_ELEMENT_INDEX = Pattern
-      .compile("(\\S*?)\\[([-]?\\d+)\\]");
-
-  public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
-    if (map == null) return null;
-    if (maxDepth < 1) return map;
-    Map copy;
-    if (sorted) {
-      copy = new TreeMap();
-    } else {
-      copy = map instanceof LinkedHashMap ? new LinkedHashMap(map.size()) : new HashMap(map.size());
-    }
-    for (Object o : map.entrySet()) {
-      Map.Entry e = (Map.Entry) o;
-      copy.put(e.getKey(), makeDeepCopy(e.getValue(), maxDepth, mutable, sorted));
-    }
-    return mutable ? copy : Collections.unmodifiableMap(copy);
-  }
-
-  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
-    return getDeepCopy(c, maxDepth, mutable, false);
-  }
 
   private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, boolean sorted) {
     if (v instanceof MapWriter && maxDepth > 1) {
@@ -194,6 +167,29 @@
     return v;
   }
 
+  public static InputStream toJavabin(Object o) throws IOException {
+    try (final JavaBinCodec jbc = new JavaBinCodec()) {
+      BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS();
+      jbc.marshal(o, baos);
+      return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size()));
+    }
+  }
+
+  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
+    return getDeepCopy(c, maxDepth, mutable, false);
+  }
+
+  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
+    if (c == null || maxDepth < 1) return c;
+    Collection result = c instanceof Set ?
+        (sorted ? new TreeSet() : new HashSet()) : new ArrayList();
+    for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted));
+    if (sorted && (result instanceof List)) {
+      Collections.sort((List) result);
+    }
+    return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
+  }
+
   public static void writeJson(Object o, OutputStream os, boolean indent) throws IOException {
     writeJson(o, new OutputStreamWriter(os, UTF_8), indent)
         .flush();
@@ -207,12 +203,35 @@
     return writer;
   }
 
-  public static InputStream toJavabin(Object o) throws IOException {
-    try (final JavaBinCodec jbc = new JavaBinCodec()) {
-      BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS();
-      jbc.marshal(o, baos);
-      return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size()));
+  private static class MapWriterJSONWriter extends JSONWriter {
+
+    public MapWriterJSONWriter(CharArr out, int indentSize) {
+      super(out, indentSize);
     }
+
+    @Override
+    public void handleUnknownClass(Object o) {
+      if (o instanceof MapWriter) {
+        Map m = ((MapWriter) o).toMap(new LinkedHashMap<>());
+        write(m);
+      } else {
+        super.handleUnknownClass(o);
+      }
+    }
+  }
+
+  public static byte[] toJSON(Object o) {
+    if (o == null) return new byte[0];
+    CharArr out = new CharArr();
+    if (!(o instanceof List) && !(o instanceof Map)) {
+      if (o instanceof MapWriter) {
+        o = ((MapWriter) o).toMap(new LinkedHashMap<>());
+      } else if (o instanceof IteratorWriter) {
+        o = ((IteratorWriter) o).toList(new ArrayList<>());
+      }
+    }
+    new MapWriterJSONWriter(out, 2).write(o); // indentation by default
+    return toUTF8(out);
   }
 
   public static String toJSONString(Object o) {
@@ -259,29 +278,16 @@
     return propMap;
   }
 
-  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
-    if (c == null || maxDepth < 1) return c;
-    Collection result = c instanceof Set ?
-        (sorted ? new TreeSet() : new HashSet()) : new ArrayList();
-    for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted));
-    if (sorted && (result instanceof List)) {
-      Collections.sort((List) result);
-    }
-    return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
+  public static Object fromJSON(InputStream is) {
+    return fromJSON(new InputStreamReader(is, UTF_8));
   }
 
-  public static byte[] toJSON(Object o) {
-    if (o == null) return new byte[0];
-    CharArr out = new CharArr();
-    if (!(o instanceof List) && !(o instanceof Map)) {
-      if (o instanceof MapWriter) {
-        o = ((MapWriter) o).toMap(new LinkedHashMap<>());
-      } else if (o instanceof IteratorWriter) {
-        o = ((IteratorWriter) o).toList(new ArrayList<>());
-      }
+  public static Object fromJSON(Reader is) {
+    try {
+      return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal();
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
     }
-    new MapWriterJSONWriter(out, 2).write(o); // indentation by default
-    return toUTF8(out);
   }
 
 
@@ -292,14 +298,35 @@
       throw new RuntimeException(e);
     }
   };
-
-  public static Object fromJSON(InputStream is) {
-    return fromJSON(new InputStreamReader(is, UTF_8));
-  }
-
-  public static Object fromJSON(Reader is) {
+  public static final Function<JSONParser, ObjectBuilder> MAPWRITEROBJBUILDER = jsonParser -> {
     try {
-      return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal();
+      return new ObjectBuilder(jsonParser) {
+        @Override
+        public Object newObject() {
+          return new LinkedHashMapWriter();
+        }
+      };
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  };
+
+  public static final Function<JSONParser, ObjectBuilder> MAPOBJBUILDER = jsonParser -> {
+    try {
+      return new ObjectBuilder(jsonParser) {
+        @Override
+        public Object newObject() {
+          return new HashMap();
+        }
+      };
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  };
+
+  public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
+    try {
+      return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
     }
@@ -318,14 +345,6 @@
     }
   }
 
-  public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
-    try {
-      return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
-    } catch (IOException e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
-    }
-  }
-
   public static JSONParser getJSONParser(Reader reader) {
     JSONParser parser = new JSONParser(reader);
     parser.setFlags(parser.getFlags() |
@@ -350,10 +369,10 @@
 
   public static boolean setObjectByPath(Object root, String hierarchy, Object value) {
     List<String> parts = StrUtils.splitSmart(hierarchy, '/', true);
-    return setObjectByPath(root, parts, value, true);
+    return setObjectByPath(root, parts, value);
   }
 
-  public static boolean setObjectByPath(Object root, List<String> hierarchy, Object value, boolean insertMissing) {
+  public static boolean setObjectByPath(Object root, List<String> hierarchy, Object value) {
     if (root == null) return false;
     if (!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList");
     Object obj = root;
@@ -369,10 +388,7 @@
       }
       if (i < hierarchy.size() - 1) {
         Object o = getVal(obj, s, -1);
-        if (o == null) {
-          if (insertMissing) insertItem(o = new LinkedHashMap<>(), obj, s);
-          else return false;
-        }
+        if (o == null) return false;
         if (idx > -1) {
           List l = (List) o;
           o = idx < l.size() ? l.get(idx) : null;
@@ -381,7 +397,14 @@
         obj = o;
       } else {
         if (idx == -2) {
-          insertItem(value, obj, s);
+          if (obj instanceof NamedList) {
+            NamedList namedList = (NamedList) obj;
+            int location = namedList.indexOf(s, 0);
+            if (location == -1) namedList.add(s, value);
+            else namedList.setVal(location, value);
+          } else if (obj instanceof Map) {
+            ((Map) obj).put(s, value);
+          }
           return true;
         } else {
           Object v = getVal(obj, s, -1);
@@ -405,16 +428,6 @@
 
   }
 
-  private static void insertItem(Object value, Object container, String name) {
-    if (container instanceof NamedList) {
-      NamedList namedList = (NamedList) container;
-      int location = namedList.indexOf(name, 0);
-      if (location == -1) namedList.add(name, value);
-      else namedList.setVal(location, value);
-    } else if (container instanceof Map) {
-      ((Map) container).put(name, value);
-    }
-  }
 
   public static Object getObjectByPath(Object root, boolean onlyPrimitive, List<String> hierarchy) {
     if (root == null) return null;
@@ -565,6 +578,17 @@
     }
   }
 
+  public static Map<String, Object> getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException {
+    VersionedData data = null;
+    try {
+      data = distribStateManager.getData(path);
+    } catch (KeeperException.NoNodeException | NoSuchElementException e) {
+      return Collections.emptyMap();
+    }
+    if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap();
+    return (Map<String, Object>) Utils.fromJSON(data.getData());
+  }
+
   /**
    * Assumes data in ZooKeeper is a JSON string, deserializes it and returns as a Map
    *
@@ -585,20 +609,37 @@
     return Collections.emptyMap();
   }
 
-  public static Map<String, Object> getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException {
-    VersionedData data = null;
-    try {
-      data = distribStateManager.getData(path);
-    } catch (KeeperException.NoNodeException | NoSuchElementException e) {
-      return Collections.emptyMap();
+  public static final Pattern ARRAY_ELEMENT_INDEX = Pattern
+      .compile("(\\S*?)\\[([-]?\\d+)\\]");
+
+  public static SpecProvider getSpec(final String name) {
+    return () -> {
+      return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION);
+    };
+  }
+
+  public static String parseMetricsReplicaName(String collectionName, String coreName) {
+    if (collectionName == null || !coreName.startsWith(collectionName)) {
+      return null;
+    } else {
+      // split "collection1_shard1_1_replica1" into parts
+      if (coreName.length() > collectionName.length()) {
+        String str = coreName.substring(collectionName.length() + 1);
+        int pos = str.lastIndexOf("_replica");
+        if (pos == -1) { // ?? no _replicaN part ??
+          return str;
+        } else {
+          return str.substring(pos + 1);
+        }
+      } else {
+        return null;
+      }
     }
-    if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap();
-    return (Map<String, Object>) Utils.fromJSON(data.getData());
   }
 
   /**
    * Applies one json over other. The 'input' is applied over the sink
-   * The values in input are applied over the values in 'sink' . If a value is 'null'
+   * The values in input are applied over the values in 'sink'. If a value is 'null'
    * that value is removed from sink
    *
    * @param sink  the original json object to start with. Ensure that this Map is mutable
@@ -639,31 +680,6 @@
     return isModified;
   }
 
-  public static SpecProvider getSpec(final String name) {
-    return () -> {
-      return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION);
-    };
-  }
-
-  public static String parseMetricsReplicaName(String collectionName, String coreName) {
-    if (collectionName == null || !coreName.startsWith(collectionName)) {
-      return null;
-    } else {
-      // split "collection1_shard1_1_replica1" into parts
-      if (coreName.length() > collectionName.length()) {
-        String str = coreName.substring(collectionName.length() + 1);
-        int pos = str.lastIndexOf("_replica");
-        if (pos == -1) { // ?? no _replicaN part ??
-          return str;
-        } else {
-          return str.substring(pos + 1);
-        }
-      } else {
-        return null;
-      }
-    }
-  }
-
   public static String getBaseUrlForNodeName(final String nodeName, String urlScheme) {
     final int _offset = nodeName.indexOf("_");
     if (_offset < 0) {
@@ -678,23 +694,6 @@
     }
   }
 
-  private static class MapWriterJSONWriter extends JSONWriter {
-
-    public MapWriterJSONWriter(CharArr out, int indentSize) {
-      super(out, indentSize);
-    }
-
-    @Override
-    public void handleUnknownClass(Object o) {
-      if (o instanceof MapWriter) {
-        Map m = ((MapWriter) o).toMap(new LinkedHashMap<>());
-        write(m);
-      } else {
-        super.handleUnknownClass(o);
-      }
-    }
-  }
-
   public static long time(TimeSource timeSource, TimeUnit unit) {
     return unit.convert(timeSource.getTimeNs(), TimeUnit.NANOSECONDS);
   }
@@ -722,4 +721,68 @@
     return def;
   }
 
+  public interface InputStreamConsumer<T> {
+
+    T accept(InputStream is) throws IOException;
+
+  }
+
+  public static final InputStreamConsumer<?> JAVABINCONSUMER = is -> new JavaBinCodec().unmarshal(is);
+  public static final InputStreamConsumer<?> JSONCONSUMER = is -> Utils.fromJSON(is);
+
+  public static InputStreamConsumer<ByteBuffer> newBytesConsumer(int maxSize) {
+    return is -> {
+      try (BinaryRequestWriter.BAOS bos = new BinaryRequestWriter.BAOS()) {
+        long sz = 0;
+        int next = is.read();
+        while (next > -1) {
+          if (++sz > maxSize) throw new BufferOverflowException();
+          bos.write(next);
+          next = is.read();
+        }
+        bos.flush();
+        return ByteBuffer.wrap(bos.getbuf(), 0, bos.size());
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    };
+
+  }
+
+
+  public static <T> T executeGET(HttpClient client, String url, InputStreamConsumer<T> consumer) throws SolrException {
+    T result = null;
+    HttpGet httpGet = new HttpGet(url);
+    HttpResponse rsp = null;
+    try {
+      rsp = client.execute(httpGet);
+    } catch (IOException e) {
+      log.error("Error in request to url : " + url, e);
+      throw new SolrException(SolrException.ErrorCode.UNKNOWN, "error sending request");
+    }
+    int statusCode = rsp.getStatusLine().getStatusCode();
+    if (statusCode != 200) {
+      try {
+        log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8));
+      } catch (IOException e) {
+        log.error("could not print error", e);
+      }
+      throw new SolrException(SolrException.ErrorCode.getErrorCode(statusCode), "Unknown error");
+    }
+    HttpEntity entity = rsp.getEntity();
+    try {
+      InputStream is = entity.getContent();
+      if (consumer != null) {
+        result = consumer.accept(is);
+      }
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.UNKNOWN, e);
+    } finally {
+      Utils.consumeFully(entity);
+    }
+    return result;
+  }
+
 }
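A hedged usage sketch for the executeGET helper and InputStreamConsumer constants added above; the endpoint URLs and the HttpClient construction are illustrative assumptions, not part of this patch:

    // Apache HttpClient 4.x types: org.apache.http.client.HttpClient, org.apache.http.impl.client.HttpClients
    HttpClient client = HttpClients.createDefault();

    // Parse a JSON response body via Utils.fromJSON using the JSONCONSUMER constant.
    Object parsed = Utils.executeGET(client,
        "http://localhost:8983/api/node/system",       // hypothetical URL for illustration
        Utils.JSONCONSUMER);

    // Read raw bytes with an upper bound; newBytesConsumer throws BufferOverflowException
    // once the body exceeds the given size.
    ByteBuffer bytes = Utils.executeGET(client,
        "http://localhost:8983/solr/admin/file",       // hypothetical URL for illustration
        Utils.newBytesConsumer(10 * 1024 * 1024));

Any non-200 status makes executeGET throw a SolrException mapped from the status code, and the response entity is consumed fully in the finally block either way.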
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java b/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java
index 28c0019..b537536 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/ValidatingJsonMap.java
@@ -31,6 +31,7 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.solr.common.NavigableObject;
 import org.apache.solr.common.SolrException;
 import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
@@ -39,7 +40,7 @@
 import static java.util.Collections.unmodifiableList;
 import static java.util.Collections.unmodifiableSet;
 
-public class ValidatingJsonMap implements Map<String, Object> {
+public class ValidatingJsonMap implements Map<String, Object>, NavigableObject {
 
   private static final String INCLUDE = "#include";
   private static final String RESOURCE_EXTENSION = ".json";
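 For context, a hedged sketch of what implementing NavigableObject adds: ValidatingJsonMap can now be traversed with the path-based default methods of org.apache.solr.common.NavigableObject (the _get/_getStr names below are recalled from that interface and should be treated as assumptions, not confirmed by this hunk):

     ValidatingJsonMap spec = ...;                                     // e.g. a parsed apispec map
     Object commands = spec._get("commands", null);                    // path lookup with a default
     String doc = spec._getStr("commands/create/documentation", null); // hypothetical path for illustration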
diff --git a/solr/solrj/src/resources/apispec/cluster.Commands.json b/solr/solrj/src/resources/apispec/cluster.Commands.json
index af7c19d..069cd1d 100644
--- a/solr/solrj/src/resources/apispec/cluster.Commands.json
+++ b/solr/solrj/src/resources/apispec/cluster.Commands.json
@@ -169,47 +169,6 @@
       "required": [
         "name"
       ]
-    },
-    "add-package": {
-      "documentation": "",
-      "description" : "Add a package to the classpath",
-      "#include": "cluster.Commands.runtimelib.properties"
-    },
-    "update-package": {
-      "documentation": "",
-      "description" : "Update the jar details",
-      "#include": "cluster.Commands.runtimelib.properties"
-    },
-    "delete-package": {
-      "documentation": "",
-      "description" : "delete a lib",
-      "type": "string"
-    },
-    "add-requesthandler": {
-      "type": "object",
-      "documentation": "",
-      "description" : "Create a node level request handler",
-      "properties": {
-        "name": {
-          "type": "string",
-          "description": "Name of the request handler. This is the path"
-        },
-        "class": {
-          "type": "string",
-          "description": "The class name"
-        },
-        "package" : {
-          "type": "string",
-          "description": " The package from where the plugin can be loaded from"
-        }
-      },
-      "required": ["name", "class"],
-      "additionalProperties": true
-    },
-    "delete-requesthandler" : {
-      "description" : "delete a requesthandler",
-      "type": "string"
     }
-
   }
 }
diff --git a/solr/solrj/src/resources/apispec/cluster.Commands.runtimelib.properties.json b/solr/solrj/src/resources/apispec/cluster.Commands.runtimelib.properties.json
deleted file mode 100644
index ab334b5..0000000
--- a/solr/solrj/src/resources/apispec/cluster.Commands.runtimelib.properties.json
+++ /dev/null
@@ -1,23 +0,0 @@
-{
-  "type": "object",
-  "properties": {
-    "name": {
-      "type": "string",
-      "description": "A name for the library"
-    },
-    "url": {
-      "type": "string",
-      "description": "The remote url"
-    },
-    "sha256": {
-      "type": "string",
-      "description": "The sha256 hash of the jar"
-    },
-    "sig": {
-      "type": "string",
-      "description": "the signature of the jar"
-    }
-  },
-  "required" : ["name","url","sha256"]
-
-}
\ No newline at end of file
diff --git a/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json b/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json
index 6ee1498..731c3d8 100644
--- a/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json
+++ b/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json
@@ -10,7 +10,7 @@
       "description": "The request handler class. Class names do not need to be fully qualified if they are included with Solr, so you can abbreviate the name as 'solr.SearchHandler'. Custom or third-party class names may need to be fully qualified, however."
     },
     "runtimeLib": {
-      "type": "string",
+      "type": "boolean",
       "description": "An optional parameter to use a custom .jar file that has been uploaded to Solr's blobstore. This additionally requires that the .jar has also been registered with the 'add-runtimelib' command, which is one of the available commands for the Config API."
     },
     "startup": {
diff --git a/solr/solrj/src/resources/apispec/core.config.Commands.generic.json b/solr/solrj/src/resources/apispec/core.config.Commands.generic.json
index 2ebfdf8..9d2b01d 100644
--- a/solr/solrj/src/resources/apispec/core.config.Commands.generic.json
+++ b/solr/solrj/src/resources/apispec/core.config.Commands.generic.json
@@ -10,7 +10,7 @@
       "description": "The configuration item class. Class names do not need to be fully qualified if they are included with Solr, so you can abbreviate the name as 'solr.SearchHandler'. Custom or third-party class names may need to be fully qualified, however."
    },
     "runtimeLib": {
-      "type": "string",
+      "type": "boolean",
       "description": "An optional parameter to use a custom .jar file that has been uploaded to Solr's blobstore. This additionally requires that the .jar has also been registered with the 'add-runtimelib' command, which is one of the available commands for the Config API."
    }
   },
diff --git a/solr/solrj/src/resources/apispec/core.config.json b/solr/solrj/src/resources/apispec/core.config.json
index 2324821..81e7d54 100644
--- a/solr/solrj/src/resources/apispec/core.config.json
+++ b/solr/solrj/src/resources/apispec/core.config.json
@@ -12,8 +12,7 @@
       "/config/jmx",
       "/config/requestDispatcher",
       "/config/znodeVersion",
-      "/config/{plugin}",
-      "/config/{plugin}/{pluginName}"
+      "/config/{plugin}"
     ]
   }
 }
diff --git a/solr/solrj/src/resources/apispec/node.blob.GET.json b/solr/solrj/src/resources/apispec/node.blob.GET.json
deleted file mode 100644
index 273333e..0000000
--- a/solr/solrj/src/resources/apispec/node.blob.GET.json
+++ /dev/null
@@ -1,11 +0,0 @@
-{
-  "methods": [
-    "GET"
-  ],
-  "url": {
-    "paths": [
-      "/node/blob",
-      "/node/blob/{sha256}"
-    ]
-  }
-}
diff --git a/solr/solrj/src/resources/apispec/node.ext.json b/solr/solrj/src/resources/apispec/node.ext.json
deleted file mode 100644
index 161b2aa..0000000
--- a/solr/solrj/src/resources/apispec/node.ext.json
+++ /dev/null
@@ -1,13 +0,0 @@
-{
-  "methods": [
-    "POST",
-    "GET",
-    "DELETE"
-  ],
-  "url": {
-    "paths": [
-      "/node/ext/{handlerName}",
-      "/node/ext"
-    ]
-  }
-}
diff --git a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java
index 5a64c3f..5d78d06 100644
--- a/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/ref_guide_examples/JsonRequestApiTest.java
@@ -20,6 +20,7 @@
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.Date;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -34,10 +35,10 @@
 import org.apache.solr.client.solrj.request.json.QueryFacetMap;
 import org.apache.solr.client.solrj.request.json.RangeFacetMap;
 import org.apache.solr.client.solrj.request.json.TermsFacetMap;
-import org.apache.solr.client.solrj.response.json.BucketJsonFacet;
-import org.apache.solr.client.solrj.response.json.NestableJsonFacet;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.client.solrj.response.json.BucketJsonFacet;
+import org.apache.solr.client.solrj.response.json.NestableJsonFacet;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.SolrDocument;
 import org.apache.solr.common.params.ModifiableSolrParams;
@@ -455,6 +456,7 @@
         .setQuery("memory")
         .withFilter("inStock:true")
         .withStatFacet("avg_price", "avg(price)")
+        .withStatFacet("min_manufacturedate_dt", "min(manufacturedate_dt)")
         .withStatFacet("num_suppliers", "unique(manu_exact)")
         .withStatFacet("median_weight", "percentile(weight,50)");
     QueryResponse queryResponse = request.process(solrClient, COLLECTION_NAME);
@@ -464,9 +466,13 @@
     assertEquals(4, queryResponse.getResults().getNumFound());
     assertEquals(4, queryResponse.getResults().size());
     final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse();
-    assertEquals(146.66, (double) topLevelFacetingData.getStatFacetValue("avg_price"), 0.5);
-    assertEquals(3, topLevelFacetingData.getStatFacetValue("num_suppliers"));
-    assertEquals(352.0, (double) topLevelFacetingData.getStatFacetValue("median_weight"), 0.5);
+    assertEquals(146.66, (double) topLevelFacetingData.getStatValue("avg_price"), 0.5);
+    assertEquals(3, topLevelFacetingData.getStatValue("num_suppliers"));
+    assertEquals(352.0, (double) topLevelFacetingData.getStatValue("median_weight"), 0.5);
+
+    Object val = topLevelFacetingData.getStatValue("min_manufacturedate_dt");
+    assertTrue(val instanceof Date);
+    assertEquals("2006-02-13T15:26:37Z", ((Date)val).toInstant().toString());
   }
 
   @Test
@@ -478,6 +484,7 @@
         .setQuery("*:*")
         .withFilter("price:[1.0 TO *]")
         .withFilter("popularity:[0 TO 10]")
+        .withStatFacet("min_manu_id_s", "min(manu_id_s)")
         .withStatFacet("avg_value", "avg(div(popularity,price))");
     QueryResponse queryResponse = request.process(solrClient, COLLECTION_NAME);
     //end::solrj-json-metrics-facet-simple[]
@@ -486,7 +493,10 @@
     assertEquals(13, queryResponse.getResults().getNumFound());
     assertEquals(10, queryResponse.getResults().size());
     final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse();
-    assertEquals(0.036, (double) topLevelFacetingData.getStatFacetValue("avg_value"), 0.1);
+    assertEquals(0.036, (double) topLevelFacetingData.getStatValue("avg_value"), 0.1);
+    Object val = topLevelFacetingData.getStatValue("min_manu_id_s");
+    assertTrue(val instanceof String);
+    assertEquals("apple", val.toString());
   }
 
   @Test
@@ -511,7 +521,7 @@
     assertEquals(13, queryResponse.getResults().getNumFound());
     assertEquals(10, queryResponse.getResults().size());
     final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse();
-    assertEquals(0.108, (double) topLevelFacetingData.getStatFacetValue("avg_value"), 0.1);
+    assertEquals(0.108, (double) topLevelFacetingData.getStatValue("avg_value"), 0.1);
   }
 
   @Test
@@ -551,7 +561,7 @@
     assertEquals(10, queryResponse.getResults().size());
     final NestableJsonFacet topLevelFacetingData = queryResponse.getJsonFacetingResponse();
     assertEquals(2, topLevelFacetingData.getQueryFacet("high_popularity").getCount());
-    assertEquals(199.5, topLevelFacetingData.getQueryFacet("high_popularity").getStatFacetValue("average_price"));
+    assertEquals(199.5, topLevelFacetingData.getQueryFacet("high_popularity").getStatValue("average_price"));
   }
 
   @Test
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java
index 6a1ab9bd..85279d7 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/MergeIndexesEmbeddedTest.java
@@ -52,7 +52,7 @@
 
   @Override
   protected SolrClient getSolrAdmin() {
-    return new EmbeddedSolrServer(cores, "core0");
+    return new EmbeddedSolrServer(cores, null);
   }
 
   @Override
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java
index 6972d96..30ddfc9 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/embedded/TestSolrProperties.java
@@ -47,13 +47,9 @@
     RuleChain.outerRule(new SystemPropertiesRestoreRule());
 
   protected SolrClient getSolrAdmin() {
-    return new EmbeddedSolrServer(cores, "core0");
+    return new EmbeddedSolrServer(cores, null);
   }
   
-  protected SolrClient getRenamedSolrAdmin() {
-    return new EmbeddedSolrServer(cores, "renamed_core");
-  }
-
   @Test
   public void testProperties() throws Exception {
 
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
index 1c9ba04..a505799 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/impl/CloudSolrClientTest.java
@@ -49,6 +49,7 @@
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.client.solrj.response.RequestStatusState;
 import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.client.solrj.response.SolrPingResponse;
 import org.apache.solr.cloud.AbstractDistribZkTestBase;
 import org.apache.solr.cloud.SolrCloudTestCase;
 import org.apache.solr.common.SolrDocument;
@@ -951,4 +952,16 @@
     log.info("Shards giving the response: " + Arrays.toString(shardAddresses.toArray()));
   }
 
+  @Test
+  public void testPing() throws Exception {
+    final String testCollection = "ping_test";
+    CollectionAdminRequest.createCollection(testCollection, "conf", 2, 1).process(cluster.getSolrClient());
+    cluster.waitForActiveCollection(testCollection, 2, 2);
+    final SolrClient clientUnderTest = getRandomClient();
+
+    final SolrPingResponse response = clientUnderTest.ping(testCollection);
+
+    assertEquals("This should be OK", 0, response.getStatus());
+  }
+
 }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
index 0435ed5..2e42700 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
@@ -57,7 +57,7 @@
       "triangularDistribution", "precision", "minMaxScale", "markovChain", "grandSum",
       "scalarAdd", "scalarSubtract", "scalarMultiply", "scalarDivide", "sumRows",
       "sumColumns", "diff", "corrPValues", "normalizeSum", "geometricDistribution", "olsRegress",
-      "derivative", "spline", "ttest", "pairedTtest", "multiVariateNormalDistribution", "integrate",
+      "derivative", "spline", "ttest", "pairedTtest", "multiVariateNormalDistribution", "integral",
       "density", "mannWhitney", "sumSq", "akima", "lerp", "chiSquareDataSet", "gtestDataSet",
       "termVectors", "getColumnLabels", "getRowLabels", "getAttribute", "kmeans", "getCentroids",
       "getCluster", "topFeatures", "featureSelect", "rowAt", "colAt", "setColumnLabels",
@@ -77,7 +77,7 @@
       "getSupportPoints", "pairSort", "log10", "plist", "recip", "pivot", "ltrim", "rtrim", "export",
       "zplot", "natural", "repeat", "movingMAD", "hashRollup", "noop", "var", "stddev", "recNum", "isNull",
       "notNull", "matches", "projectToBorder", "double", "long", "parseCSV", "parseTSV", "dateTime",
-       "split", "upper", "trim", "lower"};
+       "split", "upper", "trim", "lower", "trunc", "cosine"};
 
   @Test
   public void testLang() {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
index 890d0d3..9cca3f9 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
@@ -229,6 +229,27 @@
     assertEquals(s2, "c-d-hello");
   }
 
+
+  @Test
+  public void testTrunc() throws Exception {
+    String expr = " select(list(tuple(field1=\"abcde\", field2=\"012345\")), trunc(field1, 2) as field3, trunc(field2, 4) as field4)";
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", expr);
+    paramsLoc.set("qt", "/stream");
+
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertEquals(tuples.size(),  1);
+    String s1 = tuples.get(0).getString("field3");
+    assertEquals(s1, "ab");
+    String s2 = tuples.get(0).getString("field4");
+    assertEquals(s2, "0123");
+  }
+
   @Test
   public void testUpperLowerSingle() throws Exception {
     String expr = " select(list(tuple(field1=\"a\", field2=\"C\")), upper(field1) as field3, lower(field2) as field4)";
@@ -249,6 +270,28 @@
     assertEquals(s2, "c");
   }
 
+
+  @Test
+  public void testTruncArray() throws Exception {
+    String expr = " select(list(tuple(field1=array(\"aaaa\",\"bbbb\",\"cccc\"))), trunc(field1, 3) as field2)";
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", expr);
+    paramsLoc.set("qt", "/stream");
+
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertEquals(tuples.size(),  1);
+    List<String> l1 = (List<String>)tuples.get(0).get("field2");
+    assertEquals(l1.get(0), "aaa");
+    assertEquals(l1.get(1), "bbb");
+    assertEquals(l1.get(2), "ccc");
+
+  }
+
   @Test
   public void testUpperLowerArray() throws Exception {
     String expr = " select(list(tuple(field1=array(\"a\",\"b\",\"c\"), field2=array(\"X\",\"Y\",\"Z\"))), upper(field1) as field3, lower(field2) as field4)";
@@ -723,6 +766,27 @@
   }
 
   @Test
+  public void testCosineDistance() throws Exception {
+    String cexpr = "let(echo=true, " +
+        "a=array(1,2,3,4)," +
+        "b=array(10, 20, 30, 45), " +
+        "c=distance(a, b, cosine()), " +
+        ")";
+
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cexpr);
+    paramsLoc.set("qt", "/stream");
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString() + "/" + COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 1);
+    Number d = (Number) tuples.get(0).get("c");
+    assertEquals(d.doubleValue(), 0.0017046159, 0.0001);
+  }
+
+  @Test
   public void testDistance() throws Exception {
     String cexpr = "let(echo=true, " +
                        "a=array(1,2,3,4)," +
@@ -1498,58 +1562,24 @@
 
   @Test
   public void testZplot() throws Exception {
-    String cexpr = "let(c=tuple(a=add(1,2), b=add(2,3))," +
-        "               zplot(table=c))";
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
+
+    String cexpr = "let(a=array(1,2,3,4)," +
+        "        b=array(10,11,12,13),"+
+        "        zplot(x=a, y=b))";
 
     ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", cexpr);
     paramsLoc.set("qt", "/stream");
-    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
     TupleStream solrStream = new SolrStream(url, paramsLoc);
     StreamContext context = new StreamContext();
     solrStream.setStreamContext(context);
     List<Tuple> tuples = getTuples(solrStream);
-    assertTrue(tuples.size() == 1);
-    Tuple out = tuples.get(0);
-
-    assertEquals(out.getDouble("a").doubleValue(), 3.0, 0.0);
-    assertEquals(out.getDouble("b").doubleValue(), 5.0, 0.0);
-
-    cexpr = "let(c=list(tuple(a=add(1,2), b=add(2,3)), tuple(a=add(1,3), b=add(2,4)))," +
-        "        zplot(table=c))";
-
-    paramsLoc = new ModifiableSolrParams();
-    paramsLoc.set("expr", cexpr);
-    paramsLoc.set("qt", "/stream");
-    solrStream = new SolrStream(url, paramsLoc);
-    context = new StreamContext();
-    solrStream.setStreamContext(context);
-    tuples = getTuples(solrStream);
-    assertTrue(tuples.size() == 2);
-    out = tuples.get(0);
-
-    assertEquals(out.getDouble("a").doubleValue(), 3.0, 0.0);
-    assertEquals(out.getDouble("b").doubleValue(), 5.0, 0.0);
-
-    out = tuples.get(1);
-
-    assertEquals(out.getDouble("a").doubleValue(), 4.0, 0.0);
-    assertEquals(out.getDouble("b").doubleValue(), 6.0, 0.0);
-
-
-    cexpr = "let(a=array(1,2,3,4)," +
-        "        b=array(10,11,12,13),"+
-        "        zplot(x=a, y=b))";
-
-    paramsLoc = new ModifiableSolrParams();
-    paramsLoc.set("expr", cexpr);
-    paramsLoc.set("qt", "/stream");
-    solrStream = new SolrStream(url, paramsLoc);
-    context = new StreamContext();
-    solrStream.setStreamContext(context);
-    tuples = getTuples(solrStream);
     assertTrue(tuples.size() == 4);
-    out = tuples.get(0);
+    Tuple out = tuples.get(0);
 
     assertEquals(out.getDouble("x").doubleValue(), 1.0, 0.0);
     assertEquals(out.getDouble("y").doubleValue(), 10.0, 0.0);
@@ -1680,6 +1710,152 @@
     assertTrue(clusters.contains("cluster3"));
     assertTrue(clusters.contains("cluster4"));
     assertTrue(clusters.contains("cluster5"));
+
+    cexpr = "let(a=matrix(array(0,1,2,3,4,5,6,7,8,9,10,11), array(10,11,12,13,14,15,16,17,18,19,20,21))," +
+        "        zplot(heat=a))";
+
+    paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cexpr);
+    paramsLoc.set("qt", "/stream");
+    solrStream = new SolrStream(url, paramsLoc);
+    context = new StreamContext();
+    solrStream.setStreamContext(context);
+    tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 24);
+    Tuple tuple = tuples.get(0);
+    String xLabel = tuple.getString("x");
+    String yLabel = tuple.getString("y");
+    Number z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col00");
+    assertEquals(yLabel, "row0");
+    assertEquals(z.longValue(), 0L);
+
+    tuple = tuples.get(1);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col01");
+    assertEquals(yLabel, "row0");
+    assertEquals(z.longValue(), 1L);
+
+    tuple = tuples.get(2);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col02");
+    assertEquals(yLabel, "row0");
+    assertEquals(z.longValue(), 2L);
+
+    tuple = tuples.get(12);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col00");
+    assertEquals(yLabel, "row1");
+    assertEquals(z.longValue(), 10L);
+
+    cexpr = "let(a=transpose(matrix(array(0, 1, 2, 3, 4, 5, 6, 7,8,9,10,11), " +
+        "                           array(10,11,12,13,14,15,16,17,18,19,20,21)))," +
+        "        zplot(heat=a))";
+
+    paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cexpr);
+    paramsLoc.set("qt", "/stream");
+    solrStream = new SolrStream(url, paramsLoc);
+    context = new StreamContext();
+    solrStream.setStreamContext(context);
+    tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 24);
+    tuple = tuples.get(0);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col0");
+    assertEquals(yLabel, "row00");
+    assertEquals(z.longValue(), 0L);
+
+    tuple = tuples.get(1);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col1");
+    assertEquals(yLabel, "row00");
+    assertEquals(z.longValue(), 10L);
+
+    tuple = tuples.get(2);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col0");
+    assertEquals(yLabel, "row01");
+    assertEquals(z.longValue(), 1L);
+
+    tuple = tuples.get(12);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "col0");
+    assertEquals(yLabel, "row06");
+    assertEquals(z.longValue(), 6L);
+
+    cexpr = "let(a=matrix(array(0, 1, 2, 3, 4, 5, 6, 7,8,9,10,11), " +
+        "                 array(10,11,12,13,14,15,16,17,18,19,20,21))," +
+        "        b=setRowLabels(a, array(\"blah1\", \"blah2\")),"+
+        "        c=setColumnLabels(b, array(\"rah1\", \"rah2\", \"rah3\", \"rah4\", \"rah5\", \"rah6\", \"rah7\", \"rah8\", \"rah9\", \"rah10\", \"rah11\", \"rah12\")),"+
+        "        zplot(heat=c))";
+
+    paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cexpr);
+    paramsLoc.set("qt", "/stream");
+    solrStream = new SolrStream(url, paramsLoc);
+    context = new StreamContext();
+    solrStream.setStreamContext(context);
+    tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 24);
+    tuple = tuples.get(0);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "rah1");
+    assertEquals(yLabel, "blah1");
+    assertEquals(z.longValue(), 0L);
+
+    tuple = tuples.get(1);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "rah2");
+    assertEquals(yLabel, "blah1");
+    assertEquals(z.longValue(), 1L);
+
+    tuple = tuples.get(2);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "rah3");
+    assertEquals(yLabel, "blah1");
+    assertEquals(z.longValue(), 2L);
+
+    tuple = tuples.get(12);
+    xLabel = tuple.getString("x");
+    yLabel = tuple.getString("y");
+    z = tuple.getLong("z");
+
+    assertEquals(xLabel, "rah1");
+    assertEquals(yLabel, "blah2");
+    assertEquals(z.longValue(), 10L);
   }
 
 
@@ -3343,7 +3519,7 @@
     List<Tuple> tuples = getTuples(solrStream);
     assertTrue(tuples.size() == 1);
     Number cs = (Number)tuples.get(0).get("return-value");
-    assertTrue(cs.doubleValue() == 0.9838197164968291);
+    assertEquals(cs.doubleValue(),0.9838197164968291, .00000001);
   }
 
   @Test
@@ -4085,9 +4261,10 @@
     String cexpr = "let(echo=true, " +
                        "a=sequence(50, 1, 0), " +
                        "b=spline(a), " +
-                       "c=integrate(b, 0, 49), " +
-                       "d=integrate(b, 0, 20), " +
-                       "e=integrate(b, 20, 49))";
+                       "c=integral(b, 0, 49), " +
+                       "d=integral(b, 0, 20), " +
+                       "e=integral(b, 20, 49)," +
+                       "f=integral(b))";
     ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", cexpr);
     paramsLoc.set("qt", "/stream");
@@ -4103,6 +4280,9 @@
     assertEquals(integral.doubleValue(), 20, 0.0);
     integral = (Number)tuples.get(0).get("e");
     assertEquals(integral.doubleValue(), 29, 0.0);
+    List<Number> integrals = (List<Number>)tuples.get(0).get("f");
+    assertEquals(integrals.size(), 50);
+    assertEquals(integrals.get(49).intValue(), 49);
   }
 
   @Test
@@ -4313,7 +4493,8 @@
 
   }
 
-  @Test
+
+  @Test
   public void testLerp() throws Exception {
     String cexpr = "let(echo=true," +
         "    a=array(0,1,2,3,4,5,6,7), " +
@@ -5068,7 +5249,9 @@
                        "f=corr(d), " +
                        "g=corr(d, type=kendalls), " +
                        "h=corr(d, type=spearmans)," +
-                       "i=corrPValues(f))";
+                       "i=corrPValues(f)," +
+        "               j=getRowLabels(f)," +
+        "               k=getColumnLabels(f))";
     ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
     paramsLoc.set("expr", cexpr);
     paramsLoc.set("qt", "/stream");
@@ -5157,6 +5340,20 @@
     assertEquals(row3.get(0).doubleValue(), 0.28548201004998375, 0);
     assertEquals(row3.get(1).doubleValue(), 0.28548201004998375, 0);
     assertEquals(row3.get(2).doubleValue(), 0, 0);
+
+    List<String> rowLabels = (List<String>)tuples.get(0).get("j");
+    assertEquals(rowLabels.size(), 3);
+    assertEquals(rowLabels.get(0), "col0");
+    assertEquals(rowLabels.get(1), "col1");
+    assertEquals(rowLabels.get(2), "col2");
+
+    List<String> colLabels = (List<String>)tuples.get(0).get("k");
+    assertEquals(colLabels.size(), 3);
+    assertEquals(colLabels.get(0), "col0");
+    assertEquals(colLabels.get(1), "col1");
+    assertEquals(colLabels.get(2), "col2");
   }
 
   @Test
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java
index 76ce4ab..09235bc 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java
@@ -16,6 +16,10 @@
  */
 package org.apache.solr.client.solrj.request;
 
+import static org.hamcrest.CoreMatchers.anyOf;
+import static org.hamcrest.CoreMatchers.equalTo;
+import static org.hamcrest.CoreMatchers.is;
+
 import java.io.File;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -47,10 +51,6 @@
 import org.junit.Test;
 import org.restlet.ext.servlet.ServerServlet;
 
-import static org.hamcrest.CoreMatchers.anyOf;
-import static org.hamcrest.CoreMatchers.equalTo;
-import static org.hamcrest.CoreMatchers.is;
-
 /**
  * Test the functionality (accuracy and failure) of the methods exposed by the classes
  * {@link SchemaRequest} and {@link SchemaResponse}.
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
index 0cf59b7..44247a7 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/TestCoreAdmin.java
@@ -82,7 +82,7 @@
   */
 
   protected SolrClient getSolrAdmin() {
-    return new EmbeddedSolrServer(cores, "core0");
+    return new EmbeddedSolrServer(cores, null);
   }
 
   @Test
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingEmbeddedTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingEmbeddedTest.java
new file mode 100644
index 0000000..606debb
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingEmbeddedTest.java
@@ -0,0 +1,592 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.request.json;
+
+import java.io.File;
+import java.io.OutputStreamWriter;
+import java.io.Writer;
+import java.util.List;
+import java.util.Properties;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.lucene.util.LuceneTestCase;
+import org.apache.solr.EmbeddedSolrServerTestBase;
+import org.apache.solr.SolrTestCaseJ4.SuppressSSL;
+import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
+import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
+import org.apache.solr.client.solrj.request.ContentStreamUpdateRequest;
+import org.apache.solr.client.solrj.response.QueryResponse;
+import org.apache.solr.client.solrj.response.UpdateResponse;
+import org.apache.solr.client.solrj.response.json.BucketJsonFacet;
+import org.apache.solr.client.solrj.response.json.NestableJsonFacet;
+import org.apache.solr.common.SolrDocumentList;
+import org.apache.solr.util.ExternalPaths;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+@SuppressSSL
+public class DirectJsonQueryRequestFacetingEmbeddedTest extends EmbeddedSolrServerTestBase {
+
+  private static final String COLLECTION_NAME = "techproducts";
+  private static final int NUM_TECHPRODUCTS_DOCS = 32;
+  private static final int NUM_IN_STOCK = 17;
+  private static final int NUM_ELECTRONICS = 12;
+  private static final int NUM_CURRENCY = 4;
+  private static final int NUM_MEMORY = 3;
+  private static final int NUM_CORSAIR = 3;
+  private static final int NUM_BELKIN = 2;
+  private static final int NUM_CANON = 2;
+
+  @BeforeClass
+  public static void beforeClass() throws Exception {
+    final String sourceHome = ExternalPaths.SOURCE_HOME;
+
+    final File tempSolrHome = LuceneTestCase.createTempDir().toFile();
+    FileUtils.copyFileToDirectory(new File(sourceHome, "server/solr/solr.xml"), tempSolrHome);
+    final File collectionDir = new File(tempSolrHome, COLLECTION_NAME);
+    FileUtils.forceMkdir(collectionDir);
+    final File configSetDir = new File(sourceHome, "server/solr/configsets/sample_techproducts_configs/conf");
+    FileUtils.copyDirectoryToDirectory(configSetDir, collectionDir);
+
+    final Properties props = new Properties();
+    props.setProperty("name", COLLECTION_NAME);
+
+    try (Writer writer = new OutputStreamWriter(FileUtils.openOutputStream(new File(collectionDir, "core.properties")),
+        "UTF-8");) {
+      props.store(writer, null);
+    }
+
+    final String config = tempSolrHome.getAbsolutePath() + "/" + COLLECTION_NAME + "/conf/solrconfig.xml";
+    final String schema = tempSolrHome.getAbsolutePath() + "/" + COLLECTION_NAME + "/conf/managed-schema";
+    initCore(config, schema, tempSolrHome.getAbsolutePath(), COLLECTION_NAME);
+
+    client = new EmbeddedSolrServer(h.getCoreContainer(), COLLECTION_NAME) {
+      @Override
+      public void close() {
+        // do not close core container
+      }
+    };
+
+    ContentStreamUpdateRequest up = new ContentStreamUpdateRequest("/update");
+    up.setParam("collection", COLLECTION_NAME);
+    up.addFile(getFile("solrj/techproducts.xml"), "application/xml");
+    up.setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true);
+    UpdateResponse updateResponse = up.process(client);
+    assertEquals(0, updateResponse.getStatus());
+  }
+
+  @Test
+  public void testSingleTermsFacet() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'top_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount());
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_cats",
+        new FacetBucket("electronics", NUM_ELECTRONICS),
+        new FacetBucket("currency", NUM_CURRENCY),
+        new FacetBucket("memory", NUM_MEMORY));
+  }
+
+  @Test
+  public void testMultiTermsFacet() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'top_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "    },",
+        "    'top_manufacturers': {",
+        "      'type': 'terms',",
+        "      'field': 'manu_id_s',",
+        "      'limit': 3",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount());
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_cats",
+        new FacetBucket("electronics", NUM_ELECTRONICS),
+        new FacetBucket("currency", NUM_CURRENCY),
+        new FacetBucket("memory", NUM_MEMORY));
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_manufacturers",
+        new FacetBucket("corsair", NUM_CORSAIR),
+        new FacetBucket("belkin", NUM_BELKIN),
+        new FacetBucket("canon", NUM_CANON));
+  }
+
+  @Test
+  public void testSingleRangeFacet() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'prices': {",
+        "      'type': 'range',",
+        "      'field': 'price',",
+        "      'start': 0,",
+        "      'end': 100,",
+        "      'gap': 20",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount());
+    assertHasFacetWithBucketValues(topLevelFacetData, "prices",
+        new FacetBucket(0.0f, 5),
+        new FacetBucket(20.0f, 0),
+        new FacetBucket(40.0f, 0),
+        new FacetBucket(60.0f, 1),
+        new FacetBucket(80.0f, 1));
+  }
+
+  @Test
+  public void testMultiRangeFacet() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'prices': {",
+        "      'type': 'range',",
+        "      'field': 'price',",
+        "      'start': 0,",
+        "      'end': 100,",
+        "      'gap': 20",
+        "    },",
+        "    'shipping_weights': {",
+        "      'type': 'range',",
+        "      'field': 'weight',",
+        "      'start': 0,",
+        "      'end': 200,",
+        "      'gap': 50",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertEquals(NUM_TECHPRODUCTS_DOCS, topLevelFacetData.getCount());
+    assertHasFacetWithBucketValues(topLevelFacetData, "prices",
+        new FacetBucket(0.0f, 5),
+        new FacetBucket(20.0f, 0),
+        new FacetBucket(40.0f, 0),
+        new FacetBucket(60.0f, 1),
+        new FacetBucket(80.0f, 1));
+    assertHasFacetWithBucketValues(topLevelFacetData, "shipping_weights",
+        new FacetBucket(0.0f, 6),
+        new FacetBucket(50.0f, 0),
+        new FacetBucket(100.0f, 0),
+        new FacetBucket(150.0f, 1));
+  }
+
+  @Test
+  public void testSingleStatFacet() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'sum_price': 'sum(price)'",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasStatFacetWithValue(topLevelFacetData, "sum_price", 5251.270030975342);
+  }
+
+  @Test
+  public void testMultiStatFacet() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'sum_price': 'sum(price)',",
+        "    'avg_price': 'avg(price)'",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasStatFacetWithValue(topLevelFacetData, "sum_price", 5251.270030975342);
+    assertHasStatFacetWithValue(topLevelFacetData, "avg_price", 328.20437693595886);
+  }
+
+  @Test
+  public void testMultiFacetsMixedTypes() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'avg_price': 'avg(price)',",
+        "    'top_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasStatFacetWithValue(topLevelFacetData, "avg_price", 328.20437693595886);
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_cats",
+        new FacetBucket("electronics", NUM_ELECTRONICS),
+        new FacetBucket("currency", NUM_CURRENCY),
+        new FacetBucket("memory", NUM_MEMORY));
+  }
+
+  @Test
+  public void testNestedTermsFacet() throws Exception {
+    final String subfacetName = "top_manufacturers_for_cat";
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'top_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "      'facet': {",
+        "        'top_manufacturers_for_cat': {",
+        "          'type': 'terms',",
+        "          'field': 'manu_id_s',",
+        "          'limit': 1",
+        "        }",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    // Test top level facets
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_cats",
+        new FacetBucket("electronics", NUM_ELECTRONICS),
+        new FacetBucket("currency", NUM_CURRENCY),
+        new FacetBucket("memory", NUM_MEMORY));
+    // Test subfacet values for each top-level facet bucket
+    final List<BucketJsonFacet> catBuckets = topLevelFacetData.getBucketBasedFacets("top_cats").getBuckets();
+    assertHasFacetWithBucketValues(catBuckets.get(0), subfacetName, new FacetBucket("corsair", 3));
+    assertHasFacetWithBucketValues(catBuckets.get(1), subfacetName, new FacetBucket("boa", 1));
+    assertHasFacetWithBucketValues(catBuckets.get(2), subfacetName, new FacetBucket("corsair", 3));
+  }
+
+  @Test
+  public void testNestedFacetsOfMixedTypes() throws Exception {
+    final String subfacetName = "avg_price_for_cat";
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'top_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "      'facet': {",
+        "        'avg_price_for_cat': 'avg(price)'",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    // Test top level facets
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_cats",
+        new FacetBucket("electronics", NUM_ELECTRONICS),
+        new FacetBucket("currency", NUM_CURRENCY),
+        new FacetBucket("memory", NUM_MEMORY));
+    // Test subfacet values for each top-level facet bucket
+    final List<BucketJsonFacet> catBuckets = topLevelFacetData.getBucketBasedFacets("top_cats").getBuckets();
+    assertHasStatFacetWithValue(catBuckets.get(0), subfacetName, 252.02909261530095); // electronics
+    assertHasStatFacetWithValue(catBuckets.get(1), subfacetName, 0.0); // currency
+    assertHasStatFacetWithValue(catBuckets.get(2), subfacetName, 129.99499893188477); // memory
+  }
+
+  @Test
+  public void testFacetWithDomainFilteredBySimpleQueryString() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'top_popular_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "      'domain': {",
+        "        'filter': 'popularity:[5 TO 10]'",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_popular_cats",
+        new FacetBucket("electronics", 9),
+        new FacetBucket("graphics card", 2),
+        new FacetBucket("hard drive", 2));
+  }
+
+  @Test
+  public void testFacetWithDomainFilteredByLocalParamsQueryString() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'facet': {",
+        "    'top_popular_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "      'domain': {",
+        "        'filter': '{!lucene df=\"popularity\" v=\"[5 TO 10]\"}'",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_TECHPRODUCTS_DOCS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_popular_cats",
+        new FacetBucket("electronics", 9),
+        new FacetBucket("graphics card", 2),
+        new FacetBucket("hard drive", 2));
+  }
+
+  @Test
+  public void testFacetWithArbitraryDomainFromQueryString() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': 'cat:electronics',",
+        "  'facet': {",
+        "    'top_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 3",
+        "      'domain': {",
+        "        'query': '*:*'",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "top_cats",
+        new FacetBucket("electronics", NUM_ELECTRONICS),
+        new FacetBucket("currency", NUM_CURRENCY),
+        new FacetBucket("memory", NUM_MEMORY));
+  }
+
+  @Test
+  public void testFacetWithArbitraryDomainFromLocalParamsQuery() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': 'cat:electronics',",
+        "  'facet': {",
+        "    'largest_search_cats': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'domain': {",
+        "        'query': '{!lucene df=\"cat\" v=\"search\"}'",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "largest_search_cats",
+        new FacetBucket("search", 2),
+        new FacetBucket("software", 2));
+  }
+
+  @Test
+  public void testFacetWithMultipleSimpleQueryClausesInArbitraryDomain() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': 'cat:electronics',",
+        "  'facet': {",
+        "    'cats_matching_solr': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'domain': {",
+        "        'query': ['cat:search', 'name:Solr']",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "cats_matching_solr",
+        new FacetBucket("search", 1),
+        new FacetBucket("software", 1));
+  }
+
+  @Test
+  public void testFacetWithMultipleLocalParamsQueryClausesInArbitraryDomain() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': 'cat:electronics',",
+        "  'facet': {",
+        "    'cats_matching_solr': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'domain': {",
+        "        'query': ['{!lucene df=\"cat\" v=\"search\"}', '{!lucene df=\"name\" v=\"Solr\"}']",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_ELECTRONICS, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "cats_matching_solr",
+        new FacetBucket("search", 1),
+        new FacetBucket("software", 1));
+  }
+
+  @Test
+  public void testFacetWithDomainWidenedUsingExcludeTagsToIgnoreFilters() throws Exception {
+    final String jsonBody = String.join("\n", "{",
+        "  'query': '*:*',",
+        "  'filter': {'#on_shelf': 'inStock:true'},",
+        "  'facet': {",
+        "    'in_stock_only': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 2",
+        "    }",
+        "    'all': {",
+        "      'type': 'terms',",
+        "      'field': 'cat',",
+        "      'limit': 2,",
+        "      'domain': {",
+        "        'excludeTags': 'on_shelf'",
+        "      }",
+        "    }",
+        "  }",
+        "}");
+    final DirectJsonQueryRequest request = new DirectJsonQueryRequest(jsonBody);
+
+    QueryResponse response = request.process(getSolrClient(), COLLECTION_NAME);
+
+    assertExpectedDocumentsFoundAndReturned(response, NUM_IN_STOCK, 10);
+    final NestableJsonFacet topLevelFacetData = response.getJsonFacetingResponse();
+    assertHasFacetWithBucketValues(topLevelFacetData, "in_stock_only",
+        new FacetBucket("electronics", 8),
+        new FacetBucket("currency", 4));
+    assertHasFacetWithBucketValues(topLevelFacetData, "all",
+        new FacetBucket("electronics", 12),
+        new FacetBucket("currency", 4));
+  }
+
+  private class FacetBucket {
+    private final Object val;
+    private final int count;
+
+    FacetBucket(Object val, int count) {
+      this.val = val;
+      this.count = count;
+    }
+
+    public Object getVal() {
+      return val;
+    }
+
+    public int getCount() {
+      return count;
+    }
+  }
+
+  private void assertHasFacetWithBucketValues(NestableJsonFacet response, String expectedFacetName,
+      FacetBucket... expectedBuckets) {
+    assertTrue("Expected response to have facet with name " + expectedFacetName,
+        response.getBucketBasedFacets(expectedFacetName) != null);
+    final List<BucketJsonFacet> buckets = response.getBucketBasedFacets(expectedFacetName).getBuckets();
+    assertEquals(expectedBuckets.length, buckets.size());
+    for (int i = 0; i < expectedBuckets.length; i++) {
+      final FacetBucket expectedBucket = expectedBuckets[i];
+      final BucketJsonFacet actualBucket = buckets.get(i);
+      assertEquals(expectedBucket.getVal(), actualBucket.getVal());
+      assertEquals(expectedBucket.getCount(), actualBucket.getCount());
+    }
+  }
+
+  private void assertHasStatFacetWithValue(NestableJsonFacet response, String expectedFacetName,
+      Double expectedStatValue) {
+    assertTrue("Expected response to have stat facet named '" + expectedFacetName + "'",
+        response.getStatValue(expectedFacetName) != null);
+    assertEquals(expectedStatValue, response.getStatValue(expectedFacetName));
+  }
+
+  private void assertExpectedDocumentsFoundAndReturned(QueryResponse response, int expectedNumFound,
+      int expectedReturned) {
+    assertEquals(0, response.getStatus());
+    final SolrDocumentList documents = response.getResults();
+    assertEquals(expectedNumFound, documents.getNumFound());
+    assertEquals(expectedReturned, documents.size());
+  }
+}
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java
index d515368..48f13c2 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/DirectJsonQueryRequestFacetingIntegrationTest.java
@@ -545,8 +545,8 @@
 
   private void assertHasStatFacetWithValue(NestableJsonFacet response, String expectedFacetName, Double expectedStatValue) {
     assertTrue("Expected response to have stat facet named '" + expectedFacetName + "'",
-        response.getStatFacetValue(expectedFacetName) != null);
-    assertEquals(expectedStatValue, response.getStatFacetValue(expectedFacetName));
+        response.getStatValue(expectedFacetName) != null);
+    assertEquals(expectedStatValue, response.getStatValue(expectedFacetName));
   }
 
   private void assertExpectedDocumentsFoundAndReturned(QueryResponse response, int expectedNumFound, int expectedReturned) {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java
index 4852c76..291bcba 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/JsonQueryRequestFacetingIntegrationTest.java
@@ -571,8 +571,8 @@
 
   private void assertHasStatFacetWithValue(NestableJsonFacet response, String expectedFacetName, Double expectedStatValue) {
     assertTrue("Expected response to have stat facet named '" + expectedFacetName + "'",
-        response.getStatFacetValue(expectedFacetName) != null);
-    assertEquals(expectedStatValue, response.getStatFacetValue(expectedFacetName));
+        response.getStatValue(expectedFacetName) != null);
+    assertEquals(expectedStatValue, response.getStatValue(expectedFacetName));
   }
 
   private void assertExpectedDocumentsFoundAndReturned(QueryResponse response, int expectedNumFound, int expectedReturned) {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java
index 0d06e59..7028326 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/json/TermsFacetMapTest.java
@@ -55,15 +55,6 @@
   }
 
   @Test
-  public void testRejectsNegativeBucketLimit() {
-    final Throwable thrown = expectThrows(IllegalArgumentException.class, () -> {
-      final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME)
-          .setLimit(-1);
-    });
-    assertThat(thrown.getMessage(), containsString("must be non-negative"));
-  }
-
-  @Test
   public void testStoresBucketLimitWithCorrectKey() {
     final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME)
         .setLimit(3);
@@ -129,9 +120,9 @@
   public void testRejectInvalidMinCount() {
     final Throwable thrown = expectThrows(IllegalArgumentException.class, () -> {
       final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME)
-          .setMinCount(0);
+          .setMinCount(-1);
     });
-    assertThat(thrown.getMessage(), containsString("must be a positive integer"));
+    assertThat(thrown.getMessage(), containsString("must be a non-negative integer"));
   }
 
   @Test
@@ -139,6 +130,8 @@
     final TermsFacetMap termsFacet = new TermsFacetMap(ANY_FIELD_NAME)
         .setMinCount(6);
     assertEquals(6, termsFacet.get("mincount"));
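+    // A mincount of zero is now accepted; only negative values are rejected.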
+    termsFacet.setMinCount(0);
+    assertEquals(0, termsFacet.get("mincount"));
   }
 
   @Test
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/NestableJsonFacetTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NestableJsonFacetTest.java
new file mode 100644
index 0000000..3b40726
--- /dev/null
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/NestableJsonFacetTest.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.client.solrj.response;
+
+
+import java.util.Collections;
+
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.response.json.NestableJsonFacet;
+import org.apache.solr.common.util.NamedList;
+import org.junit.Test;
+
+public class NestableJsonFacetTest extends SolrTestCaseJ4 {
+
+  @Test
+  public void testParsing() {
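+    // First pass: numeric values in the response arrive as Integers.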
+    NamedList<Object> list = new NamedList<>();
+    list.add("count", 12);
+    NamedList<Object> buckets = new NamedList<Object>() {{
+      add("val", "Nike");
+    }};
+    NamedList<Object> vals = new NamedList<Object>() {{
+      add("numBuckets", 10);
+      add("allBuckets", new NamedList<Object>(){{
+        add("count", 12);
+      }});
+      add("before", new NamedList<Object>(){{
+        add("count", 1);
+      }});
+      add("after", new NamedList<Object>(){{
+        add("count", 2);
+      }});
+      add("between", new NamedList<Object>(){{
+        add("count", 9);
+      }});
+    }};
+    vals.add("buckets", Collections.singletonList(buckets));
+    list.add("test", vals);
+    NestableJsonFacet facet = new NestableJsonFacet(list);
+
+    assertEquals(12L, facet.getCount());
+    assertEquals(9L, facet.getBucketBasedFacets("test").getBetween());
+    list.clear();
+
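+    // Second pass: the same structure, but with the numeric values as Longs.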
+    list.add("count", 12L);
+    buckets = new NamedList<Object>() {{
+      add("val", "Nike");
+    }};
+    vals = new NamedList<Object>() {{
+      add("numBuckets", 10L);
+      add("allBuckets", new NamedList<Object>(){{
+        add("count", 12L);
+      }});
+      add("before", new NamedList<Object>(){{
+        add("count", 1L);
+      }});
+      add("after", new NamedList<Object>(){{
+        add("count", 2L);
+      }});
+      add("between", new NamedList<Object>(){{
+        add("count", 9L);
+      }});
+    }};
+    vals.add("buckets", Collections.singletonList(buckets));
+    list.add("test", vals);
+    facet = new NestableJsonFacet(list);
+    assertEquals(12L, facet.getCount());
+    assertEquals(2L, facet.getBucketBasedFacets("test").getAfter());
+  }
+}
diff --git a/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java b/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java
index 147535a..52a661f 100644
--- a/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java
+++ b/solr/solrj/src/test/org/apache/solr/common/util/TestPathTrie.java
@@ -55,6 +55,19 @@
     pathTrie.lookup("/aa",templateValues, subPaths);
     assertEquals(3, subPaths.size());
 
+    pathTrie = new PathTrie<>(ImmutableSet.of("_introspect"));
+    pathTrie.insert("/aa/bb/{cc}/tt/*", emptyMap(), "W");
 
+    templateValues.clear();
+    assertEquals("W" ,pathTrie.lookup("/aa/bb/somepart/tt/hello", templateValues));
+    assertEquals(templateValues.get("*"), "/hello");
+
+    templateValues.clear();
+    assertEquals("W" ,pathTrie.lookup("/aa/bb/somepart/tt", templateValues));
+    assertEquals(templateValues.get("*"), null);
+
+    templateValues.clear();
+    assertEquals("W" ,pathTrie.lookup("/aa/bb/somepart/tt/hello/world/from/solr", templateValues));
+    assertEquals(templateValues.get("*"), "/hello/world/from/solr");
   }
 }
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 21a91ee..8b5a8f9 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -121,6 +121,7 @@
 import org.apache.solr.common.util.ObjectReleaseTracker;
 import org.apache.solr.common.util.SolrjNamedThreadFactory;
 import org.apache.solr.common.util.SuppressForbidden;
+import org.apache.solr.common.util.TimeSource;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.common.util.XML;
 import org.apache.solr.core.CoreContainer;
@@ -152,6 +153,7 @@
 import org.apache.solr.util.StartupLoggingUtils;
 import org.apache.solr.util.TestHarness;
 import org.apache.solr.util.TestInjection;
+import org.apache.solr.util.TimeOut;
 import org.apache.zookeeper.KeeperException;
 import org.junit.After;
 import org.junit.AfterClass;
@@ -1268,7 +1270,16 @@
     return d;
   }
 
+  /**
+   * Generates a {@link ModifiableSolrParams} from an even-length list of strings.
+   * Strings at even positions are parameter names; each is followed immediately by the
+   * string that becomes its value.
+   *
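+   * <p>
+   * For example, {@code params("q", "*:*", "rows", "10")} yields a params object containing
+   * {@code q=*:*} and {@code rows=10}.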
+   * @param params an even-length list of strings, alternating parameter names and values
+   * @return the ModifiableSolrParams built from the given list of strings.
+   * @throws RuntimeException if an odd number of strings is supplied
+   */
   public static ModifiableSolrParams params(String... params) {
+    if (params.length % 2 != 0) throw new RuntimeException("params() requires an even number of arguments (name/value pairs)");
     ModifiableSolrParams msp = new ModifiableSolrParams();
     for (int i=0; i<params.length; i+=2) {
       msp.add(params[i], params[i+1]);
@@ -3116,7 +3127,7 @@
     QueryResponse rsp = client.query(collection, solrQuery);
     long found = rsp.getResults().getNumFound();
 
-    if (rsp.getResults().getNumFound() == expectedDocCount) {
+    if (found == expectedDocCount) {
       return;
     }
 
@@ -3131,9 +3142,17 @@
     // Add the bogus doc
     new UpdateRequest().add(bogus).commit(client, collection);
 
+    // Let's spin until we find the doc.
+    checkUniqueDoc(client, collection, idField, bogusID, true);
+
     // Then remove it, we should be OK now since new searchers have been opened.
     new UpdateRequest().deleteById(bogusID).commit(client, collection);
-    // Let's check again to see if we succeeded
+
+    // Now spin until the doc is gone.
+    checkUniqueDoc(client, collection, idField, bogusID, false);
+
+    // At this point we can be certain that a new searcher has been opened, so go ahead and check
+    // the actual condition.
     rsp = client.query(collection, solrQuery);
     found = rsp.getResults().getNumFound();
 
@@ -3145,6 +3164,31 @@
     } else if (failAnyway) {
       fail("Solr11035BandAid failAnyway == true, would have successfully repaired the collection: '" + collection
           + "' extra info: '" + tag + "'");
+    } else {
+      log.warn("Solr11035BandAid, repair successful");
     }
   }
+  // Helper for Solr11035BandAid: spin until the given doc is (or is not) visible in the collection.
+  private static void checkUniqueDoc(SolrClient client, String collection, String idField, String id, boolean shouldBeThere) throws IOException, SolrServerException {
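+    // Poll roughly every 250 ms, for up to 100 seconds, until the doc is (or is not) found; give up silently on timeout.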
+    TimeOut timeOut = new TimeOut(100, TimeUnit.SECONDS, TimeSource.NANO_TIME);
+    final SolrQuery solrQuery = new SolrQuery(idField + ":" + id);
+
+    while (!timeOut.hasTimedOut()) {
+      QueryResponse rsp = client.query(collection, solrQuery);
+      long found = rsp.getResults().getNumFound();
+      if (shouldBeThere && found == 1) {
+        return;
+      }
+      if (!shouldBeThere && found == 0) {
+        return;
+      }
+      log.warn("Solr11035BandAid should have succeeded in checkUniqueDoc, shouldBeThere == {}, numFound = {}. Will try again after 250 ms sleep", shouldBeThere, found);
+      try {
+        Thread.sleep(250);
+      } catch (InterruptedException e) {
+        return; // just bail
+      }
+    }
+  }
 }
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index 2ef2659..d9de129 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -16,6 +16,8 @@
  */
 package org.apache.solr.cloud;
 
+import static org.apache.solr.common.util.Utils.makeMap;
+
 import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
@@ -106,8 +108,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.util.Utils.makeMap;
-
 /**
  * TODO: we should still test this works as a custom update chain as well as
  * what we test now - the default update chain
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
index 7c6d120..fbb547c 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/MiniSolrCloudCluster.java
@@ -792,7 +792,7 @@
   }
 
   public void waitForJettyToStop(JettySolrRunner runner) throws TimeoutException {
-    log.info("waitForJettyToStop: {}", runner.getNodeName());
+    log.info("waitForJettyToStop: {}", runner.getLocalPort());
     TimeOut timeout = new TimeOut(15, TimeUnit.SECONDS, TimeSource.NANO_TIME);
     while(!timeout.hasTimedOut()) {
       if (runner.isStopped()) {
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
index 23283cc..4ce7a5e 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
@@ -55,14 +55,11 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionAdminParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.zookeeper.CreateMode;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.cloud.ZkConfigManager.CONFIGS_ZKNODE;
-
 /**
  * Base class for SolrCloud tests
  * <p>
@@ -90,12 +87,9 @@
   private static class Config {
     final String name;
     final Path path;
-    final Map<String, byte[]> extraConfig;
-
-    private Config(String name, Path path, Map<String, byte[]> extraConfig) {
+    private Config(String name, Path path) {
       this.name = name;
       this.path = path;
-      this.extraConfig = extraConfig;
     }
   }
 
@@ -187,12 +181,7 @@
      * @param configPath the path to the config files
      */
     public Builder addConfig(String configName, Path configPath) {
-      this.configs.add(new Config(configName, configPath, null));
-      return this;
-    }
-
-    public Builder addConfig(String configName, Path configPath, Map<String, byte[]> extraConfig) {
-      this.configs.add(new Config(configName, configPath, extraConfig));
+      this.configs.add(new Config(configName, configPath));
       return this;
     }
 
@@ -232,15 +221,7 @@
           null, securityJson, trackJettyMetrics);
       CloudSolrClient client = cluster.getSolrClient();
       for (Config config : configs) {
-        ((ZkClientClusterStateProvider) client.getClusterStateProvider()).uploadConfig(config.path, config.name);
-        if (config.extraConfig != null) {
-          for (Map.Entry<String, byte[]> e : config.extraConfig.entrySet()) {
-            ((ZkClientClusterStateProvider) client.getClusterStateProvider()).getZkStateReader().getZkClient()
-                .create(CONFIGS_ZKNODE + "/" + config.name + "/" + e.getKey(), e.getValue(), CreateMode.PERSISTENT, true);
-
-          }
-
-        }
+        ((ZkClientClusterStateProvider) client.getClusterStateProvider()).uploadConfig(config.path, config.name);
       }
 
       if (clusterProperties.size() > 0) {
diff --git a/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java b/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java
index 4e39954..ed23459 100644
--- a/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java
+++ b/solr/test-framework/src/test/org/apache/solr/util/TestSSLTestConfig.java
@@ -20,6 +20,8 @@
 import java.util.Arrays;
 import java.util.List;
 
+import org.apache.lucene.util.Constants;
+
 import org.apache.solr.SolrTestCase;
 
 public class TestSSLTestConfig extends SolrTestCase {
@@ -84,4 +86,19 @@
     
   }
 
+  public void testFailIfUserRunsTestsWithJVMThatHasKnownSSLBugs() {
+    // NOTE: If there is some future JVM version where all available "ea" builds are known to be buggy,
+    // but we still want to be able to use them for running tests (i.e. via jenkins) to look for *other* bugs,
+    // then those -ea versions can be whitelisted here...
+
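+    // This should be a no-op on healthy JVMs; if the assumption trips, surface it as a hard failure so the buggy JVM is obvious.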
+    try {
+      SSLTestConfig.assumeSslIsSafeToTest();
+    } catch (org.junit.AssumptionViolatedException ave) {
+      fail("Current JVM (" + Constants.JVM_NAME + " / " + Constants.JVM_VERSION +
+           ") is known to have SSL Bugs.  Other tests that (explicitly or via randomization) " +
+           " use SSL will be SKIPed");
+    }
+  }
+
 }