Merge branch 'master' into jira/solr-14749
diff --git a/gradle/defaults.gradle b/gradle/defaults.gradle
index 0ce26d1..0ca1af1 100644
--- a/gradle/defaults.gradle
+++ b/gradle/defaults.gradle
@@ -23,9 +23,6 @@
   // Repositories to fetch dependencies from.
   repositories {
     mavenCentral()
-    maven {
-      url "https://maven.restlet.com"
-    }
   }
 
   // Artifacts will have names after full gradle project path
diff --git a/gradle/documentation/render-javadoc.gradle b/gradle/documentation/render-javadoc.gradle
index d3c9310..914fd8b 100644
--- a/gradle/documentation/render-javadoc.gradle
+++ b/gradle/documentation/render-javadoc.gradle
@@ -146,16 +146,6 @@
   }
 }
 
-configure(project(":lucene:analysis:icu")) {
-  project.tasks.withType(RenderJavadocTask) {
-    // TODO: clean up split packages
-    javadocMissingIgnore = [
-        "org.apache.lucene.collation",
-        "org.apache.lucene.collation.tokenattributes"
-    ]
-  }
-}
-
 configure(project(":lucene:backward-codecs")) {
   project.tasks.withType(RenderJavadocTask) {
     // TODO: fix missing @param tags
diff --git a/gradle/generation/jflex.gradle b/gradle/generation/jflex.gradle
index 57c8069..f6e8351 100644
--- a/gradle/generation/jflex.gradle
+++ b/gradle/generation/jflex.gradle
@@ -149,7 +149,7 @@
       def target = file('src/java/org/apache/lucene/analysis/charfilter/HTMLCharacterEntities.jflex')
       target.withOutputStream { output ->
         project.exec {
-          executable = project.externalTool("python2")
+          executable = project.externalTool("python3")
           workingDir = target.parentFile
           standardOutput = output
           args += [
diff --git a/gradle/maven/defaults-maven.gradle b/gradle/maven/defaults-maven.gradle
index 83e0051..2662a69 100644
--- a/gradle/maven/defaults-maven.gradle
+++ b/gradle/maven/defaults-maven.gradle
@@ -68,6 +68,8 @@
         ":solr:contrib:prometheus-exporter",
         ":solr:test-framework",
     ]
+
+    apacheNexusSnapshots = "https://repository.apache.org/content/repositories/snapshots"
   }
 }
 
@@ -76,7 +78,26 @@
   apply plugin: 'signing'
 
   publishing {
-    // TODO: Add publishing repository details.
+    repositories {
+      maven {
+        name = "ApacheSnapshots"
+        url = apacheNexusSnapshots
+
+        credentials {
+          def nexusUserName = rootProject.propertyOrDefault('asfNexusUsername', null)
+          def nexusPwd = rootProject.propertyOrDefault('asfNexusPassword', null)
+          if (nexusUserName && nexusPwd) {
+            username nexusUserName
+            password nexusPwd
+          }
+        }
+      }
+    }
+  }
+
+  // Do not generate gradle metadata files.
+  tasks.withType(GenerateModuleMetadata) {
+    enabled = false
   }
 
   plugins.withType(JavaPlugin) {
@@ -101,44 +122,160 @@
     // in gradle or just complex relationships between lazy collection hooks.
     gradle.projectsEvaluated {
       publishing {
-        def configurePom = {
-          name = "Apache Solr/Lucene (${project.name})"
-          licenses {
-            license {
-              name = 'Apache 2'
-              url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+        def configurePom;
+        if (project.path.startsWith(":lucene")) {
+          configurePom = {
+            name = "Apache Lucene (module: ${project.name})"
+            description = name
+            url = 'https://lucene.apache.org/'
+
+            licenses {
+              license {
+                name = 'Apache 2'
+                url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+              }
+            }
+
+            inceptionYear = "2000"
+
+            issueManagement {
+              system = "JIRA"
+              url = "https://issues.apache.org/jira/browse/LUCENE"
+            }
+
+            ciManagement {
+              system = "Jenkins"
+              url = "https://builds.apache.org/job/Lucene/"
+            }
+
+            mailingLists {
+              mailingList {
+                name = "General List"
+                subscribe = "general-subscribe@lucene.apache.org"
+                unsubscribe = "general-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/lucene-general/"
+              }
+
+              mailingList {
+                name = "Java User List"
+                subscribe = "java-user-subscribe@lucene.apache.org"
+                unsubscribe = "java-user-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/lucene-java-user/"
+              }
+
+              mailingList {
+                name = "Java Developer List"
+                subscribe = "dev-subscribe@lucene.apache.org"
+                unsubscribe = "dev-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/lucene-dev/"
+              }
+
+              mailingList {
+                name = "Java Commits List"
+                subscribe = "commits-subscribe@lucene.apache.org"
+                unsubscribe = "commits-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/lucene-java-commits/"
+              }
+            }
+
+            scm {
+              connection = 'scm:git:https://gitbox.apache.org/repos/asf/lucene-solr.git'
+              developerConnection = 'scm:git:https://gitbox.apache.org/repos/asf/lucene-solr.git'
+              url = 'https://gitbox.apache.org/repos/asf?p=lucene-solr.git'
+            }
+          }
+        } else {
+          configurePom = {
+            name = "Apache Solr (module: ${project.name})"
+            description = name
+            url = 'https://lucene.apache.org/solr/'
+
+            licenses {
+              license {
+                name = 'Apache 2'
+                url = 'http://www.apache.org/licenses/LICENSE-2.0.txt'
+              }
+            }
+
+            inceptionYear = "2006"
+
+            issueManagement {
+              system = "JIRA"
+              url = "https://issues.apache.org/jira/browse/SOLR"
+            }
+
+            ciManagement {
+              system = "Jenkins"
+              url = "https://builds.apache.org/job/Lucene/"
+            }
+
+            mailingLists {
+              mailingList {
+                name = "Solr User List"
+                subscribe = "solr-user-subscribe@lucene.apache.org"
+                unsubscribe = "solr-user-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/solr-user/"
+              }
+
+              mailingList {
+                name = "Java Developer List"
+                subscribe = "dev-subscribe@lucene.apache.org"
+                unsubscribe = "dev-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/lucene-dev/"
+              }
+
+              mailingList {
+                name = "Java Commits List"
+                subscribe = "commits-subscribe@lucene.apache.org"
+                unsubscribe = "commits-unsubscribe@lucene.apache.org"
+                archive = "https://mail-archives.apache.org/mod_mbox/lucene-java-commits/"
+              }
+            }
+
+            scm {
+              connection = 'scm:git:https://gitbox.apache.org/repos/asf/lucene-solr.git'
+              developerConnection = 'scm:git:https://gitbox.apache.org/repos/asf/lucene-solr.git'
+              url = 'https://gitbox.apache.org/repos/asf?p=lucene-solr.git'
             }
           }
         }
 
         publications {
-          // JARS and sources, no javadocs (for local inspection only).
           jars(MavenPublication) {
             from components.java
             groupId = project.group
             artifactId = project.archivesBaseName
 
             artifact sourcesJar
-
-            pom(configurePom)
-          }
-
-          // Full set of signed artifacts.
-          signed(MavenPublication) {
-            from components.java
-            groupId = project.group
-            artifactId = project.archivesBaseName
-
-            artifact sourcesJar
             artifact javadocJar
 
             pom(configurePom)
+
+            pom({
+              // LUCENE-9561:
+              // Remove dependencyManagement section created by a combination of
+              // Palantir and the publishing plugin.
+              //
+              // https://github.com/palantir/gradle-consistent-versions/issues/550
+              withXml {
+                asNode().dependencyManagement.replaceNode {}
+              }
+            })
           }
         }
       }
 
+      // Add aliases of convention tasks with shorter names.
+      task mavenToApacheSnapshots() {
+        group "Publishing"
+        description "Publish Maven JARs and POMs to Apache Snapshots repository: ${apacheNexusSnapshots}"
+
+        dependsOn "publishJarsPublicationToApacheSnapshotsRepository"
+      }
+
       signing {
-        sign publishing.publications.signed
+        required { !version.endsWith("SNAPSHOT") }
+        sign publishing.publications.jars
       }
     }
   }
diff --git a/gradle/maven/maven-local.gradle b/gradle/maven/maven-local.gradle
index d6a6fdf..9d415b8 100644
--- a/gradle/maven/maven-local.gradle
+++ b/gradle/maven/maven-local.gradle
@@ -24,7 +24,7 @@
     mavenLocalDir = file("${buildDir}/maven-local")
   }
 
-  task mavenLocal() {
+  task mavenToLocalFolder() {
     group "Publishing"
     description "Publish Maven JARs and POMs locally to " + mavenLocalDir
 
@@ -33,6 +33,11 @@
     }
   }
 
+  task mavenToLocalRepo() {
+    group "Publishing"
+    description "Publish Maven JARs and POMs to current user's local maven repository."
+  }
+
   task mavenLocalClean(type: Delete) {
     delete mavenLocalDir
   }
@@ -48,11 +53,15 @@
         }
       }
 
+      tasks.matching { it.name == "publishJarsPublicationToMavenLocal" }.all { task ->
+        mavenToLocalRepo.dependsOn task
+      }
+
       tasks.matching { it.name == "publishJarsPublicationToBuildRepository" }.all { task ->
-        // Clean prior to republishing to local build repository.
+        // Clean local repo prior to republishing artifacts.
         task.dependsOn mavenLocalClean
         // Attach to root project's mavenLocal task.
-        mavenLocal.dependsOn task
+        mavenToLocalFolder.dependsOn task
       }
     }
   }
diff --git a/gradle/testing/randomization.gradle b/gradle/testing/randomization.gradle
index 298bfe1..452e1e3 100644
--- a/gradle/testing/randomization.gradle
+++ b/gradle/testing/randomization.gradle
@@ -22,6 +22,7 @@
 import java.nio.file.*
 import com.carrotsearch.randomizedtesting.SeedUtils
 import com.carrotsearch.randomizedtesting.generators.RandomPicks
+import org.apache.tools.ant.types.Commandline
 
 buildscript {
   repositories {
@@ -144,7 +145,7 @@
         def value = testOptionsResolved[opt.propName]
         return defValue != value
       }.collect { opt ->
-        "-P" + opt.propName + "=" + testOptionsResolved[opt.propName]
+        Commandline.quoteArgument("-P" + opt.propName + "=" + testOptionsResolved[opt.propName])
       }.join(" ")
 
       // leaving temporary folder option has multiple aliases...
diff --git a/gradle/validation/check-environment.gradle b/gradle/validation/check-environment.gradle
index 0f3a084..d6330da 100644
--- a/gradle/validation/check-environment.gradle
+++ b/gradle/validation/check-environment.gradle
@@ -22,7 +22,7 @@
 
 configure(rootProject) {
   ext {
-    expectedGradleVersion = '6.4.1'
+    expectedGradleVersion = '6.6.1'
     minJavaVersion = JavaVersion.VERSION_11
   }
 
diff --git a/gradle/validation/owasp-dependency-check/exclusions.xml b/gradle/validation/owasp-dependency-check/exclusions.xml
index 0a77b99..83fe6e6 100644
--- a/gradle/validation/owasp-dependency-check/exclusions.xml
+++ b/gradle/validation/owasp-dependency-check/exclusions.xml
@@ -64,36 +64,6 @@
   </suppress>
   <suppress>
     <notes><![CDATA[
-   file name: org.restlet.ext.servlet-2.3.0.jar
-   ]]></notes>
-    <packageUrl regex="true">^pkg:maven/org\.restlet\.jee/org\.restlet\.ext\.servlet@.*$</packageUrl>
-    <cpe>cpe:/a:restlet:restlet_framework</cpe>
-  </suppress>
-  <suppress>
-    <notes><![CDATA[
-   file name: org.restlet.ext.servlet-2.3.0.jar
-   ]]></notes>
-    <packageUrl regex="true">^pkg:maven/org\.restlet\.jee/org\.restlet\.ext\.servlet@.*$</packageUrl>
-    <cpe>cpe:/a:restlet:restlet</cpe>
-  </suppress>
-  <suppress>
-    <notes><![CDATA[
-   file name: org.restlet-2.3.0.jar
-   We don't use class SimpleXMLProvider
-   ]]></notes>
-    <packageUrl regex="true">^pkg:maven/org\.restlet\.jee/org\.restlet@.*$</packageUrl>
-    <cve>CVE-2017-14868</cve>
-  </suppress>
-  <suppress>
-    <notes><![CDATA[
-   file name: org.restlet-2.3.0.jar
-   We don't use class XmlRepresentation
-   ]]></notes>
-    <packageUrl regex="true">^pkg:maven/org\.restlet\.jee/org\.restlet@.*$</packageUrl>
-    <cve>CVE-2017-14949</cve>
-  </suppress>
-  <suppress>
-    <notes><![CDATA[
    file name: solr-webapp-9.0.0-SNAPSHOT.war: jquery-2.1.3.min.js
    This is already being fixed in SOLR-14209 so muting the warning
    ]]></notes>
diff --git a/gradle/validation/validate-log-calls.gradle b/gradle/validation/validate-log-calls.gradle
index 741f4e7..d759c74 100644
--- a/gradle/validation/validate-log-calls.gradle
+++ b/gradle/validation/validate-log-calls.gradle
@@ -67,7 +67,7 @@
     boolean violation = false
 
     // If the line has been explicitly OK'd, then it's OK!
-    if (line.replaceAll("\\s", "").toLowerCase().contains("//logok")) {
+    if (line.replaceAll("\\s", "").toLowerCase().contains("//nowarn")) {
       return
     }
     // Strip all of the comments, things in quotes and the like.
@@ -133,7 +133,7 @@
     }
 
     // Always report toString(). Note, this over-reports some constructs
-    // but just add //logOK if it's really OK.
+    // but just add //nowarn if it's really OK.
     if (violation == false) {
       if (line.contains("toString(") == true && prevLineNotIf) {
         cause = "Line contains toString"
@@ -151,27 +151,13 @@
     return
   }
 
-// Require all our logger definitions lower case "log", except a couple of special ones.
+// Require all our logger definitions lower case "log", except if they have //nowarn
   def checkLogName(File file, String line) {
     // It's many times faster to do check this way than use a regex
     if (line.contains("static ") && line.contains("getLogger") && line.contains(" log ") == false) {
-      String name = file.name
-      if (name.equals("LoggerFactory.java")) {
+      if (line.replaceAll("\\s", "").toLowerCase().contains("//nowarn")) {
         return
       }
-      if (name.equals("SolrCore.java") && (line.contains("requestLog") || line.contains("slowLog"))) {
-        return
-      }
-      if (name.equals("StartupLoggingUtils.java") && line.contains("getLoggerImplStr")) {
-        return
-      }
-      // Apparently the Hadoop code expectes upper-case LOG, so...
-
-      if ((name.equals("HttpServer2.java") || name.equals("BlockPoolSlice.java") || name.equals("FileUtil.java"))
-        && line.contains(" LOG ")) {
-        return
-      }
-
       reportViolation("Change the logger name to lower-case 'log' in " + file.name + " " + line + " project" + project)
     }
   }
diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar
index 62d4c05..e708b1c 100644
--- a/gradle/wrapper/gradle-wrapper.jar
+++ b/gradle/wrapper/gradle-wrapper.jar
Binary files differ
diff --git a/gradle/wrapper/gradle-wrapper.jar.sha256 b/gradle/wrapper/gradle-wrapper.jar.sha256
index f315f8d..e85f20f 100644
--- a/gradle/wrapper/gradle-wrapper.jar.sha256
+++ b/gradle/wrapper/gradle-wrapper.jar.sha256
@@ -1 +1 @@
-70239e6ca1f0d5e3b2808ef6d82390cf9ad58d3a3a0d271677a51d1b89475857
\ No newline at end of file
+e996d452d2645e70c01c11143ca2d3742734a28da2bf61f25c82bdc288c9e637
diff --git a/gradle/wrapper/gradle-wrapper.jar.version b/gradle/wrapper/gradle-wrapper.jar.version
index 306894a..09a7391 100644
--- a/gradle/wrapper/gradle-wrapper.jar.version
+++ b/gradle/wrapper/gradle-wrapper.jar.version
@@ -1 +1 @@
-6.4.1
\ No newline at end of file
+6.6.1
diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties
index 21e622d..33682bb 100644
--- a/gradle/wrapper/gradle-wrapper.properties
+++ b/gradle/wrapper/gradle-wrapper.properties
@@ -1,5 +1,5 @@
 distributionBase=GRADLE_USER_HOME
 distributionPath=wrapper/dists
-distributionUrl=https\://services.gradle.org/distributions/gradle-6.4.1-all.zip
+distributionUrl=https\://services.gradle.org/distributions/gradle-6.6.1-all.zip
 zipStoreBase=GRADLE_USER_HOME
 zipStorePath=wrapper/dists
diff --git a/help/validateLogCalls.txt b/help/validateLogCalls.txt
index e150ba8..b177bdb 100644
--- a/help/validateLogCalls.txt
+++ b/help/validateLogCalls.txt
@@ -60,7 +60,7 @@
   simple concatenation. This last is something of a style check.
 
 - You can get into some pretty convolued consructs trying to pass some of these
-  checks. Adding //logok, with or without spaces will cause the line to pass
+  checks. Adding //nowarn, with or without spaces, will cause the line to pass
   no matter what. Please use this hack sparingly and be conscientious about
   surrounding with 'if (log.is*Enabled)'.
 
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index e699862..e2be6ee 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -12,9 +12,6 @@
 
 API Changes
 
-* LUCENE-9317: Clean up package name conflicts between core and analyzers-common.
-  See MIGRATE.md for details. (David Ryan, Tomoko Uchida, Uwe Schindler, Dawid Weiss)
-
 * LUCENE-8474: RAMDirectory and associated deprecated classes have been
   removed. (Dawid Weiss)
 
@@ -68,6 +65,9 @@
   in Lucenes IndexWriter. The interface is not sufficient to efficiently
   replace the functionality with reasonable efforts. (Simon Willnauer)
 
+* LUCENE-9317 LUCENE-9558: Clean up package name conflicts between modules.
+  See MIGRATE.md for details. (David Ryan, Tomoko Uchida, Uwe Schindler, Dawid Weiss)
+
 Improvements
 
 * LUCENE-9463: Query match region retrieval component, passage scoring and formatting
@@ -250,6 +250,9 @@
 * LUCENE-9539: Use more compact datastructures to represent sorted doc-values in memory when 
   sorting a segment before flush and in SortingCodecReader. (Simon Willnauer)
 
+* LUCENE-9458: WordDelimiterGraphFilter should order tokens at the same position by endOffset to
+  emit longer tokens first. The same graph is produced. (David Smiley)
+
 Optimizations
 ---------------------
 
@@ -278,6 +281,8 @@
 * LUCENE-9501: Fix a bug in IndexSortSortedNumericDocValuesRangeQuery where it could violate the
   DocIdSetIterator contract. (Julie Tibshirani)
 
+* LUCENE-9401: Include field in ComplexPhraseQuery's toString() (Thomas Hecker via Munendra S N)
+
 Documentation
 ---------------------
 
diff --git a/lucene/MIGRATE.md b/lucene/MIGRATE.md
index d215c5d..e664708 100644
--- a/lucene/MIGRATE.md
+++ b/lucene/MIGRATE.md
@@ -1,5 +1,10 @@
 # Apache Lucene Migration Guide
 
+## ICUCollationKeyAnalyzer is renamed (LUCENE-9558)
+
+o.a.l.collation.ICUCollationAnalyzer is renamed to o.a.l.a.icu.ICUCollationKeyAnalyzer.
+Also, its dependent classes are renamed in the same way.
+
 ## Base and concrete analysis factories are moved / package renamed (LUCENE-9317)
 
 1. Base analysis factories are moved to `lucene-core`, also their package names are renamed.
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java
index 9d03c7e..70e1e1e 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterGraphFilter.java
@@ -447,26 +447,18 @@
   private class PositionSorter extends InPlaceMergeSorter {
     @Override
     protected int compare(int i, int j) {
-      // smaller start position
-      int iPosStart = bufferedParts[4*i];
-      int jPosStart = bufferedParts[4*j];
-      int cmp = Integer.compare(iPosStart, jPosStart);
-      if (cmp != 0) {
-        return cmp;
-      }
-
-      // longest pos length:
-      int iPosEnd = bufferedParts[4*i+1];
-      int jPosEnd = bufferedParts[4*j+1];
-      cmp = Integer.compare(jPosEnd, iPosEnd);
-      if (cmp != 0) {
-        return cmp;
-      }
-
       // smaller start offset
-      int iOff = bufferedParts[4*i + 2];
-      int jOff = bufferedParts[4*j + 2];
-      return Integer.compare(iOff, jOff);
+      int iOff = bufferedParts[4 * i + 2];
+      int jOff = bufferedParts[4 * j + 2];
+      int cmp = Integer.compare(iOff, jOff);
+      if (cmp != 0) {
+        return cmp;
+      }
+
+      // longer end offset
+      iOff = bufferedParts[4 * i + 3];
+      jOff = bufferedParts[4 * j + 3];
+      return Integer.compare(jOff, iOff);
     }
 
     @Override
diff --git a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterIterator.java b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterIterator.java
index f3541ac..b7435d6 100644
--- a/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterIterator.java
+++ b/lucene/analysis/common/src/java/org/apache/lucene/analysis/miscellaneous/WordDelimiterIterator.java
@@ -16,6 +16,8 @@
  */
 package org.apache.lucene.analysis.miscellaneous;
 
+import java.util.Locale;
+
 /**
  * A BreakIterator-like API for iterating over subwords in text, according to WordDelimiterGraphFilter rules.
  * @lucene.internal
@@ -113,7 +115,17 @@
     this.splitOnNumerics = splitOnNumerics;
     this.stemEnglishPossessive = stemEnglishPossessive;
   }
-  
+
+  @Override
+  public String toString() {
+    if (end == DONE) {
+      return "DONE";
+    }
+    return new String(text, current, end - current)
+        + " [" + current + "-" + end + "]"
+        + " type=" + String.format(Locale.ROOT, "%#02x", type());
+  }
+
   /**
    * Advance to the next subword in the string.
    *
diff --git a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java
index 648c366..5f26a16 100644
--- a/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java
+++ b/lucene/analysis/common/src/test/org/apache/lucene/analysis/miscellaneous/TestWordDelimiterGraphFilter.java
@@ -401,6 +401,8 @@
   public void testCatenateAllEmittedBeforeParts() throws Exception {
     // no number parts
     final int flags = PRESERVE_ORIGINAL | GENERATE_WORD_PARTS | CATENATE_ALL;
+    final boolean useCharFilter = true;
+    final boolean graphOffsetsAreCorrect = false; // note: could solve via always incrementing wordPos on first word ('8')
 
     //not using getAnalyzer because we want adjustInternalOffsets=true
     Analyzer a = new Analyzer() {
@@ -414,17 +416,61 @@
     // input starts with a number, but we don't generate numbers.
     //   Nonetheless preserve-original and concatenate-all show up first.
     assertTokenStreamContents(a.tokenStream("dummy", "8-other"),
-        new String[] { "8-other", "8other", "other" }, new int[]{0, 0, 2}, new int[]{7, 7, 7});
-
-    boolean useCharFilter = true;
-    boolean graphOffsetsAreCorrect = false; // note: could solve via always incrementing wordPos on first word ('8')
+        new String[] { "8-other", "8other", "other" }, new int[]{0, 0, 2}, new int[]{7, 7, 7}, new int[]{1, 0, 0});
     checkAnalysisConsistency(random(), a, useCharFilter, "8-other", graphOffsetsAreCorrect);
-
     verify("8-other", flags); // uses getAnalyzer which uses adjustInternalOffsets=false which works
 
+    // input ends with a number, but we don't generate numbers
+    assertTokenStreamContents(a.tokenStream("dummy", "other-9"),
+        new String[] { "other-9", "other9", "other" }, new int[]{0, 0, 0}, new int[]{7, 7, 5}, new int[]{1, 0, 0});
+    checkAnalysisConsistency(random(), a, useCharFilter, "other-9", graphOffsetsAreCorrect);
+    verify("9-other", flags); // uses getAnalyzer which uses adjustInternalOffsets=false which works
+
     a.close();
   }
 
+  /*
+  static char[] fuzzDict = {'-', 'H', 'w', '4'};
+  public void testFuzz() throws IOException {
+    //System.out.println(getGraphStrings(getAnalyzer(GENERATE_WORD_PARTS | CATENATE_WORDS), "H-H")); // orig:[H H, HH H] orig; fixed posInc:"[HH H H]"
+    //System.out.println(getGraphStrings(getAnalyzer(CATENATE_WORDS | CATENATE_ALL), "H-4")); // fixPos:[H H4] final:"[H4 H]"
+
+    StringBuilder input = new StringBuilder("000000"); // fill with arbitrary chars; not too long or too short
+
+    for (int flags = 0; flags < IGNORE_KEYWORDS; flags++) { // all interesting bit flags precede IGNORE_KEYWORDS
+      System.out.println("Flags: " + flags + " " + WordDelimiterGraphFilter.flagsToString(flags));
+      final Analyzer analyzer = getAnalyzer(flags);
+      fuzzLoop(input, 0, analyzer);
+    }
+  }
+
+  public void fuzzLoop(StringBuilder input, int inputPrefixLenFuzzed, Analyzer analyzer) throws IOException {
+    if (inputPrefixLenFuzzed < input.length()) {
+      for (char c : fuzzDict) {
+        input.setCharAt(inputPrefixLenFuzzed, c);
+        fuzzLoop(input, inputPrefixLenFuzzed + 1, analyzer); // recursive
+      }
+      return;
+    }
+
+    fuzzDoCheck(input.toString(), analyzer);
+  }
+
+  private void fuzzDoCheck(String input, Analyzer analyzer) throws IOException {
+    try (TokenStream ts1 = analyzer.tokenStream("fieldName", input)) {
+      ts1.reset();
+      while (ts1.incrementToken()) { // modified WDF sorter compare() contains assertion check
+        //do-nothing
+      }
+      ts1.end();
+    } catch (AssertionError e) {
+      System.out.println("failed input: " + input);
+      throw e;
+    }
+  }
+*/
+
+
   /** concat numbers + words + all */
   public void testLotsOfConcatenating() throws Exception {
     final int flags = GENERATE_WORD_PARTS | GENERATE_NUMBER_PARTS | CATENATE_WORDS | CATENATE_NUMBERS | CATENATE_ALL | SPLIT_ON_CASE_CHANGE | SPLIT_ON_NUMERICS | STEM_ENGLISH_POSSESSIVE;    
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationAttributeFactory.java b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationAttributeFactory.java
similarity index 94%
rename from lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationAttributeFactory.java
rename to lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationAttributeFactory.java
index 5ad9209..3b29cb1 100644
--- a/lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationAttributeFactory.java
+++ b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationAttributeFactory.java
@@ -14,11 +14,12 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.collation;
+package org.apache.lucene.analysis.icu;
 
 
 import org.apache.lucene.analysis.TokenStream;
-import org.apache.lucene.collation.tokenattributes.ICUCollatedTermAttributeImpl;
+import org.apache.lucene.collation.CollationAttributeFactory;
+import org.apache.lucene.analysis.icu.tokenattributes.ICUCollatedTermAttributeImpl;
 import org.apache.lucene.util.AttributeFactory;
 
 import com.ibm.icu.text.Collator;
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationDocValuesField.java b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationDocValuesField.java
similarity index 98%
rename from lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationDocValuesField.java
rename to lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationDocValuesField.java
index 9ae4685..a7901cc 100644
--- a/lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationDocValuesField.java
+++ b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationDocValuesField.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.collation;
+package org.apache.lucene.analysis.icu;
 
 
 import org.apache.lucene.document.Field;
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationKeyAnalyzer.java b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationKeyAnalyzer.java
similarity index 96%
rename from lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationKeyAnalyzer.java
rename to lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationKeyAnalyzer.java
index 1c5436c..dff5699 100644
--- a/lucene/analysis/icu/src/java/org/apache/lucene/collation/ICUCollationKeyAnalyzer.java
+++ b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/ICUCollationKeyAnalyzer.java
@@ -14,13 +14,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.collation;
+package org.apache.lucene.analysis.icu;
 
 
 
 import com.ibm.icu.text.Collator;
 import org.apache.lucene.analysis.Analyzer;
 import org.apache.lucene.analysis.core.KeywordTokenizer;
+import org.apache.lucene.collation.CollationKeyAnalyzer;
 
 /**
  * <p>
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/tokenattributes/ICUCollatedTermAttributeImpl.java
similarity index 96%
rename from lucene/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java
rename to lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/tokenattributes/ICUCollatedTermAttributeImpl.java
index 793dec2..0e5c98e 100644
--- a/lucene/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/ICUCollatedTermAttributeImpl.java
+++ b/lucene/analysis/icu/src/java/org/apache/lucene/analysis/icu/tokenattributes/ICUCollatedTermAttributeImpl.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.collation.tokenattributes;
+package org.apache.lucene.analysis.icu.tokenattributes;
 
 
 import org.apache.lucene.analysis.tokenattributes.CharTermAttributeImpl;
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/collation/package.html b/lucene/analysis/icu/src/java/org/apache/lucene/collation/package.html
deleted file mode 100644
index 2c81f4a..0000000
--- a/lucene/analysis/icu/src/java/org/apache/lucene/collation/package.html
+++ /dev/null
@@ -1,23 +0,0 @@
-<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<!-- not a package-info.java, because we already defined this package in common/ -->
-<html>
-<body>
-Unicode Collation support.
-</body>
-</html>
diff --git a/lucene/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/package.html b/lucene/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/package.html
deleted file mode 100644
index 6f21117..0000000
--- a/lucene/analysis/icu/src/java/org/apache/lucene/collation/tokenattributes/package.html
+++ /dev/null
@@ -1,23 +0,0 @@
-<!doctype html public "-//w3c//dtd html 4.0 transitional//en">
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<!-- not a package-info.java, because we already defined this package in common/ -->
-<html>
-<body>
-Custom {@link org.apache.lucene.util.AttributeImpl} for indexing collation keys as index terms.
-</body>
-</html>
diff --git a/lucene/analysis/icu/src/test/org/apache/lucene/collation/TestICUCollationDocValuesField.java b/lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUCollationDocValuesField.java
similarity index 98%
rename from lucene/analysis/icu/src/test/org/apache/lucene/collation/TestICUCollationDocValuesField.java
rename to lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUCollationDocValuesField.java
index 24862c4..4e90a48 100644
--- a/lucene/analysis/icu/src/test/org/apache/lucene/collation/TestICUCollationDocValuesField.java
+++ b/lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUCollationDocValuesField.java
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.collation;
+package org.apache.lucene.analysis.icu;
 
 
 import org.apache.lucene.document.Document;
diff --git a/lucene/analysis/icu/src/test/org/apache/lucene/collation/TestICUCollationKeyAnalyzer.java b/lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUCollationKeyAnalyzer.java
similarity index 98%
rename from lucene/analysis/icu/src/test/org/apache/lucene/collation/TestICUCollationKeyAnalyzer.java
rename to lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUCollationKeyAnalyzer.java
index 91f8aed..a85d583 100644
--- a/lucene/analysis/icu/src/test/org/apache/lucene/collation/TestICUCollationKeyAnalyzer.java
+++ b/lucene/analysis/icu/src/test/org/apache/lucene/analysis/icu/TestICUCollationKeyAnalyzer.java
@@ -14,8 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.lucene.collation;
-
+package org.apache.lucene.analysis.icu;
 
 
 import com.ibm.icu.text.Collator;
diff --git a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/NewCollationAnalyzerTask.java b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/NewCollationAnalyzerTask.java
index 95cf8b2..56e9f75 100644
--- a/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/NewCollationAnalyzerTask.java
+++ b/lucene/benchmark/src/java/org/apache/lucene/benchmark/byTask/tasks/NewCollationAnalyzerTask.java
@@ -44,7 +44,7 @@
   public enum Implementation { 
     JDK("org.apache.lucene.collation.CollationKeyAnalyzer", 
         "java.text.Collator"),
-    ICU("org.apache.lucene.collation.ICUCollationKeyAnalyzer", 
+    ICU("org.apache.lucene.analysis.icu.ICUCollationKeyAnalyzer",
         "com.ibm.icu.text.Collator");
     
     String className;
diff --git a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
index a5beda2..7cc907a 100644
--- a/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
+++ b/lucene/core/src/java/org/apache/lucene/index/IndexWriter.java
@@ -5463,6 +5463,14 @@
     throw new IllegalArgumentException("number of documents in the index cannot exceed " + actualMaxDocs + " (current document count is " + pendingNumDocs.get() + "; added numDocs is " + addedNumDocs + ")");
   }
 
+  /**
+   * Returns the number of documents in the index including documents that are being added (i.e., reserved).
+   * @lucene.experimental
+   */
+  public long getPendingNumDocs() {
+    return pendingNumDocs.get();
+  }
+
   /** Returns the highest <a href="#sequence_number">sequence number</a> across
    *  all completed operations, or 0 if no operations have finished yet.  Still
    *  in-flight operations (in other threads) are not counted until they finish.
diff --git a/lucene/core/src/java/org/apache/lucene/search/ConjunctionDISI.java b/lucene/core/src/java/org/apache/lucene/search/ConjunctionDISI.java
index 780e854..30bdabb 100644
--- a/lucene/core/src/java/org/apache/lucene/search/ConjunctionDISI.java
+++ b/lucene/core/src/java/org/apache/lucene/search/ConjunctionDISI.java
@@ -31,6 +31,7 @@
 import org.apache.lucene.util.CollectionUtil;
 
 /** A conjunction of DocIdSetIterators.
+ * Requires that all of its sub-iterators be positioned on the same document at all times.
  * This iterates over the doc ids that are present in each given DocIdSetIterator.
  * <br>Public only for use in {@link org.apache.lucene.search.spans}.
  * @lucene.internal
@@ -140,6 +141,15 @@
   private static DocIdSetIterator createConjunction(
       List<DocIdSetIterator> allIterators,
       List<TwoPhaseIterator> twoPhaseIterators) {
+
+    // check that all sub-iterators are on the same doc ID
+    int curDoc = allIterators.size() > 0 ? allIterators.get(0).docID() : twoPhaseIterators.get(0).approximation.docID();
+    boolean iteratorsOnTheSameDoc = allIterators.stream().allMatch(it -> it.docID() == curDoc);
+    iteratorsOnTheSameDoc = iteratorsOnTheSameDoc && twoPhaseIterators.stream().allMatch(it -> it.approximation().docID() == curDoc);
+    if (iteratorsOnTheSameDoc == false) {
+      throw new IllegalArgumentException("Sub-iterators of ConjunctionDISI are not on the same document!");
+    }
+
     long minCost = allIterators.stream().mapToLong(DocIdSetIterator::cost).min().getAsLong();
     List<BitSetIterator> bitSetIterators = new ArrayList<>();
     List<DocIdSetIterator> iterators = new ArrayList<>();
@@ -177,6 +187,7 @@
 
   private ConjunctionDISI(List<? extends DocIdSetIterator> iterators) {
     assert iterators.size() >= 2;
+
     // Sort the array the first time to allow the least frequent DocsEnum to
     // lead the matching.
     CollectionUtil.timSort(iterators, new Comparator<DocIdSetIterator>() {
@@ -227,6 +238,7 @@
 
   @Override
   public int advance(int target) throws IOException {
+    assert assertItersOnSameDoc() : "Sub-iterators of ConjunctionDISI are not on the same document!";
     return doNext(lead1.advance(target));
   }
 
@@ -237,6 +249,7 @@
 
   @Override
   public int nextDoc() throws IOException {
+    assert assertItersOnSameDoc() : "Sub-iterators of ConjunctionDISI are not on the same document!";
     return doNext(lead1.nextDoc());
   }
 
@@ -245,6 +258,16 @@
     return lead1.cost(); // overestimate
   }
 
+  // Returns {@code true} if all sub-iterators are on the same doc ID, {@code false} otherwise
+  private boolean assertItersOnSameDoc() {
+    int curDoc = lead1.docID();
+    boolean iteratorsOnTheSameDoc = (lead2.docID() == curDoc);
+    for (int i = 0; (i < others.length && iteratorsOnTheSameDoc); i++) {
+      iteratorsOnTheSameDoc = iteratorsOnTheSameDoc && (others[i].docID() == curDoc);
+    }
+    return iteratorsOnTheSameDoc;
+  }
+
   /** Conjunction between a {@link DocIdSetIterator} and one or more {@link BitSetIterator}s. */
   private static class BitSetConjunctionDISI extends DocIdSetIterator {
 
@@ -256,6 +279,7 @@
     BitSetConjunctionDISI(DocIdSetIterator lead, Collection<BitSetIterator> bitSetIterators) {
       this.lead = lead;
       assert bitSetIterators.size() > 0;
+
       this.bitSetIterators = bitSetIterators.toArray(new BitSetIterator[0]);
       // Put the least costly iterators first so that we exit as soon as possible
       ArrayUtil.timSort(this.bitSetIterators, (a, b) -> Long.compare(a.cost(), b.cost()));
@@ -276,11 +300,13 @@
 
     @Override
     public int nextDoc() throws IOException {
+      assert assertItersOnSameDoc() : "Sub-iterators of ConjunctionDISI are not on the same document!";
       return doNext(lead.nextDoc());
     }
 
     @Override
     public int advance(int target) throws IOException {
+      assert assertItersOnSameDoc() : "Sub-iterators of ConjunctionDISI are not on the same document!";
       return doNext(lead.advance(target));
     }
 
@@ -306,6 +332,16 @@
       return lead.cost();
     }
 
+    // Returns {@code true} if all sub-iterators are on the same doc ID, {@code false} otherwise
+    private boolean assertItersOnSameDoc() {
+      int curDoc = lead.docID();
+      boolean iteratorsOnTheSameDoc = true;
+      for (int i = 0; (i < bitSetIterators.length && iteratorsOnTheSameDoc); i++) {
+        iteratorsOnTheSameDoc = iteratorsOnTheSameDoc && (bitSetIterators[i].docID() == curDoc);
+      }
+      return iteratorsOnTheSameDoc;
+    }
+
   }
 
   /**
diff --git a/lucene/core/src/java/org/apache/lucene/search/Weight.java b/lucene/core/src/java/org/apache/lucene/search/Weight.java
index 9829836..4d37fba 100644
--- a/lucene/core/src/java/org/apache/lucene/search/Weight.java
+++ b/lucene/core/src/java/org/apache/lucene/search/Weight.java
@@ -204,9 +204,17 @@
       collector.setScorer(scorer);
       DocIdSetIterator scorerIterator = twoPhase == null ? iterator : twoPhase.approximation();
       DocIdSetIterator collectorIterator = collector.competitiveIterator();
-      // if possible filter scorerIterator to keep only competitive docs as defined by collector
-      DocIdSetIterator filteredIterator = collectorIterator == null ? scorerIterator :
-          ConjunctionDISI.intersectIterators(Arrays.asList(scorerIterator, collectorIterator));
+      DocIdSetIterator filteredIterator;
+      if (collectorIterator == null) {
+        filteredIterator = scorerIterator;
+      } else {
+        if (scorerIterator.docID() != -1) {
+          // Wrap ScorerIterator to start from -1 for conjunction 
+          scorerIterator = new StartDISIWrapper(scorerIterator);
+        }
+        // filter scorerIterator to keep only competitive docs as defined by collector
+        filteredIterator = ConjunctionDISI.intersectIterators(Arrays.asList(scorerIterator, collectorIterator));
+      }
       if (filteredIterator.docID() == -1 && min == 0 && max == DocIdSetIterator.NO_MORE_DOCS) {
         scoreAll(collector, filteredIterator, twoPhase, acceptDocs);
         return DocIdSetIterator.NO_MORE_DOCS;
@@ -234,12 +242,11 @@
         }
         return currentDoc;
       } else {
-        final DocIdSetIterator approximation = twoPhase.approximation();
         while (currentDoc < end) {
           if ((acceptDocs == null || acceptDocs.get(currentDoc)) && twoPhase.matches()) {
             collector.collect(currentDoc);
           }
-          currentDoc = approximation.nextDoc();
+          currentDoc = iterator.nextDoc();
         }
         return currentDoc;
       }
@@ -258,8 +265,7 @@
         }
       } else {
         // The scorer has an approximation, so run the approximation first, then check acceptDocs, then confirm
-        final DocIdSetIterator approximation = twoPhase.approximation();
-        for (int doc = approximation.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = approximation.nextDoc()) {
+        for (int doc = iterator.nextDoc(); doc != DocIdSetIterator.NO_MORE_DOCS; doc = iterator.nextDoc()) {
           if ((acceptDocs == null || acceptDocs.get(doc)) && twoPhase.matches()) {
             collector.collect(doc);
           }
@@ -268,4 +274,42 @@
     }
   }
 
+  /**
+   * Wraps an internal {@link DocIdSetIterator} so that iteration starts at docID = -1.
+   */
+  protected static class StartDISIWrapper extends DocIdSetIterator {
+    private final DocIdSetIterator in;
+    private final int min;
+    private int docID = -1;
+
+    public StartDISIWrapper(DocIdSetIterator in) {
+      this.in = in;
+      this.min = in.docID();
+    }
+
+    @Override
+    public int docID() {
+      return docID;
+    }
+
+    @Override
+    public int nextDoc() throws IOException {
+      return advance(docID + 1);
+    }
+
+    @Override
+    public int advance(int target) throws IOException {
+      if (target <= min) {
+        return docID = min;
+      }
+      return docID = in.advance(target);
+    }
+
+    @Override
+    public long cost() {
+      return in.cost();
+    }
+
+  }
+
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/comparators/DocComparator.java b/lucene/core/src/java/org/apache/lucene/search/comparators/DocComparator.java
index 8974ca6..c0b3e2e 100644
--- a/lucene/core/src/java/org/apache/lucene/search/comparators/DocComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/comparators/DocComparator.java
@@ -133,14 +133,16 @@
                 return null;
             } else {
                 return new DocIdSetIterator() {
+                    private int docID = -1;
+
                     @Override
                     public int nextDoc() throws IOException {
-                        return competitiveIterator.nextDoc();
+                        return advance(docID + 1);
                     }
 
                     @Override
                     public int docID() {
-                        return competitiveIterator.docID();
+                        return docID;
                     }
 
                     @Override
@@ -150,7 +152,7 @@
 
                     @Override
                     public int advance(int target) throws IOException {
-                        return competitiveIterator.advance(target);
+                        return docID = competitiveIterator.advance(target);
                     }
                 };
             }
diff --git a/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java b/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java
index dc166f7..fa5f267 100644
--- a/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java
@@ -220,14 +220,16 @@
         public DocIdSetIterator competitiveIterator() {
             if (enableSkipping == false) return null;
             return new DocIdSetIterator() {
+                private int docID = -1;
+
                 @Override
                 public int nextDoc() throws IOException {
-                    return competitiveIterator.nextDoc();
+                    return advance(docID + 1);
                 }
 
                 @Override
                 public int docID() {
-                    return competitiveIterator.docID();
+                    return docID;
                 }
 
                 @Override
@@ -237,7 +239,7 @@
 
                 @Override
                 public int advance(int target) throws IOException {
-                    return competitiveIterator.advance(target);
+                    return docID = competitiveIterator.advance(target);
                 }
             };
         }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
index fc085ae..cc80e19 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestIndexWriter.java
@@ -4238,4 +4238,24 @@
     }
     IOUtils.close(w, dir);
   }
+
+  public void testPendingNumDocs() throws Exception {
+    try (Directory dir = newDirectory()) {
+      int numDocs = random().nextInt(100);
+      try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig())) {
+        for (int i = 0; i < numDocs; i++) {
+          Document d = new Document();
+          d.add(new StringField("id", Integer.toString(i), Field.Store.YES));
+          writer.addDocument(d);
+          assertEquals(i + 1L, writer.getPendingNumDocs());
+        }
+        assertEquals(numDocs, writer.getPendingNumDocs());
+        writer.flush();
+        assertEquals(numDocs, writer.getPendingNumDocs());
+      }
+      try (IndexWriter writer = new IndexWriter(dir, newIndexWriterConfig())) {
+        assertEquals(numDocs, writer.getPendingNumDocs());
+      }
+    }
+  }
 }
diff --git a/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java b/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java
index 64aea7f..e5db0f5 100644
--- a/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java
+++ b/lucene/core/src/test/org/apache/lucene/index/TestSizeBoundedForceMerge.java
@@ -71,7 +71,7 @@
 
     conf = newWriterConfig();
     LogByteSizeMergePolicy lmp = new LogByteSizeMergePolicy();
-    lmp.setMaxMergeMBForForcedMerge((min + 1) / (1 << 20));
+    lmp.setMaxMergeMBForForcedMerge(min / (1 << 20));
     conf.setMergePolicy(lmp);
     
     writer = new IndexWriter(dir, conf);
diff --git a/lucene/core/src/test/org/apache/lucene/search/TestConjunctionDISI.java b/lucene/core/src/test/org/apache/lucene/search/TestConjunctionDISI.java
index e729ed6..43cfbe6 100644
--- a/lucene/core/src/test/org/apache/lucene/search/TestConjunctionDISI.java
+++ b/lucene/core/src/test/org/apache/lucene/search/TestConjunctionDISI.java
@@ -41,7 +41,7 @@
     return new TwoPhaseIterator(approximation) {
 
       @Override
-      public boolean matches() throws IOException {
+      public boolean matches() {
         return confirmed.get(approximation.docID());
       }
 
@@ -391,4 +391,21 @@
   public void testCollapseSubConjunctionScorers() throws IOException {
     testCollapseSubConjunctions(true);
   }
+
+  public void testIllegalAdvancementOfSubIteratorsTripsAssertion() throws IOException {
+    assumeTrue("Assertions must be enabled for this test!", LuceneTestCase.assertsAreEnabled);
+    int maxDoc = 100;
+    final int numIterators = TestUtil.nextInt(random(), 2, 5);
+    FixedBitSet set = randomSet(maxDoc);
+
+    DocIdSetIterator[] iterators = new DocIdSetIterator[numIterators];
+    for (int i = 0; i < iterators.length; ++i) {
+      iterators[i] = new BitDocIdSet(set).iterator();
+    }
+    final DocIdSetIterator conjunction = ConjunctionDISI.intersectIterators(Arrays.asList(iterators));
+    int idx = TestUtil.nextInt(random() , 0, iterators.length-1);
+    iterators[idx].nextDoc(); // illegally advancing one of the sub-iterators outside of the conjunction iterator
+    AssertionError ex = expectThrows(AssertionError.class, () -> conjunction.nextDoc());
+    assertEquals("Sub-iterators of ConjunctionDISI are not on the same document!", ex.getMessage());
+  }
 }
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java
index 6a7b9ec..39fe212 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/FacetTestCase.java
@@ -70,32 +70,51 @@
    *
    * @param taxoReader {@link TaxonomyReader} used to read taxonomy during search. This instance is expected to be open for reading.
    * @param fc         {@link FacetsCollector} A collector with matching hits.
-   * @return {@code List<List<FacetLabel>} where outer list has one non-null entry per document
+   * @param dimension  facet dimension for which labels are requested. A null value fetches labels for all dimensions.
+   * @return {@code List<List<FacetLabel>>} where outer list has one non-null entry per document.
    * and inner list contain all {@link FacetLabel} entries that belong to a document.
    * @throws IOException when a low-level IO issue occurs.
    */
-  public List<List<FacetLabel>> getAllTaxonomyFacetLabels(TaxonomyReader taxoReader, FacetsCollector fc) throws IOException {
+  public List<List<FacetLabel>> getAllTaxonomyFacetLabels(String dimension, TaxonomyReader taxoReader, FacetsCollector fc) throws IOException {
     List<List<FacetLabel>> actualLabels = new ArrayList<>();
     TaxonomyFacetLabels taxoLabels = new TaxonomyFacetLabels(taxoReader, FacetsConfig.DEFAULT_INDEX_FIELD_NAME);
-
     for (MatchingDocs m : fc.getMatchingDocs()) {
       FacetLabelReader facetLabelReader = taxoLabels.getFacetLabelReader(m.context);
-
       DocIdSetIterator disi = m.bits.iterator();
       while (disi.nextDoc() != DocIdSetIterator.NO_MORE_DOCS) {
-        List<FacetLabel> facetLabels = new ArrayList<>();
-        int docId = disi.docID();
-        FacetLabel facetLabel = facetLabelReader.nextFacetLabel(docId);
-        while (facetLabel != null) {
-          facetLabels.add(facetLabel);
-          facetLabel = facetLabelReader.nextFacetLabel(docId);
-        }
-        actualLabels.add(facetLabels);
+        actualLabels.add(allFacetLabels(disi.docID(), dimension, facetLabelReader));
       }
     }
     return actualLabels;
   }
 
+  /**
+   * Utility method to get all facet labels for an input docId and dimension using the supplied
+   * {@link FacetLabelReader}.
+   *
+   * @param docId docId for which facet labels are needed.
+   * @param dimension Retain facet labels for supplied dimension only. A null value fetches all facet labels.
+   * @param facetLabelReader {@link FacetLabelReader} instance used to get facet labels for input docId.
+   * @return {@code List<FacetLabel>} containing matching facet labels.
+   * @throws IOException when a low-level IO issue occurs while reading facet labels.
+   */
+  List<FacetLabel> allFacetLabels(int docId, String dimension, FacetLabelReader facetLabelReader) throws IOException {
+    List<FacetLabel> facetLabels = new ArrayList<>();
+    FacetLabel facetLabel;
+    if (dimension != null) {
+      for (facetLabel = facetLabelReader.nextFacetLabel(docId, dimension); facetLabel != null; ) {
+        facetLabels.add(facetLabel);
+        facetLabel = facetLabelReader.nextFacetLabel(docId, dimension);
+      }
+    } else {
+      for (facetLabel = facetLabelReader.nextFacetLabel(docId); facetLabel != null; ) {
+        facetLabels.add(facetLabel);
+        facetLabel = facetLabelReader.nextFacetLabel(docId);
+      }
+    }
+    return facetLabels;
+  }
+
   protected String[] getRandomTokens(int count) {
     String[] tokens = new String[count];
     for(int i=0;i<tokens.length;i++) {
diff --git a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
index deb8208..ecd67c6 100644
--- a/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
+++ b/lucene/facet/src/test/org/apache/lucene/facet/taxonomy/TestTaxonomyFacetCounts.java
@@ -22,7 +22,7 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
+import java.util.HashSet; 
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -696,7 +696,6 @@
               } else {
                 expectedCounts[j].put(doc.dims[j], v.intValue() + 1);
               }
-
               // Add document facet labels
               facetLabels.add(new FacetLabel("dim" + j, doc.dims[j]));
             }
@@ -719,11 +718,6 @@
         }
       }
 
-      // Test facet labels for each matching test doc
-      List<List<FacetLabel>> actualLabels = getAllTaxonomyFacetLabels(tr, fc);
-      assertEquals(expectedLabels.size(), actualLabels.size());
-      assertTrue(sortedFacetLabels(expectedLabels).equals(sortedFacetLabels(actualLabels)));
-
       // Sort by highest value, tie break by value:
       sortFacetResults(expected);
 
@@ -733,6 +727,18 @@
       sortTies(actual);
 
       assertEquals(expected, actual);
+
+      // Test facet labels for each matching test doc
+      List<List<FacetLabel>> actualLabels = getAllTaxonomyFacetLabels(null, tr, fc);
+      assertEquals(expectedLabels.size(), actualLabels.size());
+      assertTrue(sortedFacetLabels(expectedLabels).equals(sortedFacetLabels(actualLabels)));
+
+      // Test facet labels for each matching test doc, given a specific dimension chosen randomly
+      final String dimension = "dim" + random().nextInt(numDims);
+      expectedLabels.forEach(list -> list.removeIf(f -> f.components[0].equals(dimension) == false));
+
+      actualLabels = getAllTaxonomyFacetLabels(dimension, tr, fc);
+      assertTrue(sortedFacetLabels(expectedLabels).equals(sortedFacetLabels(actualLabels)));
     }
 
     w.close();
@@ -740,8 +746,8 @@
   }
 
   private static List<List<FacetLabel>> sortedFacetLabels(List<List<FacetLabel>> allFacetLabels) {
-    // sort each inner list since there is no guaranteed order in which FacetLabels
-    // are expected to be retrieved for each document
+    // Sort each inner list since there is no guaranteed order in which
+    // FacetLabels are expected to be retrieved for each document.
     for (List<FacetLabel> facetLabels : allFacetLabels) {
       Collections.sort(facetLabels);
     }
diff --git a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
index d552aef..3be0a54 100644
--- a/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
+++ b/lucene/queryparser/src/java/org/apache/lucene/queryparser/complexPhrase/ComplexPhraseQueryParser.java
@@ -434,10 +434,15 @@
 
     @Override
     public String toString(String field) {
-      if (slopFactor == 0)
-        return "\"" + phrasedQueryStringContents + "\"";
-      else
-        return "\"" + phrasedQueryStringContents + "\"" + "~" + slopFactor;
+      StringBuilder sb = new StringBuilder();
+      if (!this.field.equals(field)) {
+        sb.append(this.field).append(":");
+      }
+      sb.append("\"").append(phrasedQueryStringContents).append("\"");
+      if (slopFactor != 0) {
+        sb.append("~").append(slopFactor);
+      }
+      return sb.toString();
     }
 
     @Override
diff --git a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
index 5935da9..68b3e39 100644
--- a/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
+++ b/lucene/queryparser/src/test/org/apache/lucene/queryparser/complexPhrase/TestComplexPhraseQuery.java
@@ -169,12 +169,14 @@
   }
 
   public void testToStringContainsSlop() throws Exception {
-    ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser(defaultFieldName, analyzer);
+    ComplexPhraseQueryParser qp = new ComplexPhraseQueryParser("", analyzer);
     int slop = random().nextInt(31) + 1;
 
     String qString = "name:\"j* smyth~\"~" + slop;
     Query query = qp.parse(qString);
-    assertTrue("Slop is not shown in toString()", query.toString().endsWith("~" + slop));
+    String actualQStr = query.toString();
+    assertTrue("Slop is not shown in toString()", actualQStr.endsWith("~" + slop));
+    assertEquals(qString, actualQStr);
 
     String string = "\"j* smyth~\"";
     Query q = qp.parse(string);
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java
index 2fc68b4..2b23a65 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/StrategyTestCase.java
@@ -67,7 +67,7 @@
   protected boolean storeShape = true;
 
   protected void executeQueries(SpatialMatchConcern concern, String... testQueryFile) throws IOException {
-    log.info("testing queried for strategy "+strategy); // logOk
+    log.info("testing queried for strategy "+strategy); // nowarn
     for( String path : testQueryFile ) {
       Iterator<SpatialTestQuery> testQueryIterator = getTestQueries(path, ctx);
       runTestQueries(testQueryIterator, concern);
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java
index 357a487..5b71985 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/HeatmapFacetCounterTest.java
@@ -67,7 +67,7 @@
 
   @After
   public void after() {
-    log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // logOK
+    log.info("Validated " + cellsValidated + " cells, " + cellValidatedNonZero + " non-zero"); // nowarn
   }
 
   @Test
diff --git a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java
index 97f3b38..6349060 100644
--- a/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java
+++ b/lucene/spatial-extras/src/test/org/apache/lucene/spatial/prefix/RandomSpatialOpFuzzyPrefixTreeTest.java
@@ -87,7 +87,7 @@
       ((PrefixTreeStrategy) strategy).setPointsOnly(true);
     }
 
-    log.info("Strategy: " +  strategy.toString()); // logOk
+    log.info("Strategy: " +  strategy.toString()); // nowarn
   }
 
   private void setupCtx2D(SpatialContext ctx) {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
index fe9a706..914ae37 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/analysis/BaseTokenStreamTestCase.java
@@ -392,6 +392,7 @@
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output, int startOffsets[], int endOffsets[], String types[], int posIncrements[], int posLengths[], boolean graphOffsetsAreCorrect, byte[][] payloads) throws IOException {
     assertTokenStreamContents(a.tokenStream("dummy", input), output, startOffsets, endOffsets, types, posIncrements, posLengths, input.length(), null, null, graphOffsetsAreCorrect, payloads);
     checkResetException(a, input);
+    checkAnalysisConsistency(random(), a, true, input, graphOffsetsAreCorrect);
   }
 
   public static void assertAnalyzesTo(Analyzer a, String input, String[] output) throws IOException {
@@ -422,7 +423,7 @@
     assertAnalyzesTo(a, input, output, startOffsets, endOffsets, null, posIncrements, null);
   }
 
-  static void checkResetException(Analyzer a, String input) throws IOException {
+  public static void checkResetException(Analyzer a, String input) throws IOException {
     TokenStream ts = a.tokenStream("bogus", input);
     try {
       if (ts.incrementToken()) {
diff --git a/lucene/test-framework/src/java/org/apache/lucene/geo/ShapeTestUtil.java b/lucene/test-framework/src/java/org/apache/lucene/geo/ShapeTestUtil.java
index 47cd3bd..62e71d1 100644
--- a/lucene/test-framework/src/java/org/apache/lucene/geo/ShapeTestUtil.java
+++ b/lucene/test-framework/src/java/org/apache/lucene/geo/ShapeTestUtil.java
@@ -71,7 +71,11 @@
     Random random = random();
     float x = nextFloat(random);
     float y = nextFloat(random);
-    float radius = random().nextFloat() * Float.MAX_VALUE / 2;
+    float radius = 0;
+    while (radius == 0) {
+      radius = random().nextFloat() * Float.MAX_VALUE / 2;
+    }
+    assert radius != 0;
     return new XYCircle(x, y, radius);
   }
 
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index fb936ab..ab3eafe 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -126,6 +126,10 @@
 
 * SOLR-14036: Remove distrib=false from /terms handler's default parameters (David Smiley, Munendra S N)
 
+* SOLR-14829: Clean up the documentation for Request Handlers and Search Components (Alexandre Rafalovitch)
+
+* SOLR-10370: ReplicationHandler should fetch index at fixed delay instead of fixed rate (Cao Manh Dat)
+
 Bug Fixes
 ---------------------
 * SOLR-14546: Fix for a relatively hard to hit issue in OverseerTaskProcessor that could lead to out of order execution
@@ -187,6 +191,10 @@
 * SOLR-14859: DateRangeField now throws errors when invalid field/fieldType options specified; no longer silently accepts incompatible option values
   (Jason Gerlowski, Chris Hostetter, Munendra S N)
 
+* SOLR-14905: Update commons-io version to 2.8.0 due to security vulnerability. (Nazerke Seidan via Bruno Roustant)
+
+* SOLR-14691: Metrics reporting should avoid creating objects. (ab, Noble Paul)
+
 Optimizations
 ---------------------
 
@@ -251,6 +259,10 @@
 * SOLR-14850: Fix ExactStatsCache NullPointerException when shards.tolerant=true.
   (Yevhen Tienkaiev via ab)
 
+* SOLR-14897: Fix unlimited forwarding of a request from one node to another node. (hossman, Munendra S N)
+
+* SOLR-14898: Stop returning duplicate HTTP response headers when requests are forwarded to another node. (hossman)
+
 Other Changes
 ---------------------
 
@@ -281,6 +293,18 @@
 
 * SOLR-14876: Upgrade to zookeeper 3.6.2 (odidev via Erick Erickson)
 
+* SOLR-14333: Implement toString in Collapse filter so that proper parsed queries returned in debug response.
+  Also, deprecate unused constants NULL_COLLAPSE, NULL_IGNORE, NULL_EXPAND, HINT_MULTI_DOCVALUES in collapse parser.
+  (Guna Sekhar Dora Kovvuru, Munendra S N, Mike Drob)
+
+* SOLR-12987: Deprecated plugins/features are now logged once and with log category org.apache.solr.DEPRECATED
+  (David Smiley)
+
+* SOLR-14910: Use in-line tags for logger declarations in Gradle ValidateLogCalls that are non-standard,
+              change //logok to //nowarn (Erick Erickson)
+
+* SOLR-14659: Remove restlet as dependency for the ManagedResource API (Timothy Potter, noble)
+
 ==================  8.6.2 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -289,6 +313,8 @@
 ---------------------
 * SOLR-14751: Zookeeper Admin screen not working for old ZK versions (janhoy)
 
+* SOLR-14663: Copy ConfigSet root data from base ConfigSet when using CREATE command (Andras Salamon, Tomás Fernández Löbbe)
+
 ==================  8.6.1 ==================
 
 Bug Fixes
diff --git a/solr/NOTICE.txt b/solr/NOTICE.txt
index f60d6b4..f5d6fcc 100644
--- a/solr/NOTICE.txt
+++ b/solr/NOTICE.txt
@@ -546,18 +546,6 @@
 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 
 =========================================================================
-==     Restlet Notice                                                  ==
-=========================================================================
-
-Copyright (C) 2005-2014 Restlet S.A.S.
-
-Restlet is a registered trademark of Restlet S.A.S.
-
-This product contains software developed by the Restlet project.
-
-See http://www.restlet.org/
-
-=========================================================================
 ==     Protocol Buffers Notice                                         ==
 =========================================================================
 
diff --git a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
index 6fd464f..dc01d32 100644
--- a/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
+++ b/solr/contrib/analysis-extras/src/java/org/apache/solr/schema/ICUCollationField.java
@@ -28,7 +28,7 @@
 import org.apache.lucene.analysis.TokenStream;
 import org.apache.lucene.analysis.tokenattributes.TermToBytesRefAttribute;
 import org.apache.lucene.util.ResourceLoader;
-import org.apache.lucene.collation.ICUCollationKeyAnalyzer;
+import org.apache.lucene.analysis.icu.ICUCollationKeyAnalyzer;
 import org.apache.lucene.document.SortedDocValuesField;
 import org.apache.lucene.document.SortedSetDocValuesField;
 import org.apache.lucene.index.IndexableField;
diff --git a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java
index 8e1f625..e62cd0e 100644
--- a/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java
+++ b/solr/contrib/clustering/src/java/org/apache/solr/handler/clustering/carrot2/CarrotClusteringEngine.java
@@ -163,7 +163,7 @@
       if (attributeXmls.length > 0) {
         if (attributeXmls.length > 1) {
           log.warn("More than one attribute file found, first one will be used: {}"
-              , Arrays.toString(attributeXmls)); // logOk
+              , Arrays.toString(attributeXmls)); // nowarn
         }
 
         withContextClassLoader(core.getResourceLoader().getClassLoader(), () -> {
diff --git a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
index 1b33ec9..f74e7b2 100644
--- a/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
+++ b/solr/contrib/extraction/src/java/org/apache/solr/handler/extraction/ExtractingDocumentLoader.java
@@ -230,7 +230,7 @@
         } catch (TikaException e) {
           if(ignoreTikaException)
             log.warn(new StringBuilder("skip extracting text due to ").append(e.getLocalizedMessage())
-                .append(". metadata=").append(metadata.toString()).toString()); // logOk
+                .append(". metadata=").append(metadata.toString()).toString()); // nowarn
           else
             throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
         }
diff --git a/solr/contrib/ltr/build.gradle b/solr/contrib/ltr/build.gradle
index f3288d9..efe7f92 100644
--- a/solr/contrib/ltr/build.gradle
+++ b/solr/contrib/ltr/build.gradle
@@ -27,7 +27,6 @@
     exclude group: "net.bytebuddy", module: "byte-buddy-agent"
   })
   testImplementation ('org.objenesis:objenesis')
-  testImplementation ('org.restlet.jee:org.restlet.ext.servlet')
 
   testImplementation project(':solr:test-framework')
 }
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
index 22c0201..5dd7cf0 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
@@ -44,11 +44,8 @@
 import org.apache.solr.ltr.store.FeatureStore;
 import org.apache.solr.ltr.store.rest.ManagedFeatureStore;
 import org.apache.solr.ltr.store.rest.ManagedModelStore;
-import org.apache.solr.rest.ManagedResourceStorage;
-import org.apache.solr.rest.SolrSchemaRestApi;
 import org.apache.solr.util.RestTestBase;
 import org.eclipse.jetty.servlet.ServletHolder;
-import org.restlet.ext.servlet.ServerServlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -178,17 +175,8 @@
               + "/collection1/conf/schema.xml"));
     }
 
-    final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi",
-        ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application",
-        SolrSchemaRestApi.class.getCanonicalName());
-    solrRestApi.setInitParameter("storageIO",
-        ManagedResourceStorage.InMemoryStorageIO.class.getCanonicalName());
-    extraServlets.put(solrRestApi, PARENT_ENDPOINT);
-
     System.setProperty("managed.schema.mutable", "true");
-
+    final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
     return extraServlets;
   }
 
diff --git a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java
index 62763df..7607d1f 100644
--- a/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java
+++ b/solr/contrib/prometheus-exporter/src/java/org/apache/solr/prometheus/collector/SchedulerMetricsCollector.java
@@ -93,7 +93,7 @@
         try {
           metricSamples.addAll(future.get());
         } catch (ExecutionException e) {
-          log.error("Error occurred during metrics collection", e.getCause());//logok
+          log.error("Error occurred during metrics collection", e.getCause());//nowarn
           // continue any ways; do not fail
         }
       }
diff --git a/solr/core/build.gradle b/solr/core/build.gradle
index 8c7bcc9..cfbd6d6 100644
--- a/solr/core/build.gradle
+++ b/solr/core/build.gradle
@@ -95,9 +95,7 @@
   implementation 'org.codehaus.janino:commons-compiler'
   implementation 'org.codehaus.janino:janino'
 
-  api 'org.restlet.jee:org.restlet'
   implementation 'org.rrd4j:rrd4j'
-  implementation 'org.restlet.jee:org.restlet.ext.servlet'
 
   implementation ('org.apache.calcite.avatica:avatica-core') { transitive = false }
   implementation ('org.apache.calcite:calcite-core') { transitive = false }
diff --git a/solr/core/src/java/org/apache/solr/cloud/ActiveReplicaWatcher.java b/solr/core/src/java/org/apache/solr/cloud/ActiveReplicaWatcher.java
index 66406fa..9030c38 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ActiveReplicaWatcher.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ActiveReplicaWatcher.java
@@ -119,11 +119,11 @@
       log.debug("-- onStateChanged@{}: replicaIds={}, solrCoreNames={} {}\ncollectionState {}"
           , Long.toHexString(hashCode()), replicaIds, solrCoreNames
           , (latch != null ? "\nlatch count=" + latch.getCount() : "")
-          , collectionState); // logOk
+          , collectionState); // nowarn
     }
     if (collectionState == null) { // collection has been deleted - don't wait
       if (log.isDebugEnabled()) {
-        log.debug("-- collection deleted, decrementing latch by {} ", replicaIds.size() + solrCoreNames.size()); // logOk
+        log.debug("-- collection deleted, decrementing latch by {} ", replicaIds.size() + solrCoreNames.size()); // nowarn
       }
       if (latch != null) {
         for (int i = 0; i < replicaIds.size() + solrCoreNames.size(); i++) {
diff --git a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
index 65de4ca..fc20731 100644
--- a/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
+++ b/solr/core/src/java/org/apache/solr/cloud/CloudUtil.java
@@ -93,7 +93,7 @@
             }
             log.error("{}",
                 new SolrException(ErrorCode.SERVER_ERROR, "Will not load SolrCore " + desc.getName()
-                    + " because it has been replaced due to failover.")); // logOk
+                    + " because it has been replaced due to failover.")); // nowarn
             throw new SolrException(ErrorCode.SERVER_ERROR,
                 "Will not load SolrCore " + desc.getName()
                     + " because it has been replaced due to failover.");
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index bcdec9a..2464a46 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -632,9 +632,9 @@
   private void printTrackingMaps() {
     if (log.isDebugEnabled()) {
       log.debug("RunningTasks: {}", runningTasks);
-      log.debug("BlockedTasks: {}", blockedTasks.keySet()); // logOk
-      log.debug("CompletedTasks: {}", completedTasks.keySet()); // logOk
-      log.debug("RunningZKTasks: {}", runningZKTasks); // logOk
+      log.debug("BlockedTasks: {}", blockedTasks.keySet()); // nowarn
+      log.debug("CompletedTasks: {}", completedTasks.keySet()); // nowarn
+      log.debug("RunningZKTasks: {}", runningZKTasks); // nowarn
     }
   }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 2edb926..9ec9b46 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -189,8 +189,8 @@
   public static final String version = "1.0";
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private static final Logger requestLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".Request");
-  private static final Logger slowLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".SlowRequest");
+  private static final Logger requestLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".Request"); //nowarn
+  private static final Logger slowLog = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass().getName() + ".SlowRequest"); //nowarn
 
   private String name;
   private String logid; // used to show what name is set
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index dbfb0d5..609da78 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -62,6 +62,7 @@
 import org.apache.solr.common.cloud.SolrClassLoader;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.handler.component.ShardHandlerFactory;
+import org.apache.solr.logging.DeprecationLog;
 import org.apache.solr.pkg.PackageListeningClassLoader;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.QueryResponseWriter;
@@ -529,8 +530,8 @@
 
         // print warning if class is deprecated
         if (clazz.isAnnotationPresent(Deprecated.class)) {
-          log.warn("Solr loaded a deprecated plugin/analysis class [{}]. Please consult documentation how to replace it accordingly.",
-              cname);
+          DeprecationLog.log(cname,
+            "Solr loaded a deprecated plugin/analysis class [" + cname + "]. Please consult documentation how to replace it accordingly.");
         }
       }
     }
@@ -693,18 +694,25 @@
       }
 
       for (ResourceLoaderAware aware : arr) {
-        CURRENT_AWARE.set(aware);
-        try{
-          aware.inform(loader);
-        } finally {
-          CURRENT_AWARE.remove();
-        }
+        informAware(loader, aware);
 
       }
     }
   }
 
   /**
+   * Set the current {@link ResourceLoaderAware} object in thread local so that appropriate classloader can be used for package loaded classes
+   */
+  public static void informAware(ResourceLoader loader, ResourceLoaderAware aware) throws IOException {
+    CURRENT_AWARE.set(aware);
+    try{
+      aware.inform(loader);
+    } finally {
+      CURRENT_AWARE.remove();
+    }
+  }
+
+  /**
    * Register any {@link SolrInfoBean}s
    *
    * @param infoRegistry The Info Registry
@@ -882,6 +890,6 @@
   }
 
   //This is to verify if this requires to use the schema classloader for classes loaded from packages
-  public static final ThreadLocal<ResourceLoaderAware> CURRENT_AWARE = new ThreadLocal<>();
+  private static final ThreadLocal<ResourceLoaderAware> CURRENT_AWARE = new ThreadLocal<>();
 
 }
diff --git a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
index c8a0512..66d195c 100644
--- a/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
+++ b/solr/core/src/java/org/apache/solr/handler/IndexFetcher.java
@@ -437,7 +437,7 @@
 
       if (log.isInfoEnabled()) {
         log.info("Follower's generation: {}", commit.getGeneration());
-        log.info("Follower's version: {}", IndexDeletionPolicyWrapper.getCommitTimestamp(commit)); // logOK
+        log.info("Follower's version: {}", IndexDeletionPolicyWrapper.getCommitTimestamp(commit)); // nowarn
       }
 
       if (latestVersion == 0L) {
@@ -1249,7 +1249,7 @@
       try {
         if (log.isInfoEnabled()) {
           log.info("From dir files: {}", Arrays.asList(tmpIdxDir.listAll()));
-          log.info("To dir files: {}", Arrays.asList(indexDir.listAll())); //logOk
+          log.info("To dir files: {}", Arrays.asList(indexDir.listAll())); //nowarn
         }
       } catch (IOException e) {
         throw new RuntimeException(e);
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index b200ecc..97bed5c 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -68,6 +68,7 @@
 import org.apache.lucene.store.IOContext;
 import org.apache.lucene.store.IndexInput;
 import org.apache.lucene.store.RateLimiter;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.CommonParams;
@@ -894,7 +895,7 @@
          true, "isLeader", getCategory().toString(), scope);
     solrMetricsContext.gauge(() -> isFollower,
          true, "isFollower", getCategory().toString(), scope);
-    final MetricsMap fetcherMap = new MetricsMap((detailed, map) -> {
+    final MetricsMap fetcherMap = new MetricsMap(map -> {
       IndexFetcher fetcher = currentIndexFetcher;
       if (fetcher != null) {
         map.put(LEADER_URL, fetcher.getLeaderUrl());
@@ -1110,10 +1111,10 @@
     }
   }
 
-  private void addVal(Map<String, Object> map, String key, Properties props, @SuppressWarnings({"rawtypes"})Class clzz) {
+  private void addVal(MapWriter.EntryWriter ew, String key, Properties props, @SuppressWarnings({"rawtypes"})Class clzz) {
     Object val = formatVal(key, props, clzz);
     if (val != null) {
-      map.put(key, val);
+      ew.putNoEx(key, val);
     }
   }
 
@@ -1217,7 +1218,7 @@
     // Randomize initial delay, with a minimum of 1ms
     long initialDelayNs = new Random().nextLong() % pollIntervalNs
         + TimeUnit.NANOSECONDS.convert(1, TimeUnit.MILLISECONDS);
-    executorService.scheduleAtFixedRate(task, initialDelayNs, pollIntervalNs, TimeUnit.NANOSECONDS);
+    executorService.scheduleWithFixedDelay(task, initialDelayNs, pollIntervalNs, TimeUnit.NANOSECONDS);
     log.info("Poll scheduled at an interval of {}ms",
         TimeUnit.MILLISECONDS.convert(pollIntervalNs, TimeUnit.NANOSECONDS));
   }
@@ -1606,7 +1607,7 @@
           }
           fos.write(buf, 0, read);
           fos.flush();
-          log.debug("Wrote {} bytes for file {}", offset + read, fileName); // logOK
+          log.debug("Wrote {} bytes for file {}", offset + read, fileName); // nowarn
 
           //Pause if necessary
           maxBytesBeforePause += read;
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index 1fcc183..d41ad54 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -157,8 +157,8 @@
     numClientErrors = solrMetricsContext.meter("clientErrors", getCategory().toString(), scope);
     numTimeouts = solrMetricsContext.meter("timeouts", getCategory().toString(), scope);
     requests = solrMetricsContext.counter("requests", getCategory().toString(), scope);
-    MetricsMap metricsMap = new MetricsMap((detail, map) ->
-        shardPurposes.forEach((k, v) -> map.put(k, v.getCount())));
+    MetricsMap metricsMap = new MetricsMap(map ->
+        shardPurposes.forEach((k, v) -> map.putNoEx(k, v.getCount())));
     solrMetricsContext.gauge(metricsMap, true, "shardRequests", getCategory().toString(), scope);
     requestTimes = solrMetricsContext.timer("requestTimes", getCategory().toString(), scope);
     distribRequestTimes = solrMetricsContext.timer("requestTimes", getCategory().toString(), scope, "distrib");
diff --git a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java
index 606bab5..5a8f9d2 100644
--- a/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SchemaHandler.java
@@ -39,6 +39,7 @@
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.rest.RestManager;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.schema.ManagedIndexSchema;
 import org.apache.solr.schema.SchemaManager;
@@ -60,6 +61,7 @@
 public class SchemaHandler extends RequestHandlerBase implements SolrCoreAware, PermissionNameProvider {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private boolean isImmutableConfigSet = false;
+  private SolrRequestHandler managedResourceRequestHandler;
 
   private static final Map<String, String> level2;
 
@@ -106,6 +108,8 @@
     switch (ctx.getHttpMethod()) {
       case "GET":
         return PermissionNameProvider.Name.SCHEMA_READ_PERM;
+      case "PUT":
+      case "DELETE":
       case "POST":
         return PermissionNameProvider.Name.SCHEMA_EDIT_PERM;
       default:
@@ -257,6 +261,8 @@
     String prefix =  parts.get(0);
     if(subPaths.contains(prefix)) return this;
 
+    if(managedResourceRequestHandler != null) return managedResourceRequestHandler;
+
     return null;
   }
 
@@ -273,6 +279,7 @@
   @Override
   public void inform(SolrCore core) {
     isImmutableConfigSet = SolrConfigHandler.getImmutable(core);
+    this.managedResourceRequestHandler =  new ManagedResourceRequestHandler(core.getRestManager());
   }
 
   @Override
@@ -290,4 +297,36 @@
   public Boolean registerV2() {
     return Boolean.TRUE;
   }
+
+  private  class ManagedResourceRequestHandler extends RequestHandlerBase implements PermissionNameProvider {
+
+
+    private final RestManager restManager;
+
+    private ManagedResourceRequestHandler(RestManager restManager) {
+      this.restManager = restManager;
+    }
+
+    @Override
+    public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
+      RestManager.ManagedEndpoint me = new RestManager.ManagedEndpoint(restManager);
+      me.doInit(req, rsp);
+      me.delegateRequestToManagedResource();
+    }
+
+    @Override
+    public Name getPermissionName(AuthorizationContext ctx) {
+      return SchemaHandler.this.getPermissionName(ctx);
+    }
+
+    @Override
+    public String getName() {
+      return null;
+    }
+
+    @Override
+    public String getDescription() {
+      return null;
+    }
+  }
 }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 3c9e37c..c2cefbc 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -1349,7 +1349,7 @@
           success = true;
           break;
         }
-        log.warn("Force leader attempt {}. Waiting 5 secs for an active leader. State of the slice: {}", (i + 1), slice); //logok
+        log.warn("Force leader attempt {}. Waiting 5 secs for an active leader. State of the slice: {}", (i + 1), slice); //nowarn
       }
 
       if (success) {
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
index ff1a255..6d94fba 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ConfigSetsHandler.java
@@ -73,6 +73,7 @@
  * A {@link org.apache.solr.request.SolrRequestHandler} for ConfigSets API requests.
  */
 public class ConfigSetsHandler extends RequestHandlerBase implements PermissionNameProvider {
+  final public static Boolean DISABLE_CREATE_AUTH_CHECKS = Boolean.getBoolean("solr.disableConfigSetsCreateAuthChecks"); // this is for back compat only
   final public static String DEFAULT_CONFIGSET_NAME = "_default";
   final public static String AUTOCREATED_CONFIGSET_SUFFIX = ".AUTOCREATED";
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
@@ -192,7 +193,7 @@
     } else {
       filesToDelete = Collections.emptySet();
     }
-    createBaseZnode(zkClient, overwritesExisting, isTrusted(req), cleanup, configPathInZk);
+    createBaseZnode(zkClient, overwritesExisting, isTrusted(req, coreContainer.getAuthenticationPlugin()), cleanup, configPathInZk);
 
     ZipInputStream zis = new ZipInputStream(inputStream, StandardCharsets.UTF_8);
     ZipEntry zipEntry = null;
@@ -259,9 +260,19 @@
    * Fail if an untrusted request tries to update a trusted ConfigSet
    */
   private void ensureOverwritingUntrustedConfigSet(SolrZkClient zkClient, String configSetZkPath) {
+    boolean isCurrentlyTrusted = isCurrentlyTrusted(zkClient, configSetZkPath);
+    if (isCurrentlyTrusted) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "Trying to make an unstrusted ConfigSet update on a trusted configSet");
+    }
+  }
+
+  private static boolean isCurrentlyTrusted(SolrZkClient zkClient, String configSetZkPath) {
     byte[] configSetNodeContent;
     try {
       configSetNodeContent = zkClient.getData(configSetZkPath, null, null, true);
+      if (configSetNodeContent == null || configSetNodeContent.length == 0) {
+        return true;
+      }
     } catch (KeeperException e) {
       throw new SolrException(ErrorCode.SERVER_ERROR, "Exception while fetching current configSet at " + configSetZkPath, e);
     } catch (InterruptedException e) {
@@ -270,21 +281,15 @@
     }
     @SuppressWarnings("unchecked")
     Map<Object, Object> contentMap = (Map<Object, Object>) Utils.fromJSON(configSetNodeContent);
-    boolean isCurrentlyTrusted = (boolean) contentMap.getOrDefault("trusted", true);
-    if (isCurrentlyTrusted) {
-      throw new SolrException(ErrorCode.BAD_REQUEST, "Trying to make an unstrusted ConfigSet update on a trusted configSet");
-    }
+    return (boolean) contentMap.getOrDefault("trusted", true);
   }
 
-  boolean isTrusted(SolrQueryRequest req) {
-    AuthenticationPlugin authcPlugin = coreContainer.getAuthenticationPlugin();
-    if (log.isInfoEnabled()) {
-      log.info("Trying to upload a configset. authcPlugin: {}, user principal: {}",
-          authcPlugin, req.getUserPrincipal());
-    }
-    if (authcPlugin != null && req.getUserPrincipal() != null) {
+  static boolean isTrusted(SolrQueryRequest req, AuthenticationPlugin authPlugin) {
+    if (authPlugin != null && req.getUserPrincipal() != null) {
+      log.debug("Trusted configset request");
       return true;
     }
+    log.debug("Untrusted configset request");
     return false;
   }
 
@@ -361,8 +366,29 @@
       @Override
       public Map<String, Object> call(SolrQueryRequest req, SolrQueryResponse rsp, ConfigSetsHandler h) throws Exception {
         String baseConfigSetName = req.getParams().get(BASE_CONFIGSET, DEFAULT_CONFIGSET_NAME);
+        String newConfigSetName = req.getParams().get(NAME);
+        if (newConfigSetName == null || newConfigSetName.length() == 0) {
+          throw new SolrException(ErrorCode.BAD_REQUEST, "ConfigSet name not specified");
+        }
+
+        ZkConfigManager zkConfigManager = new ZkConfigManager(h.coreContainer.getZkController().getZkStateReader().getZkClient());
+        if (zkConfigManager.configExists(newConfigSetName)) {
+          throw new SolrException(ErrorCode.BAD_REQUEST, "ConfigSet already exists: " + newConfigSetName);
+        }
+
+        // is there a base config that already exists
+        if (!zkConfigManager.configExists(baseConfigSetName)) {
+          throw new SolrException(ErrorCode.BAD_REQUEST,
+                  "Base ConfigSet does not exist: " + baseConfigSetName);
+        }
+
         Map<String, Object> props = CollectionsHandler.copy(req.getParams().required(), null, NAME);
         props.put(BASE_CONFIGSET, baseConfigSetName);
+        if (!DISABLE_CREATE_AUTH_CHECKS &&
+                !isTrusted(req, h.coreContainer.getAuthenticationPlugin()) &&
+                isCurrentlyTrusted(h.coreContainer.getZkController().getZkClient(), ZkConfigManager.CONFIGS_ZKNODE + "/" +  baseConfigSetName)) {
+          throw new SolrException(ErrorCode.UNAUTHORIZED, "Can't create a configset with an unauthenticated request from a trusted " + BASE_CONFIGSET);
+        }
         return copyPropertiesWithPrefix(req.getParams(), props, PROPERTY_PREFIX + ".");
       }
     },
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
index e6d8017..76bb04d 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/MetricsHandler.java
@@ -25,6 +25,7 @@
 import java.util.Map;
 import java.util.Set;
 import java.util.function.BiConsumer;
+import java.util.function.Predicate;
 import java.util.regex.Pattern;
 import java.util.stream.Collectors;
 
@@ -36,6 +37,7 @@
 import com.codahale.metrics.MetricFilter;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.CommonTestInjection;
@@ -112,17 +114,15 @@
       return;
     }
     MetricFilter mustMatchFilter = parseMustMatchFilter(params);
-    MetricUtils.PropertyFilter propertyFilter = parsePropertyFilter(params);
+    Predicate<CharSequence> propertyFilter = parsePropertyFilter(params);
     List<MetricType> metricTypes = parseMetricTypes(params);
     List<MetricFilter> metricFilters = metricTypes.stream().map(MetricType::asMetricFilter).collect(Collectors.toList());
     Set<String> requestedRegistries = parseRegistries(params);
 
-    @SuppressWarnings({"rawtypes"})
-    NamedList response = new SimpleOrderedMap();
+    NamedList<Object> response = new SimpleOrderedMap<>();
     for (String registryName : requestedRegistries) {
       MetricRegistry registry = metricManager.registry(registryName);
-      @SuppressWarnings({"rawtypes"})
-      SimpleOrderedMap result = new SimpleOrderedMap();
+      SimpleOrderedMap<Object> result = new SimpleOrderedMap<>();
       MetricUtils.toMaps(registry, metricFilters, mustMatchFilter, propertyFilter, false,
           false, compact, false, (k, v) -> result.add(k, v));
       if (result.size() > 0) {
@@ -134,8 +134,8 @@
 
   @SuppressWarnings({"unchecked", "rawtypes"})
   public void handleKeyRequest(String[] keys, BiConsumer<String, Object> consumer) throws Exception {
-    SimpleOrderedMap result = new SimpleOrderedMap();
-    SimpleOrderedMap errors = new SimpleOrderedMap();
+    SimpleOrderedMap<Object> result = new SimpleOrderedMap<>();
+    SimpleOrderedMap<Object> errors = new SimpleOrderedMap<>();
     for (String key : keys) {
       if (key == null || key.isEmpty()) {
         continue;
@@ -158,7 +158,7 @@
         errors.add(key, "metric '" + metricName + "' not found");
         continue;
       }
-      MetricUtils.PropertyFilter propertyFilter = MetricUtils.PropertyFilter.ALL;
+      Predicate<CharSequence> propertyFilter = MetricUtils.ALL_PROPERTIES;
       if (propertyName != null) {
         propertyFilter = (name) -> name.equals(propertyName);
         // use escaped versions
@@ -173,6 +173,8 @@
       MetricUtils.convertMetric(key, m, propertyFilter, false, true, true, false, ":", (k, v) -> {
         if ((v instanceof Map) && propertyName != null) {
           ((Map)v).forEach((k1, v1) -> result.add(k + ":" + k1, v1));
+        } else if ((v instanceof MapWriter) && propertyName != null) {
+          ((MapWriter) v)._forEachEntry((k1, v1) -> result.add(k + ":" + k1, v1));
         } else {
           result.add(k, v);
         }
@@ -229,10 +231,10 @@
     return mustMatchFilter;
   }
 
-  private MetricUtils.PropertyFilter parsePropertyFilter(SolrParams params) {
+  private Predicate<CharSequence> parsePropertyFilter(SolrParams params) {
     String[] props = params.getParams(PROPERTY_PARAM);
     if (props == null || props.length == 0) {
-      return MetricUtils.PropertyFilter.ALL;
+      return MetricUtils.ALL_PROPERTIES;
     }
     final Set<String> filter = new HashSet<>();
     for (String prop : props) {
@@ -241,7 +243,7 @@
       }
     }
     if (filter.isEmpty()) {
-      return MetricUtils.PropertyFilter.ALL;
+      return MetricUtils.ALL_PROPERTIES;
     } else {
       return (name) -> filter.contains(name);
     }
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
index e0be4e0..15be746 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/PrepRecoveryOp.java
@@ -135,7 +135,7 @@
                       ", live=" + live + ", checkLive=" + checkLive + ", currentState=" + state
                       + ", localState=" + localState + ", nodeName=" + nodeName +
                       ", coreNodeName=" + coreNodeName + ", onlyIfActiveCheckResult=" + onlyIfActiveCheckResult
-                      + ", nodeProps: " + replica); //LOGOK
+                      + ", nodeProps: " + replica); //nowarn
             }
             if (!onlyIfActiveCheckResult && replica != null && (state == waitForState || leaderDoesNotNeedRecovery)) {
               if (checkLive == null) {
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index 59a9571..adfcb15 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -357,10 +357,10 @@
     super.initializeMetrics(parentContext, scope);
 
     this.solrMetricsContext.gauge(() -> ramBytesUsed(), true, "totalSizeInBytes", getCategory().toString());
-    MetricsMap suggestersMap = new MetricsMap((detailed, map) -> {
+    MetricsMap suggestersMap = new MetricsMap(map -> {
       for (Map.Entry<String, SolrSuggester> entry : suggesters.entrySet()) {
         SolrSuggester suggester = entry.getValue();
-        map.put(entry.getKey(), suggester.toString());
+        map.putNoEx(entry.getKey(), suggester.toString());
       }
     });
     this.solrMetricsContext.gauge(suggestersMap, true, "suggesters", getCategory().toString(), scope);
diff --git a/solr/core/src/java/org/apache/solr/logging/DeprecationLog.java b/solr/core/src/java/org/apache/solr/logging/DeprecationLog.java
new file mode 100644
index 0000000..e1b277a
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/logging/DeprecationLog.java
@@ -0,0 +1,53 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.logging;
+
+import java.lang.invoke.MethodHandles;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Utility to log a deprecation.
+ */
+public class DeprecationLog {
+  // Unused at runtime; present only to satisfy the validate-source-patterns.gradle source check.
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  public static final String LOG_PREFIX = "org.apache.solr.DEPRECATED.";
+
+  // featureId -> message; only the key's presence matters — the stored message value is never read back.
+  private static final Map<String, String> alreadyLogged = new ConcurrentHashMap<>();
+
+  /**
+   * Logs a deprecation warning for the provided feature, but only the first time.
+   * The logger name used is {@value #LOG_PREFIX} + {@code featureId}.
+   * Note that individual logger names can be disabled via logging configuration if needed.
+   * @return true if logged
+   */
+  public static boolean log(String featureId, String message) {
+    if (alreadyLogged.putIfAbsent(featureId, message) != null) {
+      return false;
+    }
+    Logger log = LoggerFactory.getLogger(LOG_PREFIX + featureId);
+    log.warn(message);
+    return true;
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java b/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java
index bd9abaf..8faccff 100644
--- a/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java
+++ b/solr/core/src/java/org/apache/solr/metrics/MetricsMap.java
@@ -28,6 +28,7 @@
 import javax.management.openmbean.OpenMBeanAttributeInfoSupport;
 import javax.management.openmbean.OpenType;
 import javax.management.openmbean.SimpleType;
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Field;
 import java.util.ArrayList;
@@ -38,6 +39,7 @@
 import com.codahale.metrics.Gauge;
 import com.codahale.metrics.Metric;
 import org.apache.lucene.store.AlreadyClosedException;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -51,18 +53,33 @@
  * {@link javax.management.openmbean.OpenType#ALLOWED_CLASSNAMES_LIST}, otherwise only their toString()
  * representation will be shown in JConsole.</p>
  */
-public class MetricsMap implements Gauge<Map<String,Object>>, DynamicMBean {
+public class MetricsMap implements Gauge<Map<String,Object>>, MapWriter, DynamicMBean {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   // set to true to use cached statistics between getMBeanInfo calls to work
   // around over calling getStatistics on MBeanInfos when iterating over all attributes (SOLR-6586)
   private final boolean useCachedStatsBetweenGetMBeanInfoCalls = Boolean.getBoolean("useCachedStatsBetweenGetMBeanInfoCalls");
 
-  private BiConsumer<Boolean, Map<String, Object>> initializer;
-  private Map<String, String> jmxAttributes = new HashMap<>();
+  private BiConsumer<Boolean, Map<String, Object>> mapInitializer;
+  private MapWriter initializer;
+  private Map<String, String> jmxAttributes;
   private volatile Map<String,Object> cachedValue;
 
-  public MetricsMap(BiConsumer<Boolean, Map<String,Object>> initializer) {
+  /**
+   * Create an instance that reports values to a Map.
+   * @param mapInitializer function to populate the Map result.
+   * @deprecated use {@link #MetricsMap(MapWriter)} instead.
+   */
+  @Deprecated(since = "8.7")
+  public MetricsMap(BiConsumer<Boolean, Map<String,Object>> mapInitializer) {
+    this.mapInitializer = mapInitializer;
+  }
+
+  /**
+   * Create an instance that reports values to a MapWriter.
+   * @param initializer function to populate the MapWriter result.
+   */
+  public MetricsMap(MapWriter initializer) {
     this.initializer = initializer;
   }
 
@@ -73,7 +90,11 @@
 
   public Map<String,Object> getValue(boolean detailed) {
     Map<String,Object> map = new HashMap<>();
-    initializer.accept(detailed, map);
+    if (mapInitializer != null) {
+      mapInitializer.accept(detailed, map);
+    } else {
+      initializer.toMap(map);
+    }
     return map;
   }
 
@@ -81,13 +102,22 @@
     return getValue().toString();
   }
 
+  // Lazily create jmxAttributes so the common case (no JMX attribute overrides) allocates nothing.
+  private synchronized void initJmxAttributes() {
+    if (jmxAttributes == null) {
+      jmxAttributes = new HashMap<>();
+    }
+  }
+
   @Override
   public Object getAttribute(String attribute) throws AttributeNotFoundException, MBeanException, ReflectionException {
     Object val;
     // jmxAttributes override any real values
-    val = jmxAttributes.get(attribute);
-    if (val != null) {
-      return val;
+    if (jmxAttributes != null) {
+      val = jmxAttributes.get(attribute);
+      if (val != null) {
+        return val;
+      }
     }
     Map<String,Object> stats = null;
     if (useCachedStatsBetweenGetMBeanInfoCalls) {
@@ -117,6 +147,7 @@
 
   @Override
   public void setAttribute(Attribute attribute) throws AttributeNotFoundException, InvalidAttributeValueException, MBeanException, ReflectionException {
+    initJmxAttributes();
     jmxAttributes.put(attribute.getName(), String.valueOf(attribute.getValue()));
   }
 
@@ -150,13 +181,15 @@
     if (useCachedStatsBetweenGetMBeanInfoCalls) {
       cachedValue = stats;
     }
-    jmxAttributes.forEach((k, v) -> {
-      attrInfoList.add(new MBeanAttributeInfo(k, String.class.getName(),
-          null, true, false, false));
-    });
+    if (jmxAttributes != null) {
+      jmxAttributes.forEach((k, v) -> {
+        attrInfoList.add(new MBeanAttributeInfo(k, String.class.getName(),
+            null, true, false, false));
+      });
+    }
     try {
       stats.forEach((k, v) -> {
-        if (jmxAttributes.containsKey(k)) {
+        if (jmxAttributes != null && jmxAttributes.containsKey(k)) {
           return;
         }
         @SuppressWarnings({"rawtypes"})
@@ -197,4 +230,14 @@
     }
     return null;
   }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    if (mapInitializer != null) {
+      Map<String, Object> value = getValue();
+      value.forEach((k, v) -> ew.putNoEx(k, v));
+    } else {
+      initializer.writeMap(ew);
+    }
+  }
 }
\ No newline at end of file
diff --git a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
index 929aa93..9606ef5 100644
--- a/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
+++ b/solr/core/src/java/org/apache/solr/metrics/reporters/solr/SolrReporter.java
@@ -453,7 +453,7 @@
         }
         final String effectiveGroup = group;
         MetricUtils.toSolrInputDocuments(metricManager.registry(registryName), Collections.singletonList(report.filter), MetricFilter.ALL,
-            MetricUtils.PropertyFilter.ALL, skipHistograms, skipAggregateValues, compact, metadata, doc -> {
+            MetricUtils.ALL_PROPERTIES, skipHistograms, skipAggregateValues, compact, metadata, doc -> {
               doc.setField(REGISTRY_ID, registryName);
               doc.setField(GROUP_ID, effectiveGroup);
               if (effectiveLabel != null) {
diff --git a/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java b/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java
index 01db581..f07bb27 100644
--- a/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java
+++ b/solr/core/src/java/org/apache/solr/rest/BaseSolrResource.java
@@ -15,37 +15,28 @@
  * limitations under the License.
  */
 package org.apache.solr.rest;
-import java.io.IOException;
-import java.io.OutputStream;
+
 import java.io.UnsupportedEncodingException;
 import java.net.URLDecoder;
 
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.CommonParams;
-import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.request.SolrRequestInfo;
 import org.apache.solr.response.QueryResponseWriter;
-import org.apache.solr.response.QueryResponseWriterUtil;
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.schema.IndexSchema;
 import org.apache.solr.servlet.ResponseUtils;
-import org.restlet.data.MediaType;
-import org.restlet.data.Method;
-import org.restlet.data.Status;
-import org.restlet.representation.OutputRepresentation;
-import org.restlet.resource.ResourceException;
-import org.restlet.resource.ServerResource;
 import org.slf4j.Logger;
-
 import static org.apache.solr.common.params.CommonParams.JSON;
 
 /**
- * Base class of all Solr Restlet server resource classes.
+ * Base class for delegating REST-oriented requests to ManagedResources. ManagedResources are heavy-weight and
+ * should not be created for every request, so this class serves as a gateway between a REST call and the resource.
  */
-public abstract class BaseSolrResource extends ServerResource {
+public abstract class BaseSolrResource {
   protected static final String SHOW_DEFAULTS = "showDefaults";
   public static final String UPDATE_TIMEOUT_SECS = "updateTimeoutSecs";
 
@@ -56,6 +47,7 @@
   private QueryResponseWriter responseWriter;
   private String contentType;
   private int updateTimeoutSecs = -1;
+  private int statusCode = -1;
 
   public SolrCore getSolrCore() { return solrCore; }
   public IndexSchema getSchema() { return schema; }
@@ -73,92 +65,33 @@
    * from the SolrRequestInfo thread local, then gets the SolrCore
    * and IndexSchema and sets up the response.
    * writer.
-   * <p>
-   * If an error occurs during initialization, setExisting(false) is
-   * called and an error status code and message is set; in this case,
-   * Restlet will not continue servicing the request (by calling the
-   * method annotated to associate it with GET, etc., but rather will
-   * send an error response.
    */
-  @Override
-  public void doInit() throws ResourceException {
-    super.doInit();
-    setNegotiated(false); // Turn off content negotiation for now
-    if (isExisting()) {
-      try {
-        SolrRequestInfo solrRequestInfo = SolrRequestInfo.getRequestInfo();
-        if (null == solrRequestInfo) {
-          final String message = "No handler or core found in " + getRequest().getOriginalRef().getPath();
-          doError(Status.CLIENT_ERROR_BAD_REQUEST, message);
-          setExisting(false);
-        } else {
-          solrRequest = solrRequestInfo.getReq();
-          if (null == solrRequest) {
-            final String message = "No handler or core found in " + getRequest().getOriginalRef().getPath();
-            doError(Status.CLIENT_ERROR_BAD_REQUEST, message);
-            setExisting(false);
-          } else {
-            solrResponse = solrRequestInfo.getRsp();
-            solrCore = solrRequest.getCore();
-            schema = solrRequest.getSchema();
-            String responseWriterName = solrRequest.getParams().get(CommonParams.WT);
-            if (null == responseWriterName) {
-              responseWriterName = JSON; // Default to json writer
-            }
-            String indent = solrRequest.getParams().get("indent");
-            if (null == indent || ! ("off".equals(indent) || "false".equals(indent))) {
-              // indent by default
-              ModifiableSolrParams newParams = new ModifiableSolrParams(solrRequest.getParams());
-              newParams.remove(indent);
-              newParams.add("indent", "on");
-              solrRequest.setParams(newParams);
-            }
-            responseWriter = solrCore.getQueryResponseWriter(responseWriterName);
-            contentType = responseWriter.getContentType(solrRequest, solrResponse);
-            final String path = getRequest().getRootRef().getPath();
-            if ( ! RestManager.SCHEMA_BASE_PATH.equals(path)) {
-              // don't set webapp property on the request when context and core/collection are excluded 
-              final int cutoffPoint = path.indexOf("/", 1);
-              final String firstPathElement = -1 == cutoffPoint ? path : path.substring(0, cutoffPoint);
-              solrRequest.getContext().put("webapp", firstPathElement); // Context path
-            }
-            SolrCore.preDecorateResponse(solrRequest, solrResponse);
-
-            // client application can set a timeout for update requests
-            String updateTimeoutSecsParam = getSolrRequest().getParams().get(UPDATE_TIMEOUT_SECS);
-            if (updateTimeoutSecsParam != null)
-              updateTimeoutSecs = Integer.parseInt(updateTimeoutSecsParam);
-          }
-        }
-      } catch (Throwable t) {
-        if (t instanceof OutOfMemoryError) {
-          throw (OutOfMemoryError) t;
-        }
-        setExisting(false);
-        throw new ResourceException(t);
+  public void doInit(SolrQueryRequest solrRequest, SolrQueryResponse solrResponse) {
+    try {
+      this.solrRequest = solrRequest;
+      this.solrResponse = solrResponse;
+      solrCore = solrRequest.getCore();
+      schema = solrRequest.getSchema();
+      String responseWriterName = solrRequest.getParams().get(CommonParams.WT, JSON);
+      responseWriter = solrCore.getQueryResponseWriter(responseWriterName);
+      contentType = responseWriter.getContentType(solrRequest, solrResponse);
+      final String path = solrRequest.getPath();
+      if ( ! RestManager.SCHEMA_BASE_PATH.equals(path)) {
+        // don't set webapp property on the request when context and core/collection are excluded
+        final int cutoffPoint = path.indexOf("/", 1);
+        final String firstPathElement = -1 == cutoffPoint ? path : path.substring(0, cutoffPoint);
+        solrRequest.getContext().put("webapp", firstPathElement); // Context path
       }
-    }
-  }
 
-  /**
-   * This class serves as an adapter between Restlet and Solr's response writers. 
-   */
-  public class SolrOutputRepresentation extends OutputRepresentation {
-    
-    public SolrOutputRepresentation() {
-      // No normalization, in case of a custom media type
-      super(MediaType.valueOf(contentType));
-      // TODO: For now, don't send the Vary: header, but revisit if/when content negotiation is added
-      getDimensions().clear();
-    }
-    
-    
-    /** Called by Restlet to get the response body */
-    @Override
-    public void write(OutputStream outputStream) throws IOException {
-      if (getRequest().getMethod() != Method.HEAD) {
-        QueryResponseWriterUtil.writeQueryResponse(outputStream, responseWriter, solrRequest, solrResponse, contentType);
+      // client application can set a timeout for update requests
+      String updateTimeoutSecsParam = solrRequest.getParams().get(UPDATE_TIMEOUT_SECS);
+      if (updateTimeoutSecsParam != null)
+        updateTimeoutSecs = Integer.parseInt(updateTimeoutSecsParam);
+    } catch (Throwable t) {
+      if (t instanceof OutOfMemoryError) {
+        throw (OutOfMemoryError) t;
       }
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, t);
     }
   }
 
@@ -169,9 +102,7 @@
   protected void handlePostExecution(Logger log) {
     
     handleException(log);
-    
-    // TODO: should status=0 (success?) be left as-is in the response header?
-    SolrCore.postDecorateResponse(null, solrRequest, solrResponse);
+
     addDeprecatedWarning();
 
     if (log.isInfoEnabled() && solrResponse.getToLog().size() > 0) {
@@ -181,7 +112,6 @@
 
   protected void addDeprecatedWarning(){
     solrResponse.add("warn","This API is deprecated");
-
   }
 
   /**
@@ -197,8 +127,7 @@
     if (null != exception) {
       @SuppressWarnings({"rawtypes"})
       NamedList info = new SimpleOrderedMap();
-      int code = ResponseUtils.getErrorInfo(exception, info, log);
-      setStatus(Status.valueOf(code));
+      this.statusCode = ResponseUtils.getErrorInfo(exception, info, log);
       getSolrResponse().add("error", info);
       String message = (String)info.get("msg");
       if (null != message && ! message.trim().isEmpty()) {
diff --git a/solr/core/src/java/org/apache/solr/rest/DELETEable.java b/solr/core/src/java/org/apache/solr/rest/DELETEable.java
deleted file mode 100644
index 591f35c..0000000
--- a/solr/core/src/java/org/apache/solr/rest/DELETEable.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.rest;
-
-import org.restlet.representation.Representation;
-import org.restlet.resource.Delete;
-
-/** Marker interface for resource classes that handle DELETE requests. */
-public interface DELETEable {
-  @Delete
-  public Representation delete();
-}
diff --git a/solr/core/src/java/org/apache/solr/rest/GETable.java b/solr/core/src/java/org/apache/solr/rest/GETable.java
deleted file mode 100644
index 131ffe9..0000000
--- a/solr/core/src/java/org/apache/solr/rest/GETable.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.rest;
-
-import org.restlet.representation.Representation;
-import org.restlet.resource.Get;
-
-/** Marker interface for resource classes that handle GET requests. */
-public interface GETable {
-  @Get
-  public Representation get();
-}
diff --git a/solr/core/src/java/org/apache/solr/rest/ManagedResource.java b/solr/core/src/java/org/apache/solr/rest/ManagedResource.java
index f9da549..c6e7adf 100644
--- a/solr/core/src/java/org/apache/solr/rest/ManagedResource.java
+++ b/solr/core/src/java/org/apache/solr/rest/ManagedResource.java
@@ -31,9 +31,6 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
-import org.restlet.data.Status;
-import org.restlet.representation.Representation;
-import org.restlet.resource.ResourceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -154,7 +151,7 @@
   }
 
   /**
-   * Called from {@link #doPut(BaseSolrResource,Representation,Object)}
+   * Called from {@link #doPut(BaseSolrResource,Object)}
    * to update this resource's init args using the given updatedArgs
    */
   @SuppressWarnings("unchecked")
@@ -277,7 +274,7 @@
           "Failed to store data for %s due to: %s",
           resourceId, storeErr.toString());
       log.error(errMsg, storeErr);
-      throw new ResourceException(Status.SERVER_ERROR_INTERNAL, errMsg, storeErr);
+      throw new SolrException(ErrorCode.SERVER_ERROR, errMsg, storeErr);
     }
   }
 
@@ -352,18 +349,18 @@
   }
   
   /**
-   * Just calls {@link #doPut(BaseSolrResource,Representation,Object)};
+   * Just calls {@link #doPut(BaseSolrResource,Object)};
    * override to change the behavior of POST handling.
    */
-  public void doPost(BaseSolrResource endpoint, Representation entity, Object json) {
-    doPut(endpoint, entity, json);
+  public void doPost(BaseSolrResource endpoint, Object json) {
+    doPut(endpoint, json);
   }
   
   /**
    * Applies changes to initArgs or managed data.
    */
   @SuppressWarnings("unchecked")
-  public synchronized void doPut(BaseSolrResource endpoint, Representation entity, Object json) {
+  public synchronized void doPut(BaseSolrResource endpoint, Object json) {
 
     if (log.isInfoEnabled()) {
       log.info("Processing update to {}: {} is a {}", getResourceId(), json, json.getClass().getName());
@@ -392,7 +389,7 @@
     } else if (json instanceof List) {
       managedData = json;
     } else {
-      throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, 
+      throw new SolrException(ErrorCode.BAD_REQUEST,
           "Unsupported update format "+json.getClass().getName());
     }
         
@@ -425,15 +422,13 @@
   protected abstract Object applyUpdatesToManagedData(Object updates);
 
   /**
-   * Called by {@link RestManager.ManagedEndpoint#delete()}
-   * to delete a named part (the given childId) of the
+   * Called to delete a named part (the given childId) of the
    * resource at the given endpoint
    */
   public abstract void doDeleteChild(BaseSolrResource endpoint, String childId);
 
   /**
-   * Called by {@link RestManager.ManagedEndpoint#get()}
-   * to retrieve a named part (the given childId) of the
+   * Called to retrieve a named part (the given childId) of the
    * resource at the given endpoint
    */
   public abstract void doGet(BaseSolrResource endpoint, String childId);
diff --git a/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java b/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java
index 52ad830..e9102a2 100644
--- a/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java
+++ b/solr/core/src/java/org/apache/solr/rest/ManagedResourceStorage.java
@@ -44,8 +44,6 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
-import org.restlet.data.Status;
-import org.restlet.resource.ResourceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -302,7 +300,7 @@
             if (e instanceof RuntimeException) {
               throw (RuntimeException)e;              
             } else {
-              throw new ResourceException(Status.SERVER_ERROR_INTERNAL, 
+              throw new SolrException(ErrorCode.SERVER_ERROR,
                   "Failed to save data to ZooKeeper znode: "+znodePath+" due to: "+e, e);
             }
           }
diff --git a/solr/core/src/java/org/apache/solr/rest/POSTable.java b/solr/core/src/java/org/apache/solr/rest/POSTable.java
deleted file mode 100644
index 5b7fd4e..0000000
--- a/solr/core/src/java/org/apache/solr/rest/POSTable.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.rest;
-
-import org.restlet.representation.Representation;
-import org.restlet.resource.Post;
-
-/** Marker interface for resource classes that handle POST requests. */
-public interface POSTable {
-  @Post
-  public Representation post(Representation representation);
-}
diff --git a/solr/core/src/java/org/apache/solr/rest/PUTable.java b/solr/core/src/java/org/apache/solr/rest/PUTable.java
deleted file mode 100644
index e8b27b4..0000000
--- a/solr/core/src/java/org/apache/solr/rest/PUTable.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.rest;
-
-import org.restlet.representation.Representation;
-import org.restlet.resource.Put;
-
-/** Marker interface for resource classes that handle PUT requests. */
-public interface PUTable {
-  @Put
-  public Representation put(Representation entity);
-}
diff --git a/solr/core/src/java/org/apache/solr/rest/RestManager.java b/solr/core/src/java/org/apache/solr/rest/RestManager.java
index addee77..ea1d3c0 100644
--- a/solr/core/src/java/org/apache/solr/rest/RestManager.java
+++ b/solr/core/src/java/org/apache/solr/rest/RestManager.java
@@ -18,13 +18,17 @@
 
 import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.Reader;
+import java.io.UnsupportedEncodingException;
 import java.lang.invoke.MethodHandles;
 import java.lang.reflect.Constructor;
+import java.net.URLDecoder;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Locale;
@@ -36,22 +40,15 @@
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
+import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.request.SolrQueryRequest;
-import org.apache.solr.request.SolrRequestInfo;
+import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
-import org.restlet.Request;
-import org.restlet.data.MediaType;
-import org.restlet.data.Method;
-import org.restlet.data.Status;
-import org.restlet.representation.Representation;
-import org.restlet.resource.ResourceException;
-import org.restlet.routing.Router;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import static org.apache.solr.common.util.Utils.fromJSONString;
+import static org.apache.solr.common.util.Utils.fromJSON;
 
 /**
  * Supports runtime mapping of REST API endpoints to ManagedResource 
@@ -122,11 +119,6 @@
 
     public Registry() {
       reservedEndpoints.add(SCHEMA_BASE_PATH + MANAGED_ENDPOINT);
-
-      for (String reservedEndpoint : SolrSchemaRestApi.getReservedEndpoints()) {
-        reservedEndpoints.add(reservedEndpoint);
-      }
-
       reservedEndpointsPattern = getReservedEndpointsPattern();
     }
 
@@ -210,7 +202,7 @@
       // it's ok to re-register the same class for an existing path
       ManagedResourceRegistration reg = registered.get(resourceId);
       if (reg != null) {
-        if (!reg.implClass.equals(implClass)) {
+        if (!implClass.equals(reg.implClass)) {
           String errMsg = String.format(Locale.ROOT,
               "REST API path %s already registered to instances of %s",
               resourceId, reg.implClass.getName());
@@ -240,46 +232,42 @@
   }  
 
   /**
-   * Locates the RestManager using ThreadLocal SolrRequestInfo.
+   * Request handling needs a lightweight object to delegate a request to.
+   * ManagedResource implementations are heavy-weight objects that live for the duration of
+   * a SolrCore, so this class acts as the proxy between the request handler and a
+   * ManagedResource when doing request processing.
    */
-  public static RestManager getRestManager(SolrRequestInfo solrRequestInfo) {
-    if (solrRequestInfo == null)
-      throw new ResourceException(Status.SERVER_ERROR_INTERNAL, 
-          "No SolrRequestInfo in this Thread!");
+  public static class ManagedEndpoint extends BaseSolrResource {
 
-    SolrQueryRequest req = solrRequestInfo.getReq();
-    RestManager restManager = 
-        (req != null) ? req.getCore().getRestManager() : null;
-    
-    if (restManager == null)
-      throw new ResourceException(Status.SERVER_ERROR_INTERNAL, 
-          "No RestManager found!");
-    
-    return restManager;
-  }
-  
-  /**
-   * The Restlet router needs a lightweight extension of ServerResource to delegate a request
-   * to. ManagedResource implementations are heavy-weight objects that live for the duration of
-   * a SolrCore, so this class acts as the proxy between Restlet and a ManagedResource when
-   * doing request processing.
-   *
-   */
-  public static class ManagedEndpoint extends BaseSolrResource
-      implements GETable, PUTable, POSTable, DELETEable
-  {
+    final RestManager restManager;
+
+    public ManagedEndpoint(RestManager restManager) {
+      this.restManager = restManager;
+    }
+
     /**
-     * Determines the ManagedResource resourceId from the Restlet request.
+     * Determines the ManagedResource resourceId from the request path.
      */
-    public static String resolveResourceId(Request restletReq)  {
-      String resourceId = restletReq.getResourceRef().
-          getRelativeRef(restletReq.getRootRef().getParentRef()).getPath(DECODE);
-      
+    public static String resolveResourceId(final String path)  {
+      String resourceId;
+      try {
+        resourceId = URLDecoder.decode(path, "UTF-8");
+      } catch (UnsupportedEncodingException e) {
+        throw new RuntimeException(e); // shouldn't happen
+      }
+
+      int at = resourceId.indexOf("/schema");
+      if (at == -1) {
+        at = resourceId.indexOf("/config");
+      }
+      if (at > 0) {
+        resourceId = resourceId.substring(at);
+      }
+
       // all resources are registered with the leading slash
       if (!resourceId.startsWith("/"))
         resourceId = "/"+resourceId;
 
-
       return resourceId;
     }
     
@@ -292,18 +280,11 @@
      * dynamically locate the ManagedResource associated with the request URI.
      */
     @Override
-    public void doInit() throws ResourceException {
-      super.doInit();      
-      
-      // get the relative path to the requested resource, which is
-      // needed to locate ManagedResource impls at runtime
-      String resourceId = resolveResourceId(getRequest());
+    public void doInit(SolrQueryRequest solrRequest, SolrQueryResponse solrResponse) {
+      super.doInit(solrRequest, solrResponse);
 
-      // supports a request for a registered resource or its child
-      RestManager restManager = 
-          RestManager.getRestManager(SolrRequestInfo.getRequestInfo());
-      
-      managedResource = restManager.getManagedResourceOrNull(resourceId);      
+      final String resourceId = resolveResourceId(solrRequest.getPath());
+      managedResource = restManager.getManagedResourceOrNull(resourceId);
       if (managedResource == null) {
         // see if we have a registered endpoint one-level up ...
         int lastSlashAt = resourceId.lastIndexOf('/');
@@ -317,7 +298,7 @@
             if (!(managedResource instanceof ManagedResource.ChildResourceSupport)) {
               String errMsg = String.format(Locale.ROOT,
                   "%s does not support child resources!", managedResource.getResourceId());
-              throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, errMsg);
+              throw new SolrException(ErrorCode.BAD_REQUEST, errMsg);
             }
             
             childId = resourceId.substring(lastSlashAt+1);
@@ -326,44 +307,46 @@
           }
         }
       }    
-      
+
       if (managedResource == null) {
-        if (Method.PUT.equals(getMethod()) || Method.POST.equals(getMethod())) {
+        final String method = getSolrRequest().getHttpMethod();
+        if ("PUT".equals(method) || "POST".equals(method)) {
           // delegate create requests to the RestManager
           managedResource = restManager.endpoint;
-        } else {        
-          throw new ResourceException(Status.CLIENT_ERROR_NOT_FOUND, 
+        } else {
+          throw new SolrException(ErrorCode.BAD_REQUEST,
               "No REST managed resource registered for path "+resourceId);
         }
       }
-      
-      log.info("Found ManagedResource [{}] for {}", managedResource, resourceId);
-    }    
-    
-    @Override
-    public Representation put(Representation entity) {
-      try {
-        managedResource.doPut(this, entity, parseJsonFromRequestBody(entity));
-      } catch (Exception e) {
-        getSolrResponse().setException(e);        
-      }
-      handlePostExecution(log);
-      return new SolrOutputRepresentation();    
-    }
-    
-    @Override
-    public Representation post(Representation entity) {
-      try {
-        managedResource.doPost(this, entity, parseJsonFromRequestBody(entity));
-      } catch (Exception e) {
-        getSolrResponse().setException(e);        
-      }
-      handlePostExecution(log);
-      return new SolrOutputRepresentation();    
-    }    
 
-    @Override
-    public Representation delete() {
+      log.info("Found ManagedResource [{}] for {}", managedResource, resourceId);
+    }
+
+    public void delegateRequestToManagedResource() {
+      SolrQueryRequest req = getSolrRequest();
+      final String method = req.getHttpMethod();
+      try {
+        switch (method) {
+          case "GET":
+            managedResource.doGet(this, childId);
+            break;
+          case "PUT":
+            managedResource.doPut(this, parseJsonFromRequestBody(req));
+            break;
+          case "POST":
+            managedResource.doPost(this, parseJsonFromRequestBody(req));
+            break;
+          case "DELETE":
+            doDelete();
+            break;
+        }
+      } catch (Exception e) {
+        getSolrResponse().setException(e);
+      }
+      handlePostExecution(log);
+    }
+
+    protected void doDelete() {
       // only delegate delete child resources to the ManagedResource
       // as deleting the actual resource is best handled by the
       // RestManager
@@ -375,68 +358,24 @@
         }
       } else {
         try {
-          RestManager restManager = 
-              RestManager.getRestManager(SolrRequestInfo.getRequestInfo());
           restManager.deleteManagedResource(managedResource);
         } catch (Exception e) {
           getSolrResponse().setException(e);        
         }
       }
       handlePostExecution(log);
-      return new SolrOutputRepresentation();    
-    }    
-        
-    @Override
-    public Representation get() { 
-      try {
-        managedResource.doGet(this, childId);
-      } catch (Exception e) {
-        getSolrResponse().setException(e);        
-      }
-      handlePostExecution(log);
-      return new SolrOutputRepresentation();    
-    }     
-    
-    /**
-     * Parses and validates the JSON passed from the to the ManagedResource. 
-     */
-    protected Object parseJsonFromRequestBody(Representation entity) {
-      if (entity.getMediaType() == null) {
-        entity.setMediaType(MediaType.APPLICATION_JSON);
-      }
-      
-      if (!entity.getMediaType().equals(MediaType.APPLICATION_JSON, true)) {
-        String errMsg = String.format(Locale.ROOT,
-            "Invalid content type %s; only %s is supported.",
-            entity.getMediaType(), MediaType.APPLICATION_JSON.toString());
-        log.error(errMsg);
-        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, errMsg);
-      }
-      
-      String text = null;
-      try {
-        text = entity.getText();
-      } catch (IOException ioExc) {
-        String errMsg = "Failed to read entity text due to: "+ioExc;
-        log.error(errMsg, ioExc);
-        throw new ResourceException(Status.SERVER_ERROR_INTERNAL, errMsg, ioExc);
-      }
-      
-      if (text == null || text.trim().length() == 0) {
-        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Empty request body!");      
-      }
+    }
 
-      Object parsedJson = null;
-      try {
-        parsedJson = fromJSONString(text);
-      } catch (Exception ioExc) {
-        String errMsg = String.format(Locale.ROOT,
-            "Failed to parse request [%s] into JSON due to: %s",
-            text, ioExc.toString());
-        log.error(errMsg, ioExc);
-        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, errMsg, ioExc);
+    protected Object parseJsonFromRequestBody(SolrQueryRequest req) {
+      Iterator<ContentStream> iter = req.getContentStreams().iterator();
+      if (iter.hasNext()) {
+        try (Reader reader = iter.next().getReader()) {
+          return fromJSON(reader);
+        } catch (IOException ioExc) {
+          throw new SolrException(ErrorCode.SERVER_ERROR, ioExc);
+        }
       }
-      return parsedJson;
+      throw new SolrException(ErrorCode.BAD_REQUEST, "No JSON body found in request!");
     }
 
     @Override
@@ -518,16 +457,16 @@
      */
     @SuppressWarnings("unchecked")
     @Override
-    public synchronized void doPut(BaseSolrResource endpoint, Representation entity, Object json) {      
+    public synchronized void doPut(BaseSolrResource endpoint, Object json) {
       if (json instanceof Map) {
-        String resourceId = ManagedEndpoint.resolveResourceId(endpoint.getRequest());
+        String resourceId = ManagedEndpoint.resolveResourceId(endpoint.getSolrRequest().getPath());
         Map<String,String> info = (Map<String,String>)json;
         info.put("resourceId", resourceId);
         storeManagedData(applyUpdatesToManagedData(json));
       } else {
-        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, 
+        throw new SolrException(ErrorCode.BAD_REQUEST,
             "Expected Map to create a new ManagedResource but received a "+json.getClass().getName());
-      }          
+      }
       // PUT just returns success status code with an empty body
     }
 
@@ -539,15 +478,15 @@
     @SuppressWarnings("unchecked")
     @Override
     protected Object applyUpdatesToManagedData(Object updates) {
-      Map<String,String> info = (Map<String,String>)updates;      
+      Map<String,String> info = (Map<String,String>)updates;
       // this is where we'd register a new ManagedResource
       String implClass = info.get("class");
       String resourceId = info.get("resourceId");
-      log.info("Creating a new ManagedResource of type {} at path {}", 
+      log.info("Creating a new ManagedResource of type {} at path {}",
           implClass, resourceId);
-      Class<? extends ManagedResource> clazz = 
+      Class<? extends ManagedResource> clazz =
           solrResourceLoader.findClass(implClass, ManagedResource.class);
-      
+
       // add this new resource to the RestManager
       restManager.addManagedResource(resourceId, clazz);
 
@@ -558,7 +497,7 @@
         if (reg.observers.isEmpty()) {
           managedList.add(reg.getInfo());
         }
-      }          
+      }
       return managedList;
     }
 
@@ -567,18 +506,18 @@
      */
     @Override
     public void doDeleteChild(BaseSolrResource endpoint, String childId) {
-      throw new ResourceException(Status.SERVER_ERROR_NOT_IMPLEMENTED);
+      throw new SolrException(ErrorCode.BAD_REQUEST, "Delete child resource not supported!");
     }
 
     @Override
     public void doGet(BaseSolrResource endpoint, String childId) {
       
       // filter results by /schema or /config
-      String path = ManagedEndpoint.resolveResourceId(endpoint.getRequest());
+      String path = ManagedEndpoint.resolveResourceId(endpoint.getSolrRequest().getPath());
       Matcher resourceIdMatcher = resourceIdRegex.matcher(path);
       if (!resourceIdMatcher.matches()) {
         // extremely unlikely but didn't want to squelch it either
-        throw new ResourceException(Status.SERVER_ERROR_NOT_IMPLEMENTED, path);
+        throw new SolrException(ErrorCode.BAD_REQUEST, "Requests to path "+path+" not supported!");
       }
       
       String filter = resourceIdMatcher.group(1);
@@ -603,11 +542,7 @@
   protected Map<String,ManagedResource> managed = new TreeMap<>();
   protected RestManagerManagedResource endpoint;
   protected SolrResourceLoader loader;
-  
-  // refs to these are needed to bind new ManagedResources created using the API
-  protected Router schemaRouter;
-  protected Router configRouter;
-  
+
   /**
    * Initializes the RestManager with the storageIO being optionally created outside of this implementation
    * such as to use ZooKeeper instead of the local FS. 
@@ -625,7 +560,7 @@
     
     this.storageIO = storageIO;
     this.loader = loader;
-    
+
     registry = loader.getManagedResourceRegistry();
     
     // the RestManager provides metadata about managed resources via the /managed endpoint
@@ -651,8 +586,7 @@
 
   /**
    * If not already registered, registers the given {@link ManagedResource} subclass
-   * at the given resourceId, creates an instance, and attaches it to the appropriate
-   * Restlet router.  Returns the corresponding instance.
+   * at the given resourceId, creates an instance. Returns the corresponding instance.
    */
   public synchronized ManagedResource addManagedResource(String resourceId, Class<? extends ManagedResource> clazz) {
     final ManagedResource res;
@@ -665,31 +599,16 @@
     }
     return res;
   }
-  
-  // used internally to create and attach a ManagedResource to the Restlet router
-  // the registry also uses this method directly, which is slightly hacky but necessary
-  // in order to support dynamic adding of new fieldTypes using the managed-schema API
+
+  // cache a mapping of path to ManagedResource
   private synchronized ManagedResource addRegisteredResource(ManagedResourceRegistration reg) {
     String resourceId = reg.resourceId;
     ManagedResource res = createManagedResource(reg);
     managed.put(resourceId, res);
     log.info("Registered new managed resource {}", resourceId);
-    
-    // attach this new resource to the Restlet router
-    Matcher resourceIdValidator = resourceIdRegex.matcher(resourceId);
-    boolean validated = resourceIdValidator.matches();
-    assert validated : "managed resourceId '" + resourceId
-                     + "' should already be validated by registerManagedResource()";
-    String routerPath = resourceIdValidator.group(1);      
-    String path = resourceIdValidator.group(2);
-    Router router = SCHEMA_BASE_PATH.equals(routerPath) ? schemaRouter : configRouter;
-    if (router != null) {
-      attachManagedResource(res, path, router);
-    }
     return res;
   }
 
-
   /**
    * Creates a ManagedResource using registration information. 
    */
@@ -714,14 +633,13 @@
    * Returns the {@link ManagedResource} subclass instance corresponding
    * to the given resourceId from the registry.
    *
-   * @throws ResourceException if no managed resource is registered with
+   * @throws SolrException if no managed resource is registered with
    *  the given resourceId.
    */
   public ManagedResource getManagedResource(String resourceId) {
     ManagedResource res = getManagedResourceOrNull(resourceId);
     if (res == null) {
-      throw new ResourceException(Status.SERVER_ERROR_INTERNAL, 
-          "No ManagedResource registered for path: "+resourceId);
+      throw new SolrException(ErrorCode.NOT_FOUND, "No ManagedResource registered for path: "+resourceId);
     }
     return res;
   }
@@ -759,48 +677,5 @@
       log.error("Error when trying to clean-up after deleting {}",resourceId, e);
     }
   }
-      
-  /**
-   * Attach managed resource paths to the given Restlet Router. 
-   * @param router - Restlet Router
-   */
-  public synchronized void attachManagedResources(String routerPath, Router router) {
-    if (SCHEMA_BASE_PATH.equals(routerPath)) {
-      this.schemaRouter = router;
-    } else {
-      throw new SolrException(ErrorCode.SERVER_ERROR, 
-          routerPath+" not supported by the RestManager");
-    }      
-    
-    int numAttached = 0;
-    for (Map.Entry<String, ManagedResource> entry : managed.entrySet()) {
-      String resourceId = entry.getKey();
-      if (resourceId.startsWith(routerPath)) {
-        // the way restlet works is you attach a path w/o the routerPath
-        String path = resourceId.substring(routerPath.length());
-        attachManagedResource(entry.getValue(), path, router);
-        ++numAttached;
-      }
-    }
-    
-    log.info("Attached {} ManagedResource endpoints to Restlet router: {}", 
-        numAttached, routerPath);
-  }
-  
-  /**
-   * Attaches a ManagedResource and optionally a path for child resources
-   * to the given Restlet Router.
-   */
-  protected void attachManagedResource(ManagedResource res, String path, Router router) {
-    router.attach(path, res.getServerResourceClass());
-    log.info("Attached managed resource at path: {}",path);
-    
-    // Determine if we should also route requests for child resources
-    // ManagedResource.ChildResourceSupport is a marker interface that
-    // indicates the ManagedResource also manages child resources at
-    // a path one level down from the main resourceId
-    if (ManagedResource.ChildResourceSupport.class.isAssignableFrom(res.getClass())) {
-      router.attach(path+"/{child}", res.getServerResourceClass());
-    }    
-  }
+
 }
diff --git a/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java b/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java
deleted file mode 100644
index aa7826a..0000000
--- a/solr/core/src/java/org/apache/solr/rest/SolrSchemaRestApi.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.rest;
-
-import java.lang.invoke.MethodHandles;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.Set;
-
-import org.apache.solr.request.SolrRequestInfo;
-import org.restlet.Application;
-import org.restlet.Restlet;
-import org.restlet.routing.Router;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * Restlet servlet handling /&lt;context&gt;/&lt;collection&gt;/schema/* URL paths
- */
-public class SolrSchemaRestApi extends Application {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-
-  /**
-   * Returns reserved endpoints under /schema
-   */
-  public static Set<String> getReservedEndpoints() {
-    Set<String> reservedEndpoints = new HashSet<>();
-    return Collections.unmodifiableSet(reservedEndpoints);
-  }
-
-  private Router router;
-
-  public SolrSchemaRestApi() {
-    router = new Router(getContext());
-  }
-
-  @Override
-  public void stop() throws Exception {
-    if (null != router) {
-      router.stop();
-    }
-  }
-
-  /**
-   * Bind URL paths to the appropriate ServerResource subclass. 
-   */
-  @Override
-  public synchronized Restlet createInboundRoot() {
-
-    log.info("createInboundRoot started for /schema");
-
-
-    router.attachDefault(RestManager.ManagedEndpoint.class);
-    
-    // attach all the dynamically registered schema resources
-    RestManager.getRestManager(SolrRequestInfo.getRequestInfo())
-        .attachManagedResources(RestManager.SCHEMA_BASE_PATH, router);
-
-    log.info("createInboundRoot complete for /schema");
-
-    return router;
-  }  
-}
diff --git a/solr/core/src/java/org/apache/solr/rest/package-info.java b/solr/core/src/java/org/apache/solr/rest/package-info.java
index b903928..ef642aa 100644
--- a/solr/core/src/java/org/apache/solr/rest/package-info.java
+++ b/solr/core/src/java/org/apache/solr/rest/package-info.java
@@ -16,7 +16,7 @@
  */
  
 /** 
- * Solr RESTful APIs via Restlet.
+ * Solr RESTful APIs.
  */
 package org.apache.solr.rest;
 
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
index 69dcf80..da25697 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymFilterFactory.java
@@ -43,8 +43,6 @@
 import org.apache.solr.rest.BaseSolrResource;
 import org.apache.solr.rest.ManagedResource;
 import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
-import org.restlet.data.Status;
-import org.restlet.resource.ResourceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -180,7 +178,7 @@
       } else if (updates instanceof Map) {
         madeChanges = applyMapUpdates((Map<String,Object>)updates, ignoreCase);
       } else {
-        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST,
+        throw new SolrException(ErrorCode.BAD_REQUEST,
             "Unsupported data format (" + updates.getClass().getName() + "); expected a JSON object (Map or List)!");
       }
       return madeChanges ? getStoredView() : null;
@@ -250,7 +248,7 @@
           }
 
         } else {
-          throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Unsupported value "+val+
+          throw new SolrException(ErrorCode.BAD_REQUEST, "Unsupported value "+val+
               " for "+term+"; expected single value or a JSON array!");
         }
 
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
index 0f00947..fa11c84 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/ManagedSynonymGraphFilterFactory.java
@@ -42,8 +42,6 @@
 import org.apache.solr.rest.BaseSolrResource;
 import org.apache.solr.rest.ManagedResource;
 import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
-import org.restlet.data.Status;
-import org.restlet.resource.ResourceException;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -175,7 +173,7 @@
       } else if (updates instanceof Map) {
         madeChanges = applyMapUpdates((Map<String,Object>)updates, ignoreCase);
       } else {
-        throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST,
+        throw new SolrException(ErrorCode.BAD_REQUEST,
             "Unsupported data format (" + updates.getClass().getName() + "); expected a JSON object (Map or List)!");
       }
       return madeChanges ? getStoredView() : null;
@@ -245,7 +243,7 @@
           }
 
         } else {
-          throw new ResourceException(Status.CLIENT_ERROR_BAD_REQUEST, "Unsupported value "+val+
+          throw new SolrException(ErrorCode.BAD_REQUEST, "Unsupported value "+val+
               " for "+term+"; expected single value or a JSON array!");
         }
 
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/analysis/package-info.java b/solr/core/src/java/org/apache/solr/rest/schema/analysis/package-info.java
index 04d4428..8124188 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/analysis/package-info.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/analysis/package-info.java
@@ -16,7 +16,8 @@
  */
  
 /** 
- * Analysis-related functionality for RESTful API access to the Solr Schema using Restlet.
+ * Analysis-related functionality for RESTful API access to managed resources related to the schema, such
+ * as stopwords, protected words, and synonyms.
  */
 package org.apache.solr.rest.schema.analysis;
 
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/package-info.java b/solr/core/src/java/org/apache/solr/rest/schema/package-info.java
index ca5fd2c..3712359 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/package-info.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/package-info.java
@@ -16,7 +16,7 @@
  */
  
 /** 
- * Provides RESTful API access to the Solr Schema using Restlet.
+ * Provides RESTful API access to managed resources in the Solr Schema.
  */
 package org.apache.solr.rest.schema;
 
diff --git a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
index c1815e0..e7a748c 100644
--- a/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
+++ b/solr/core/src/java/org/apache/solr/schema/ManagedIndexSchema.java
@@ -79,6 +79,8 @@
 import org.slf4j.LoggerFactory;
 import org.xml.sax.InputSource;
 
+import static org.apache.solr.core.SolrResourceLoader.informAware;
+
 /** Solr-managed schema - non-user-editable, but can be mutable via internal and external REST API requests. */
 public final class ManagedIndexSchema extends IndexSchema {
 
@@ -1324,7 +1326,7 @@
     for (CharFilterFactory next : charFilters) {
       if (next instanceof ResourceLoaderAware) {
         try {
-          ((ResourceLoaderAware) next).inform(loader);
+          informAware(loader, (ResourceLoaderAware) next);
         } catch (IOException e) {
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
         }
@@ -1334,7 +1336,7 @@
     TokenizerFactory tokenizerFactory = chain.getTokenizerFactory();
     if (tokenizerFactory instanceof ResourceLoaderAware) {
       try {
-        ((ResourceLoaderAware) tokenizerFactory).inform(loader);
+        informAware(loader, (ResourceLoaderAware) tokenizerFactory);
       } catch (IOException e) {
         throw new SolrException(ErrorCode.SERVER_ERROR, e);
       }
@@ -1343,13 +1345,10 @@
     TokenFilterFactory[] filters = chain.getTokenFilterFactories();
     for (TokenFilterFactory next : filters) {
       if (next instanceof ResourceLoaderAware) {
-        SolrResourceLoader.CURRENT_AWARE.set((ResourceLoaderAware) next);
         try {
-          ((ResourceLoaderAware) next).inform(loader);
+          informAware(loader, (ResourceLoaderAware) next);
         } catch (IOException e) {
           throw new SolrException(ErrorCode.SERVER_ERROR, e);
-        } finally {
-          SolrResourceLoader.CURRENT_AWARE.remove();
         }
       }
     }
diff --git a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
index 8da244b..756718c 100644
--- a/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
+++ b/solr/core/src/java/org/apache/solr/search/CaffeineCache.java
@@ -364,7 +364,7 @@
   @Override
   public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
     solrMetricsContext = parentContext.getChildContext(this);
-    cacheMap = new MetricsMap((detailed, map) -> {
+    cacheMap = new MetricsMap(map -> {
       if (cache != null) {
         CacheStats stats = cache.stats();
         long insertCount = inserts.sum();
diff --git a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
index 05ae26e..9a1e740 100644
--- a/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
+++ b/solr/core/src/java/org/apache/solr/search/CollapsingQParserPlugin.java
@@ -122,12 +122,57 @@
 public class CollapsingQParserPlugin extends QParserPlugin {
 
   public static final String NAME = "collapse";
-  public static final String NULL_COLLAPSE = "collapse";
-  public static final String NULL_IGNORE = "ignore";
-  public static final String NULL_EXPAND = "expand";
   public static final String HINT_TOP_FC = "top_fc";
+
+  /**
+   * @deprecated use {@link NullPolicy} instead.
+   */
+  @Deprecated
+  public static final String NULL_COLLAPSE = "collapse";
+  @Deprecated
+  public static final String NULL_IGNORE = "ignore";
+  @Deprecated
+  public static final String NULL_EXPAND = "expand";
+  @Deprecated
   public static final String HINT_MULTI_DOCVALUES = "multi_docvalues";
 
+  public enum NullPolicy {
+    IGNORE("ignore", 0),
+    COLLAPSE("collapse", 1),
+    EXPAND("expand", 2);
+
+    private final String name;
+    private final int code;
+
+    NullPolicy(String name, int code) {
+      this.name = name;
+      this.code = code;
+    }
+
+    public String getName() {
+      return name;
+    }
+
+    public int getCode() {
+      return code;
+    }
+
+    public static NullPolicy fromString(String nullPolicy) {
+      if (StringUtils.isEmpty(nullPolicy)) {
+        return DEFAULT_POLICY;
+      }
+      switch (nullPolicy) {
+        case "ignore": return IGNORE;
+        case "collapse": return COLLAPSE;
+        case "expand": return EXPAND;
+        default:
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid nullPolicy: " + nullPolicy);
+      }
+    }
+
+    static NullPolicy DEFAULT_POLICY = IGNORE;
+  }
+
 
   public QParser createParser(String qstr, SolrParams localParams, SolrParams params, SolrQueryRequest request) {
     return new CollapsingQParser(qstr, localParams, params, request);
@@ -188,6 +233,11 @@
       return 17 * (31 + selectorText.hashCode()) * (31 + type.hashCode());
     }
 
+    @Override
+    public String toString(){
+      return "GroupHeadSelector(selectorText=" + this.selectorText + ", type=" +this.type + ")";
+    }
+
     /**
      * returns a new GroupHeadSelector based on the specified local params
      */
@@ -221,11 +271,8 @@
     public String hint;
     private boolean needsScores = true;
     private boolean needsScores4Collapsing = false;
-    private int nullPolicy;
+    private NullPolicy nullPolicy;
     private Set<BytesRef> boosted; // ordered by "priority"
-    public static final int NULL_POLICY_IGNORE = 0;
-    public static final int NULL_POLICY_COLLAPSE = 1;
-    public static final int NULL_POLICY_EXPAND = 2;
     private int size;
 
     public String getField(){
@@ -254,7 +301,7 @@
       int hashCode = classHash();
       hashCode = 31 * hashCode + collapseField.hashCode();
       hashCode = 31 * hashCode + groupHeadSelector.hashCode();
-      hashCode = 31 * hashCode + nullPolicy;
+      hashCode = 31 * hashCode + nullPolicy.hashCode();
       return hashCode;
     }
 
@@ -279,7 +326,12 @@
     }
 
     public String toString(String s) {
-      return s;
+      return "CollapsingPostFilter(field=" + this.collapseField +
+          ", nullPolicy=" + this.nullPolicy.getName() + ", " +
+          this.groupHeadSelector +
+          (hint == null ? "": ", hint=" + this.hint) +
+          ", size=" + this.size
+          + ")";
     }
 
     public CollapsingPostFilter(SolrParams localParams, SolrParams params, SolrQueryRequest request) {
@@ -354,16 +406,7 @@
         }
       }
 
-      String nPolicy = localParams.get("nullPolicy", NULL_IGNORE);
-      if(nPolicy.equals(NULL_IGNORE)) {
-        this.nullPolicy = NULL_POLICY_IGNORE;
-      } else if (nPolicy.equals(NULL_COLLAPSE)) {
-        this.nullPolicy = NULL_POLICY_COLLAPSE;
-      } else if(nPolicy.equals((NULL_EXPAND))) {
-        this.nullPolicy = NULL_POLICY_EXPAND;
-      } else {
-        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Invalid nullPolicy:"+nPolicy);
-      }
+      this.nullPolicy = NullPolicy.fromString(localParams.get("nullPolicy"));
     }
 
     @SuppressWarnings({"unchecked"})
@@ -392,7 +435,7 @@
         return collectorFactory.getCollector(this.collapseField,
                                              this.groupHeadSelector,
                                              this.sortSpec,
-                                             this.nullPolicy,
+                                             this.nullPolicy.getCode(),
                                              this.hint,
                                              this.needsScores4Collapsing,
                                              this.needsScores,
@@ -552,7 +595,7 @@
       this.ords = new IntIntDynamicMap(valueCount, -1);
       this.scores = new IntFloatDynamicMap(valueCount, -Float.MAX_VALUE);
       this.nullPolicy = nullPolicy;
-      if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      if(nullPolicy == NullPolicy.EXPAND.getCode()) {
         nullScores = new FloatArrayList();
       }
 
@@ -620,13 +663,13 @@
           ords.put(ord, globalDoc);
           scores.put(ord, score);
         }
-      } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         float score = scorer.score();
         if(score > nullScore) {
           nullScore = score;
           nullDoc = globalDoc;
         }
-      } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(nullPolicy == NullPolicy.EXPAND.getCode()) {
         collapsedSet.set(globalDoc);
         nullScores.add(scorer.score());
       }
@@ -716,9 +759,9 @@
           dummy.score = scores.get(ord);
         } else if(boosts && mergeBoost.boost(docId)) {
           //Ignore so it doesn't mess up the null scoring.
-        } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+        } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
           dummy.score = nullScore;
-        } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
           dummy.score = nullScores.get(++index);
         }
 
@@ -772,7 +815,7 @@
       this.collapsedSet = new FixedBitSet(maxDoc);
       this.nullValue = nullValue;
       this.nullPolicy = nullPolicy;
-      if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      if(nullPolicy == NullPolicy.EXPAND.getCode()) {
         nullScores = new FloatArrayList();
       }
       this.cmap = new IntLongHashMap(size);
@@ -839,13 +882,13 @@
           long scoreDoc = (((long)Float.floatToRawIntBits(score))<<32)+globalDoc;
           cmap.indexInsert(idx, collapseValue, scoreDoc);
         }
-      } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         float score = scorer.score();
         if(score > this.nullScore) {
           this.nullScore = score;
           this.nullDoc = globalDoc;
         }
-      } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(nullPolicy == NullPolicy.EXPAND.getCode()) {
         collapsedSet.set(globalDoc);
         nullScores.add(scorer.score());
       }
@@ -917,9 +960,9 @@
           dummy.score = Float.intBitsToFloat((int)(scoreDoc>>32));
         } else if(boosts && mergeBoost.boost(globalDoc)) {
           //Ignore so boosted documents don't mess up the null scoring policies.
-        } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+        } else if (nullPolicy == NullPolicy.COLLAPSE.getCode()) {
           dummy.score = nullScore;
-        } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        } else if(nullPolicy == NullPolicy.EXPAND.getCode()) {
           dummy.score = nullScores.get(nullScoreIndex++);
         }
 
@@ -1114,9 +1157,9 @@
           } else if (mergeBoost != null && mergeBoost.boost(globalDoc)) {
             //It's an elevated doc so no score is needed
             dummy.score = 0F;
-          } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+          } else if (nullPolicy == NullPolicy.COLLAPSE.getCode()) {
             dummy.score = nullScore;
-          } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+          } else if(nullPolicy == NullPolicy.EXPAND.getCode()) {
             dummy.score = nullScores.get(nullScoreIndex++);
           }
         }
@@ -1273,9 +1316,9 @@
           } else if (mergeBoost != null && mergeBoost.boost(globalDoc)) {
             //Its an elevated doc so no score is needed
             dummy.score = 0F;
-          } else if (nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+          } else if (nullPolicy == NullPolicy.COLLAPSE.getCode()) {
             dummy.score = nullScore;
-          } else if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+          } else if(nullPolicy == NullPolicy.EXPAND.getCode()) {
             dummy.score = nullScores.get(nullScoreIndex++);
           }
         }
@@ -1538,7 +1581,7 @@
 
       if (this.needsScores) {
         this.scores = new IntFloatDynamicMap(valueCount, 0.0f);
-        if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        if(nullPolicy == NullPolicy.EXPAND.getCode()) {
           nullScores = new FloatArrayList();
         }
       }
@@ -1648,7 +1691,7 @@
             scores.put(ord, scorer.score());
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullVal)) {
           nullVal = currentVal;
           nullDoc = globalDoc;
@@ -1656,7 +1699,7 @@
             nullScore = scorer.score();
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           nullScores.add(scorer.score());
@@ -1729,7 +1772,7 @@
             scores.put(ord, scorer.score());
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullVal)) {
           nullVal = currentVal;
           nullDoc = globalDoc;
@@ -1737,7 +1780,7 @@
             nullScore = scorer.score();
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           nullScores.add(scorer.score());
@@ -1807,7 +1850,7 @@
             scores.put(ord, scorer.score());
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullVal)) {
           nullVal = currentVal;
           nullDoc = globalDoc;
@@ -1815,7 +1858,7 @@
             nullScore = scorer.score();
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           nullScores.add(scorer.score());
@@ -1900,7 +1943,7 @@
             scores.put(ord, score);
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullVal)) {
           nullVal = currentVal;
           nullDoc = globalDoc;
@@ -1911,7 +1954,7 @@
             nullScore = score;
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           if (!needsScores4Collapsing) {
@@ -2001,7 +2044,7 @@
             }
           }
         }
-      } else if (this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if (this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if (-1 == nullDoc) {
           // we've never seen a doc with null collapse key yet, treat it as the null group head for now
           compareState.setNullGroupValues(contextDoc);
@@ -2024,7 +2067,7 @@
             }
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if (needsScores) {
           if (!needsScores4Collapsing) {
@@ -2094,7 +2137,7 @@
 
       if(needsScores) {
         this.scores = new IntFloatDynamicMap(size, 0.0f);
-        if(nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+        if(nullPolicy == NullPolicy.EXPAND.getCode()) {
           nullScores = new FloatArrayList();
         }
       }
@@ -2236,7 +2279,7 @@
             scores.put(index, scorer.score());
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullCompVal)) {
           nullCompVal = currentVal;
           nullDoc = globalDoc;
@@ -2244,7 +2287,7 @@
             nullScore = scorer.score();
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           nullScores.add(scorer.score());
@@ -2329,7 +2372,7 @@
             scores.put(index, scorer.score());
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullCompVal)) {
           nullCompVal = currentVal;
           nullDoc = globalDoc;
@@ -2337,7 +2380,7 @@
             nullScore = scorer.score();
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           nullScores.add(scorer.score());
@@ -2445,7 +2488,7 @@
             scores.put(index, score);
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if(comp.test(currentVal, nullCompVal)) {
           nullCompVal = currentVal;
           nullDoc = globalDoc;
@@ -2456,7 +2499,7 @@
             nullScore = score;
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if(needsScores) {
           if (!needsScores4Collapsing) {
@@ -2556,7 +2599,7 @@
             scores.put(index, score);
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_COLLAPSE) {
+      } else if(this.nullPolicy == NullPolicy.COLLAPSE.getCode()) {
         if (-1 == nullDoc) {
           // we've never seen a doc with null collapse key yet, treat it as the null group head for now
           compareState.setNullGroupValues(contextDoc);
@@ -2579,7 +2622,7 @@
             }
           }
         }
-      } else if(this.nullPolicy == CollapsingPostFilter.NULL_POLICY_EXPAND) {
+      } else if(this.nullPolicy == NullPolicy.EXPAND.getCode()) {
         this.collapsedSet.set(globalDoc);
         if (needsScores) {
           if (!needsScores4Collapsing) {
diff --git a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
index 5005627..193abbf 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrFieldCacheBean.java
@@ -49,8 +49,8 @@
   @Override
   public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
     this.solrMetricsContext = parentContext;
-    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
-      if (detailed && !disableEntryList && !disableJmxEntryList) {
+    MetricsMap metricsMap = new MetricsMap(map -> {
+      if (!disableEntryList && !disableJmxEntryList) {
         UninvertingReader.FieldCacheStats fieldCacheStats = UninvertingReader.getUninvertedStats();
         String[] entries = fieldCacheStats.info;
         map.put("entries_count", entries.length);
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 3ebd43c..5d1af6d 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -2298,8 +2298,8 @@
     }, true, "indexCommitSize", Category.SEARCHER.toString(), scope);
     // statsCache metrics
     parentContext.gauge(
-        new MetricsMap((detailed, map) -> {
-          statsCache.getCacheMetrics().getSnapshot(map::put);
+        new MetricsMap(map -> {
+          statsCache.getCacheMetrics().getSnapshot(map::putNoEx);
           map.put("statsCacheImpl", statsCache.getClass().getSimpleName());
         }), true, "statsCache", Category.CACHE.toString(), scope);
   }
diff --git a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
index 4696241..f625c93 100644
--- a/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/HadoopAuthPlugin.java
@@ -233,7 +233,7 @@
       log.info("----------HTTP Request---------");
       if (log.isInfoEnabled()) {
         log.info("{} : {}", request.getMethod(), request.getRequestURI());
-        log.info("Query : {}", request.getQueryString()); // logOk
+        log.info("Query : {}", request.getQueryString()); // nowarn
       }
       log.info("Headers :");
       Enumeration<String> headers = request.getHeaderNames();
diff --git a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
index 1fed351..4fae614 100644
--- a/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
+++ b/solr/core/src/java/org/apache/solr/servlet/HttpSolrCall.java
@@ -408,26 +408,6 @@
   protected void extractHandlerFromURLPath(SolrRequestParsers parser) throws Exception {
     if (handler == null && path.length() > 1) { // don't match "" or "/" as valid path
       handler = core.getRequestHandler(path);
-
-      if (handler == null) {
-        //may be a restlet path
-        // Handle /schema/* paths via Restlet
-        if (path.equals("/schema") || path.startsWith("/schema/")) {
-          solrReq = parser.parse(core, path, req);
-          SolrRequestInfo.setRequestInfo(new SolrRequestInfo(solrReq, new SolrQueryResponse()));
-          mustClearSolrRequestInfo = true;
-          if (path.equals(req.getServletPath())) {
-            // avoid endless loop - pass through to Restlet via webapp
-            action = PASSTHROUGH;
-          } else {
-            // forward rewritten URI (without path prefix and core/collection name) to Restlet
-            action = FORWARD;
-          }
-          SolrRequestInfo.getRequestInfo().setAction(action);
-          return;
-        }
-      }
-
       // no handler yet but <requestDispatcher> allows us to handle /select with a 'qt' param
       if (handler == null && parser.isHandleSelect()) {
         if ("/select".equals(path) || "/select/".equals(path)) {
@@ -493,7 +473,7 @@
     }
     if (statusCode == AuthorizationResponse.FORBIDDEN.statusCode) {
       if (log.isDebugEnabled()) {
-        log.debug("UNAUTHORIZED auth header {} context : {}, msg: {}", req.getHeader("Authorization"), context, authResponse.getMessage()); // logOk
+        log.debug("UNAUTHORIZED auth header {} context : {}, msg: {}", req.getHeader("Authorization"), context, authResponse.getMessage()); // nowarn
       }
       sendError(statusCode,
           "Unauthorized request, Response code: " + statusCode);
@@ -503,7 +483,7 @@
       return RETURN;
     }
     if (!(statusCode == HttpStatus.SC_ACCEPTED) && !(statusCode == HttpStatus.SC_OK)) {
-      log.warn("ERROR {} during authentication: {}", statusCode, authResponse.getMessage()); // logOk
+      log.warn("ERROR {} during authentication: {}", statusCode, authResponse.getMessage()); // nowarn
       sendError(statusCode,
           "ERROR during authorization, Response code: " + statusCode);
       if (shouldAudit(EventType.ERROR)) {
@@ -666,19 +646,18 @@
     }
   }
 
-  private String getQuerySting() {
-    int internalRequestCount = queryParams.getInt(INTERNAL_REQUEST_COUNT, 0);
-    ModifiableSolrParams updatedQueryParams = new ModifiableSolrParams(queryParams);
-    updatedQueryParams.set(INTERNAL_REQUEST_COUNT, internalRequestCount + 1);
-    return updatedQueryParams.toQueryString();
-  }
-
   //TODO using Http2Client
   private void remoteQuery(String coreUrl, HttpServletResponse resp) throws IOException {
     HttpRequestBase method;
     HttpEntity httpEntity = null;
+
+    ModifiableSolrParams updatedQueryParams = new ModifiableSolrParams(queryParams);
+    int forwardCount = queryParams.getInt(INTERNAL_REQUEST_COUNT, 0) + 1;
+    updatedQueryParams.set(INTERNAL_REQUEST_COUNT, forwardCount);
+    String queryStr = updatedQueryParams.toQueryString();
+
     try {
-      String urlstr = coreUrl + getQuerySting();
+      String urlstr = coreUrl + queryStr;
 
       boolean isPostOrPutRequest = "POST".equals(req.getMethod()) || "PUT".equals(req.getMethod());
       if ("GET".equals(req.getMethod())) {
@@ -728,7 +707,11 @@
         // encoding issues with Tomcat
         if (header != null && !header.getName().equalsIgnoreCase(TRANSFER_ENCODING_HEADER)
             && !header.getName().equalsIgnoreCase(CONNECTION_HEADER)) {
-          resp.addHeader(header.getName(), header.getValue());
+          
+          // NOTE: explicitly using 'setHeader' instead of 'addHeader' so that
+          // the remote node's values for any response headers will override any that
+          // may have already been set locally (ex: by the local jetty's RewriteHandler config)
+          resp.setHeader(header.getName(), header.getValue());
         }
       }
 
@@ -746,7 +729,7 @@
     } catch (IOException e) {
       sendError(new SolrException(
           SolrException.ErrorCode.SERVER_ERROR,
-          "Error trying to proxy request for url: " + coreUrl, e));
+          "Error trying to proxy request for url: " + coreUrl + " with _forwardCount: " + forwardCount, e));
     } finally {
       Utils.consumeFully(httpEntity);
     }
@@ -984,8 +967,8 @@
     if (activeSlices) {
       for (Map.Entry<String, DocCollection> entry : clusterState.getCollectionsMap().entrySet()) {
         final Slice[] activeCollectionSlices = entry.getValue().getActiveSlicesArr();
-        for (Slice s : activeCollectionSlices) {
-          slices.add(s);
+        if (activeCollectionSlices != null) {
+          Collections.addAll(slices, activeCollectionSlices);
         }
       }
     } else {
@@ -1015,9 +998,7 @@
         getSlicesForCollections(clusterState, activeSlices, false);
       }
     } else {
-      for (Slice s : slices) {
-        activeSlices.add(s);
-      }
+      Collections.addAll(activeSlices, slices);
     }
 
     for (Slice s: activeSlices) {
@@ -1033,16 +1014,18 @@
       collectionsList = new ArrayList<>(collectionsList);
       collectionsList.add(collectionName);
     }
-    String coreUrl = getCoreUrl(collectionName, origCorename, clusterState,
-        activeSlices, byCoreName, true);
 
     // Avoid getting into a recursive loop of requests being forwarded by
     // stopping forwarding and erroring out after (totalReplicas) forwards
+    if (queryParams.getInt(INTERNAL_REQUEST_COUNT, 0) > totalReplicas){
+      throw new SolrException(SolrException.ErrorCode.INVALID_STATE,
+          "No active replicas found for collection: " + collectionName);
+    }
+
+    String coreUrl = getCoreUrl(collectionName, origCorename, clusterState,
+        activeSlices, byCoreName, true);
+
     if (coreUrl == null) {
-      if (queryParams.getInt(INTERNAL_REQUEST_COUNT, 0) > totalReplicas){
-        throw new SolrException(SolrException.ErrorCode.INVALID_STATE,
-            "No active replicas found for collection: " + collectionName);
-      }
       coreUrl = getCoreUrl(collectionName, origCorename, clusterState,
           activeSlices, byCoreName, false);
     }
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index 967dffb..95afda4 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -120,8 +120,8 @@
 
   /**
    * Enum to define action that needs to be processed.
-   * PASSTHROUGH: Pass through to Restlet via webapp.
-   * FORWARD: Forward rewritten URI (without path prefix and core/collection name) to Restlet
+   * PASSTHROUGH: Pass through to another filter via webapp.
+   * FORWARD: Forward rewritten URI (without path prefix and core/collection name) to another filter in the chain
    * RETURN: Returns the control, and no further specific processing is needed.
    *  This is generally when an error is set and returned.
    * RETRY:Retry the request. In cases when a core isn't found to work with, this is set.
@@ -235,10 +235,10 @@
       metricManager.registerAll(registryName, new GarbageCollectorMetricSet(), SolrMetricManager.ResolutionStrategy.IGNORE, "gc");
       metricManager.registerAll(registryName, new MemoryUsageGaugeSet(), SolrMetricManager.ResolutionStrategy.IGNORE, "memory");
       metricManager.registerAll(registryName, new ThreadStatesGaugeSet(), SolrMetricManager.ResolutionStrategy.IGNORE, "threads"); // todo should we use CachedThreadStatesGaugeSet instead?
-      MetricsMap sysprops = new MetricsMap((detailed, map) -> {
+      MetricsMap sysprops = new MetricsMap(map -> {
         System.getProperties().forEach((k, v) -> {
           if (!hiddenSysProps.contains(k)) {
-            map.put(String.valueOf(k), v);
+            map.putNoEx(String.valueOf(k), v);
           }
         });
       });
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
index 4a8cffa..2592950 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrRequestParsers.java
@@ -768,8 +768,7 @@
 
       // According to previous StandardRequestParser logic (this is a re-written version),
       // POST was handled normally, but other methods (PUT/DELETE)
-      // were handled by restlet if the URI contained /schema or /config
-      // "handled by restlet" means that we don't attempt to handle any request body here.
+      // were handled by the RestManager classes if the URI contained /schema or /config
       if (!isPost) {
         if (isV2) {
           return raw.parseParamsAndFillStreams(req, streams);
@@ -780,14 +779,14 @@
 
         // OK, we have a BODY at this point
 
-        boolean restletPath = false;
+        boolean schemaRestPath = false;
         int idx = uri.indexOf("/schema");
         if (idx >= 0 && uri.endsWith("/schema") || uri.contains("/schema/")) {
-          restletPath = true;
+          schemaRestPath = true;
         }
 
-        if (restletPath) {
-          return parseQueryString(req.getQueryString());
+        if (schemaRestPath) {
+          return raw.parseParamsAndFillStreams(req, streams);
         }
 
         if ("PUT".equals(method) || "DELETE".equals(method)) {
diff --git a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
index a4e754c..4ae6b88 100644
--- a/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
+++ b/solr/core/src/java/org/apache/solr/spelling/SpellCheckCollator.java
@@ -184,7 +184,7 @@
         collations.add(collation);
       }
       if (log.isDebugEnabled()) {
-        log.debug("Collation: {} {}", collationQueryStr, (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : "")); // logOk
+        log.debug("Collation: {} {}", collationQueryStr, (verifyCandidateWithQuery ? (" will return " + hits + " hits.") : "")); // nowarn
       }
     }
     return collations;
diff --git a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
index 37d735e..76bb265 100644
--- a/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
+++ b/solr/core/src/java/org/apache/solr/store/blockcache/Metrics.java
@@ -59,7 +59,7 @@
   @Override
   public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
     solrMetricsContext = parentContext.getChildContext(this);
-    metricsMap = new MetricsMap((detailed, map) -> {
+    metricsMap = new MetricsMap(map -> {
       long now = System.nanoTime();
       long delta = Math.max(now - previous, 1);
       double seconds = delta / 1000000000.0;
diff --git a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
index 6ff88f2..fe19d12 100644
--- a/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
+++ b/solr/core/src/java/org/apache/solr/store/hdfs/HdfsLocalityReporter.java
@@ -93,7 +93,7 @@
   @Override
   public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
     solrMetricsContext = parentContext.getChildContext(this);
-    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
+    MetricsMap metricsMap = new MetricsMap(map -> {
       long totalBytes = 0;
       long localBytes = 0;
       int totalCount = 0;
diff --git a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
index 0e1806e..2b8b86f 100644
--- a/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
+++ b/solr/core/src/java/org/apache/solr/update/SolrIndexSplitter.java
@@ -312,7 +312,7 @@
           for (int segmentNumber = 0; segmentNumber<leaves.size(); segmentNumber++) {
             if (log.isInfoEnabled()) {
               log.info("SolrIndexSplitter: partition # {} partitionCount={} {} segment #={} segmentCount={}", partitionNumber, numPieces
-                  , (ranges != null ? " range=" + ranges.get(partitionNumber) : ""), segmentNumber, leaves.size()); // logOk
+                  , (ranges != null ? " range=" + ranges.get(partitionNumber) : ""), segmentNumber, leaves.size()); // nowarn
             }
             CodecReader subReader = SlowCodecReaderWrapper.wrap(leaves.get(segmentNumber).reader());
             iw.addIndexes(new LiveDocsReader(subReader, segmentDocSets.get(segmentNumber)[partitionNumber]));
diff --git a/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java b/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java
index 6bc18c3..53f19b9 100644
--- a/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/StartupLoggingUtils.java
@@ -53,7 +53,7 @@
     }
   }
 
-  public static String getLoggerImplStr() {
+  public static String getLoggerImplStr() { // nowarn
     return binder.getLoggerFactoryClassStr();
   }
 
diff --git a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
index c4fdd51..1b7b13c 100644
--- a/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
+++ b/solr/core/src/java/org/apache/solr/util/stats/MetricUtils.java
@@ -20,14 +20,13 @@
 import java.beans.IntrospectionException;
 import java.beans.Introspector;
 import java.beans.PropertyDescriptor;
+import java.io.IOException;
 import java.lang.invoke.MethodHandles;
 import java.lang.management.OperatingSystemMXBean;
 import java.lang.management.PlatformManagedObject;
 import java.lang.reflect.InvocationTargetException;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.SortedSet;
@@ -35,6 +34,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
+import java.util.function.Predicate;
 
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Gauge;
@@ -46,10 +46,14 @@
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Snapshot;
 import com.codahale.metrics.Timer;
+import org.apache.solr.common.ConditionalKeyMapWriter;
+import org.apache.solr.common.IteratorWriter;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -87,7 +91,9 @@
 
   /**
    * This filter can limit what properties of a metric are returned.
+   * @deprecated use {@link Predicate} instead.
    */
+  @Deprecated(since = "8.7")
   public interface PropertyFilter {
     PropertyFilter ALL = (name) -> true;
 
@@ -97,8 +103,14 @@
      * @return true if this property should be returned, false otherwise.
      */
     boolean accept(String name);
+
+    static Predicate<CharSequence> toPredicate(PropertyFilter filter) {
+      return (name) -> filter.accept(name.toString());
+    }
   }
 
+  public static final Predicate<CharSequence> ALL_PROPERTIES = (name) -> true;
+
   /**
    * Adds metrics from a Timer to a NamedList, using well-known back-compat names.
    * @param lst The NamedList to add the metrics data to
@@ -145,11 +157,40 @@
    * @param metadata optional metadata. If not null and not empty then this map will be added under a
    *                 {@code _metadata_} key.
    * @param consumer consumer that accepts produced {@link SolrInputDocument}-s
+   * @deprecated use {@link #toSolrInputDocuments(MetricRegistry, List, MetricFilter, Predicate, boolean, boolean, boolean, Map, Consumer)} instead.
    */
+  @Deprecated(since = "8.7")
   public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
                                           MetricFilter mustMatchFilter, PropertyFilter propertyFilter, boolean skipHistograms,
                                           boolean skipAggregateValues, boolean compact,
                                           Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
+    toSolrInputDocuments(registry, shouldMatchFilters, mustMatchFilter,
+        PropertyFilter.toPredicate(propertyFilter), skipHistograms,
+        skipAggregateValues, compact, metadata, consumer);
+  }
+  /**
+   * Provides a representation of the given metric registry as {@link SolrInputDocument}-s.
+   * Only those metrics
+   * are converted which match at least one of the given MetricFilter instances.
+   *
+   * @param registry      the {@link MetricRegistry} to be converted
+   * @param shouldMatchFilters a list of {@link MetricFilter} instances.
+   *                           A metric must match <em>any one</em> of the filters from this list to be
+   *                           included in the output
+   * @param mustMatchFilter a {@link MetricFilter}.
+   *                        A metric <em>must</em> match this filter to be included in the output.
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param compact use compact representation for counters and gauges.
+   * @param metadata optional metadata. If not null and not empty then this map will be added under a
+   *                 {@code _metadata_} key.
+   * @param consumer consumer that accepts produced {@link SolrInputDocument}-s
+   */
+  public static void toSolrInputDocuments(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+                                          MetricFilter mustMatchFilter, Predicate<CharSequence> propertyFilter, boolean skipHistograms,
+                                          boolean skipAggregateValues, boolean compact,
+                                          Map<String, Object> metadata, Consumer<SolrInputDocument> consumer) {
     boolean addMetadata = metadata != null && !metadata.isEmpty();
     toMaps(registry, shouldMatchFilters, mustMatchFilter, propertyFilter, skipHistograms, skipAggregateValues, compact, false, (k, v) -> {
       SolrInputDocument doc = new SolrInputDocument();
@@ -169,20 +210,42 @@
    * @param o an instance of converted metric, either a Map or a flat Object
    */
   static void toSolrInputDocument(String prefix, SolrInputDocument doc, Object o) {
-    if (!(o instanceof Map)) {
+    final BiConsumer<Object, Object> consumer = (k, v) -> {
+      if ((v instanceof Map) || (v instanceof MapWriter) || (v instanceof IteratorWriter)) {
+        toSolrInputDocument(k.toString(), doc, v);
+      } else {
+        String key = prefix != null ? prefix + "." + k : k.toString();
+        doc.addField(key, v);
+      }
+    };
+    if (o instanceof MapWriter) {
+      @SuppressWarnings({"unchecked"})
+      MapWriter writer = (MapWriter) o;
+      writer._forEachEntry(consumer);
+    } else if (o instanceof Map) {
+      @SuppressWarnings({"unchecked"})
+      Map<String, Object> map = (Map<String, Object>) o;
+      for (Map.Entry<String, Object> entry : map.entrySet()) {
+        consumer.accept(entry.getKey(), entry.getValue());
+      }
+    } else if (o instanceof IteratorWriter) {
+      @SuppressWarnings({"unchecked"})
+      IteratorWriter writer = (IteratorWriter) o;
+      final String name = prefix != null ? prefix : "value";
+      try {
+        writer.writeIter(new IteratorWriter.ItemWriter() {
+          @Override
+          public IteratorWriter.ItemWriter add(Object o) throws IOException {
+            consumer.accept(name, o);
+            return this;
+          }
+        });
+      } catch (IOException e) {
+        throw new RuntimeException("this should never happen", e);
+      }
+    } else {
       String key = prefix != null ? prefix : VALUE;
       doc.addField(key, o);
-      return;
-    }
-    @SuppressWarnings({"unchecked"})
-    Map<String, Object> map = (Map<String, Object>)o;
-    for (Map.Entry<String, Object> entry : map.entrySet()) {
-      if (entry.getValue() instanceof Map) { // flatten recursively
-        toSolrInputDocument(entry.getKey(), doc, entry.getValue());
-      } else {
-        String key = prefix != null ? prefix + "." + entry.getKey() : entry.getKey();
-        doc.addField(key, entry.getValue());
-      }
     }
   }
 
@@ -198,9 +261,33 @@
    * @param simple use simplified representation for complex metrics - instead of a (name, map)
    *             only the selected (name "." key, value) pairs will be produced.
    * @param consumer consumer that accepts produced objects
+   * @deprecated use {@link #toMaps(MetricRegistry, List, MetricFilter, Predicate, boolean, boolean, boolean, boolean, BiConsumer)} instead.
+   */
+  @Deprecated(since = "8.7")
+  public static void toMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
+                            MetricFilter mustMatchFilter, PropertyFilter propertyFilter,
+                            boolean skipHistograms, boolean skipAggregateValues,
+                            boolean compact, boolean simple,
+                            BiConsumer<String, Object> consumer) {
+    toMaps(registry, shouldMatchFilters, mustMatchFilter,
+        PropertyFilter.toPredicate(propertyFilter), skipHistograms,
+        skipAggregateValues, compact, simple, consumer);
+  }
+  /**
+   * Convert selected metrics to maps or to flattened objects.
+   * @param registry source of metrics
+   * @param shouldMatchFilters metrics must match any of these filters
+   * @param mustMatchFilter metrics must match this filter
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param compact use compact representation for counters and gauges.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
    */
   public static void toMaps(MetricRegistry registry, List<MetricFilter> shouldMatchFilters,
-                     MetricFilter mustMatchFilter, PropertyFilter propertyFilter,
+                     MetricFilter mustMatchFilter, Predicate<CharSequence> propertyFilter,
                      boolean skipHistograms, boolean skipAggregateValues,
                      boolean compact, boolean simple,
                      BiConsumer<String, Object> consumer) {
@@ -263,8 +350,27 @@
    * @param simple use simplified representation for complex metrics - instead of a (name, map)
    *             only the selected (name "." key, value) pairs will be produced.
    * @param consumer consumer that accepts produced objects
+   * @deprecated use {@link #convertMetric(String, Metric, Predicate, boolean, boolean, boolean, boolean, String, BiConsumer)} instead.
    */
+  @Deprecated(since = "8.7")
   public static void convertMetric(String n, Metric metric, PropertyFilter propertyFilter, boolean skipHistograms, boolean skipAggregateValues,
+                                   boolean compact, boolean simple, String separator, BiConsumer<String, Object> consumer) {
+    convertMetric(n, metric, PropertyFilter.toPredicate(propertyFilter),
+        skipHistograms, skipAggregateValues, compact, simple, separator, consumer);
+  }
+  /**
+   * Convert a single instance of metric into a map or flattened object.
+   * @param n metric name
+   * @param metric metric instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms discard any {@link Histogram}-s and histogram parts of {@link Timer}-s.
+   * @param skipAggregateValues discard internal values of {@link AggregateMetric}-s.
+   * @param compact use compact representation for counters and gauges.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  public static void convertMetric(String n, Metric metric, Predicate<CharSequence> propertyFilter, boolean skipHistograms, boolean skipAggregateValues,
                               boolean compact, boolean simple, String separator, BiConsumer<String, Object> consumer) {
     if (metric instanceof Counter) {
       Counter counter = (Counter) metric;
@@ -272,8 +378,16 @@
     } else if (metric instanceof Gauge) {
       @SuppressWarnings({"rawtypes"})
       Gauge gauge = (Gauge) metric;
+      // unwrap if needed
+      if (gauge instanceof SolrMetricManager.GaugeWrapper) {
+        gauge = ((SolrMetricManager.GaugeWrapper) gauge).getGauge();
+      }
       try {
-        convertGauge(n, gauge, propertyFilter, simple, compact, separator, consumer);
+        if (gauge instanceof MapWriter) {
+          convertMapWriter(n, (MapWriter) gauge, propertyFilter, simple, compact, separator, consumer);
+        } else {
+          convertGauge(n, gauge, propertyFilter, simple, compact, separator, consumer);
+        }
       } catch (InternalError ie) {
         if (n.startsWith("memory.") && ie.getMessage().contains("Memory Pool not found")) {
           log.warn("Error converting gauge '{}', possible JDK bug: SOLR-10362", n, ie);
@@ -309,37 +423,38 @@
    * @param consumer consumer that accepts produced objects
    */
   static void convertAggregateMetric(String name, AggregateMetric metric,
-      PropertyFilter propertyFilter,
+      Predicate<CharSequence> propertyFilter,
       boolean skipAggregateValues, boolean simple, String separator, BiConsumer<String, Object> consumer) {
     if (simple) {
-      if (propertyFilter.accept(MEAN)) {
+      if (propertyFilter.test(MEAN)) {
         consumer.accept(name + separator + MEAN, metric.getMean());
       }
     } else {
-      Map<String, Object> response = new LinkedHashMap<>();
-      BiConsumer<String, Object> filter = (k, v) -> {
-        if (propertyFilter.accept(k)) {
-          response.put(k, v);
+      MapWriter writer = ew -> {
+        BiConsumer<String, Object> filter = (k, v) -> {
+          if (propertyFilter.test(k)) {
+            ew.putNoEx(k, v);
+          }
+        };
+        filter.accept("count", metric.size());
+        filter.accept(MAX, metric.getMax());
+        filter.accept(MIN, metric.getMin());
+        filter.accept(MEAN, metric.getMean());
+        filter.accept(STDDEV, metric.getStdDev());
+        filter.accept(SUM, metric.getSum());
+        if (!(metric.isEmpty() || skipAggregateValues)) {
+          ew.putNoEx(VALUES, (MapWriter) ew1 -> {
+            metric.getValues().forEach((k, v) -> {
+              ew1.putNoEx(k, (MapWriter) ew2 -> {
+                ew2.putNoEx("value", v.value);
+                ew2.putNoEx("updateCount", v.updateCount.get());
+              });
+            });
+          });
         }
       };
-      filter.accept("count", metric.size());
-      filter.accept(MAX, metric.getMax());
-      filter.accept(MIN, metric.getMin());
-      filter.accept(MEAN, metric.getMean());
-      filter.accept(STDDEV, metric.getStdDev());
-      filter.accept(SUM, metric.getSum());
-      if (!(metric.isEmpty() || skipAggregateValues)) {
-        Map<String, Object> values = new LinkedHashMap<>();
-        response.put(VALUES, values);
-        metric.getValues().forEach((k, v) -> {
-          Map<String, Object> map = new LinkedHashMap<>();
-          map.put("value", v.value);
-          map.put("updateCount", v.updateCount.get());
-          values.put(k, map);
-        });
-      }
-      if (!response.isEmpty()) {
-        consumer.accept(name, response);
+      if (writer._size() > 0) {
+        consumer.accept(name, writer);
       }
     }
   }
@@ -354,24 +469,23 @@
    *             only the selected (name "." key, value) pairs will be produced.
    * @param consumer consumer that accepts produced objects
    */
-  static void convertHistogram(String name, Histogram histogram, PropertyFilter propertyFilter,
+  static void convertHistogram(String name, Histogram histogram, Predicate<CharSequence> propertyFilter,
                                               boolean simple, String separator, BiConsumer<String, Object> consumer) {
     Snapshot snapshot = histogram.getSnapshot();
     if (simple) {
-      if (propertyFilter.accept(MEAN)) {
+      if (propertyFilter.test(MEAN)) {
         consumer.accept(name + separator + MEAN, snapshot.getMean());
       }
     } else {
-      Map<String, Object> response = new LinkedHashMap<>();
-      String prop = "count";
-      if (propertyFilter.accept(prop)) {
-        response.put(prop, histogram.getCount());
-      }
-      // non-time based values
-      addSnapshot(response, snapshot, propertyFilter, false);
-      if (!response.isEmpty()) {
-        consumer.accept(name, response);
-      }
+      MapWriter writer = ew -> {
+        String prop = "count";
+        if (propertyFilter.test(prop)) {
+          ew.putNoEx(prop, histogram.getCount());
+        }
+        // non-time based values
+        addSnapshot(ew, snapshot, propertyFilter, false);
+      };
+      consumer.accept(name, writer);
     }
   }
 
@@ -385,10 +499,10 @@
   }
 
   // some snapshots represent time in ns, other snapshots represent raw values (eg. chunk size)
-  static void addSnapshot(Map<String, Object> response, Snapshot snapshot, PropertyFilter propertyFilter, boolean ms) {
+  static void addSnapshot(MapWriter.EntryWriter ew, Snapshot snapshot, Predicate<CharSequence> propertyFilter, boolean ms) {
     BiConsumer<String, Object> filter = (k, v) -> {
-      if (propertyFilter.accept(k)) {
-        response.put(k, v);
+      if (propertyFilter.test(k)) {
+        ew.putNoEx(k, v);
       }
     };
     filter.accept((ms ? MIN_MS: MIN), nsToMs(ms, snapshot.getMin()));
@@ -411,32 +525,50 @@
    * @param simple use simplified representation for complex metrics - instead of a (name, map)
    *             only the selected (name "." key, value) pairs will be produced.
    * @param consumer consumer that accepts produced objects
+   * @deprecated use {@link #convertTimer(String, Timer, Predicate, boolean, boolean, String, BiConsumer)} instead.
    */
+  @Deprecated(since = "8.7")
   public static void convertTimer(String name, Timer timer, PropertyFilter propertyFilter, boolean skipHistograms,
+                                  boolean simple, String separator, BiConsumer<String, Object> consumer) {
+    convertTimer(name, timer, PropertyFilter.toPredicate(propertyFilter),
+        skipHistograms, simple, separator, consumer);
+  }
+  /**
+   * Convert a {@link Timer} to a map.
+   * @param name metric name
+   * @param timer timer instance
+   * @param propertyFilter limit what properties of a metric are returned
+   * @param skipHistograms if true then discard the histogram part of the timer.
+   * @param simple use simplified representation for complex metrics - instead of a (name, map)
+   *             only the selected (name "." key, value) pairs will be produced.
+   * @param consumer consumer that accepts produced objects
+   */
+  public static void convertTimer(String name, Timer timer, Predicate<CharSequence> propertyFilter, boolean skipHistograms,
                                                 boolean simple, String separator, BiConsumer<String, Object> consumer) {
     if (simple) {
       String prop = "meanRate";
-      if (propertyFilter.accept(prop)) {
+      if (propertyFilter.test(prop)) {
         consumer.accept(name + separator + prop, timer.getMeanRate());
       }
     } else {
-      Map<String, Object> response = new LinkedHashMap<>();
-      BiConsumer<String,Object> filter = (k, v) -> {
-        if (propertyFilter.accept(k)) {
-          response.put(k, v);
+      MapWriter writer = ew -> {
+        BiConsumer<String,Object> filter = (k, v) -> {
+          if (propertyFilter.test(k)) {
+            ew.putNoEx(k, v);
+          }
+        };
+        filter.accept("count", timer.getCount());
+        filter.accept("meanRate", timer.getMeanRate());
+        filter.accept("1minRate", timer.getOneMinuteRate());
+        filter.accept("5minRate", timer.getFiveMinuteRate());
+        filter.accept("15minRate", timer.getFifteenMinuteRate());
+        if (!skipHistograms) {
+          // time-based values in nanoseconds
+          addSnapshot(ew, timer.getSnapshot(), propertyFilter, true);
         }
       };
-      filter.accept("count", timer.getCount());
-      filter.accept("meanRate", timer.getMeanRate());
-      filter.accept("1minRate", timer.getOneMinuteRate());
-      filter.accept("5minRate", timer.getFiveMinuteRate());
-      filter.accept("15minRate", timer.getFifteenMinuteRate());
-      if (!skipHistograms) {
-        // time-based values in nanoseconds
-        addSnapshot(response, timer.getSnapshot(), propertyFilter, true);
-      }
-      if (!response.isEmpty()) {
-        consumer.accept(name, response);
+      if (writer._size() > 0) {
+        consumer.accept(name, writer);
       }
     }
   }
@@ -450,29 +582,49 @@
    *             only the selected (name "." key, value) pairs will be produced.
    * @param consumer consumer that accepts produced objects
    */
-  static void convertMeter(String name, Meter meter, PropertyFilter propertyFilter, boolean simple, String separator, BiConsumer<String, Object> consumer) {
+  static void convertMeter(String name, Meter meter, Predicate<CharSequence> propertyFilter, boolean simple, String separator, BiConsumer<String, Object> consumer) {
     if (simple) {
-      if (propertyFilter.accept("count")) {
+      if (propertyFilter.test("count")) {
         consumer.accept(name + separator + "count", meter.getCount());
       }
     } else {
-      Map<String, Object> response = new LinkedHashMap<>();
-      BiConsumer<String, Object> filter = (k, v) -> {
-        if (propertyFilter.accept(k)) {
-          response.put(k, v);
-        }
+      MapWriter writer = ew -> {
+        BiConsumer<String, Object> filter = (k, v) -> {
+          if (propertyFilter.test(k)) {
+            ew.putNoEx(k, v);
+          }
+        };
+        filter.accept("count", meter.getCount());
+        filter.accept("meanRate", meter.getMeanRate());
+        filter.accept("1minRate", meter.getOneMinuteRate());
+        filter.accept("5minRate", meter.getFiveMinuteRate());
+        filter.accept("15minRate", meter.getFifteenMinuteRate());
       };
-      filter.accept("count", meter.getCount());
-      filter.accept("meanRate", meter.getMeanRate());
-      filter.accept("1minRate", meter.getOneMinuteRate());
-      filter.accept("5minRate", meter.getFiveMinuteRate());
-      filter.accept("15minRate", meter.getFifteenMinuteRate());
-      if (!response.isEmpty()) {
-        consumer.accept(name, response);
+      if (writer._size() > 0) {
+        consumer.accept(name, writer);
       }
     }
   }
 
+  static void convertMapWriter(String name, MapWriter metric,
+                               Predicate<CharSequence> propertyFilter, boolean simple, boolean compact,
+                               String separator, BiConsumer<String, Object> consumer) {
+    ConditionalKeyMapWriter filteredMetric = new ConditionalKeyMapWriter(metric, propertyFilter);
+    if (compact || simple) {
+      if (simple) {
+        filteredMetric._forEachEntry((k, v) ->
+            consumer.accept(name + separator + k, v));
+      } else {
+        if (filteredMetric._size() > 0) {
+          consumer.accept(name, filteredMetric);
+        }
+      }
+    } else {
+      if (filteredMetric._size() > 0) {
+        consumer.accept(name, (MapWriter) ew -> ew.putNoEx("value", filteredMetric));
+      }
+    }
+  }
   /**
    * Convert a {@link Gauge}.
    * @param name metric name
@@ -486,7 +638,7 @@
    */
   static void convertGauge(String name,
                            @SuppressWarnings({"rawtypes"})Gauge gauge,
-                           PropertyFilter propertyFilter, boolean simple, boolean compact,
+                           Predicate<CharSequence> propertyFilter, boolean simple, boolean compact,
                            String separator, BiConsumer<String, Object> consumer) {
     if (compact || simple) {
       Object o = gauge.getValue();
@@ -494,20 +646,23 @@
         if (simple) {
           for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
             String prop = entry.getKey().toString();
-            if (propertyFilter.accept(prop)) {
+            if (propertyFilter.test(prop)) {
               consumer.accept(name + separator + prop, entry.getValue());
             }
           }
         } else {
-          Map<String, Object> val = new HashMap<>();
-          for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
-            String prop = entry.getKey().toString();
-            if (propertyFilter.accept(prop)) {
-              val.put(prop, entry.getValue());
+          boolean notEmpty = ((Map<?, ?>)o).entrySet().stream()
+              .anyMatch(entry -> propertyFilter.test(entry.getKey().toString()));
+          MapWriter writer = ew -> {
+            for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
+              String prop = entry.getKey().toString();
+              if (propertyFilter.test(prop)) {
+                ew.putNoEx(prop, entry.getValue());
+              }
             }
-          }
-          if (!val.isEmpty()) {
-            consumer.accept(name, val);
+          };
+          if (notEmpty) {
+            consumer.accept(name, writer);
           }
         }
       } else {
@@ -515,21 +670,24 @@
       }
     } else {
       Object o = gauge.getValue();
-      Map<String, Object> response = new LinkedHashMap<>();
       if (o instanceof Map) {
-        for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
-          String prop = entry.getKey().toString();
-          if (propertyFilter.accept(prop)) {
-            response.put(prop, entry.getValue());
-          }
-        }
-        if (!response.isEmpty()) {
-          consumer.accept(name, Collections.singletonMap("value", response));
+        boolean notEmpty = ((Map<?, ?>)o).entrySet().stream()
+            .anyMatch(entry -> propertyFilter.test(entry.getKey().toString()));
+        if (notEmpty) {
+          consumer.accept(name, (MapWriter) ew -> {
+            ew.putNoEx("value", (MapWriter) ew1 -> {
+              for (Map.Entry<?, ?> entry : ((Map<?, ?>)o).entrySet()) {
+                String prop = entry.getKey().toString();
+                if (propertyFilter.test(prop)) {
+                  ew1.put(prop, entry.getValue());
+                }
+              }
+            });
+          });
         }
       } else {
-        if (propertyFilter.accept("value")) {
-          response.put("value", o);
-          consumer.accept(name, response);
+        if (propertyFilter.test("value")) {
+          consumer.accept(name, (MapWriter) ew -> ew.putNoEx("value", o));
         }
       }
     }
@@ -542,14 +700,12 @@
    * @param compact if true then only return {@link Counter#getCount()}. If false
    *                then return a map with a "count" field.
    */
-  static void convertCounter(String name, Counter counter, PropertyFilter propertyFilter, boolean compact, BiConsumer<String, Object> consumer) {
+  static void convertCounter(String name, Counter counter, Predicate<CharSequence> propertyFilter, boolean compact, BiConsumer<String, Object> consumer) {
     if (compact) {
       consumer.accept(name, counter.getCount());
     } else {
-      if (propertyFilter.accept("count")) {
-        Map<String, Object> response = new LinkedHashMap<>();
-        response.put("count", counter.getCount());
-        consumer.accept(name, response);
+      if (propertyFilter.test("count")) {
+        consumer.accept(name, (MapWriter) ew -> ew.putNoEx("count", counter.getCount()));
       }
     }
   }
diff --git a/solr/core/src/test-files/runtimecode/MyPatternReplaceCharFilterFactory.java b/solr/core/src/test-files/runtimecode/MyPatternReplaceCharFilterFactory.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/MyPatternReplaceCharFilterFactory.java
rename to solr/core/src/test-files/runtimecode/MyPatternReplaceCharFilterFactory.java.txt
diff --git a/solr/core/src/test-files/runtimecode/MyPlugin.java b/solr/core/src/test-files/runtimecode/MyPlugin.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/MyPlugin.java
rename to solr/core/src/test-files/runtimecode/MyPlugin.java.txt
diff --git a/solr/core/src/test-files/runtimecode/MyTextField.java b/solr/core/src/test-files/runtimecode/MyTextField.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/MyTextField.java
rename to solr/core/src/test-files/runtimecode/MyTextField.java.txt
diff --git a/solr/core/src/test-files/runtimecode/MyWhitespaceTokenizerFactory.java b/solr/core/src/test-files/runtimecode/MyWhitespaceTokenizerFactory.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/MyWhitespaceTokenizerFactory.java
rename to solr/core/src/test-files/runtimecode/MyWhitespaceTokenizerFactory.java.txt
diff --git a/solr/core/src/test-files/runtimecode/RuntimeLibReqHandler.java b/solr/core/src/test-files/runtimecode/RuntimeLibReqHandler.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/RuntimeLibReqHandler.java
rename to solr/core/src/test-files/runtimecode/RuntimeLibReqHandler.java.txt
diff --git a/solr/core/src/test-files/runtimecode/RuntimeLibResponseWriter.java b/solr/core/src/test-files/runtimecode/RuntimeLibResponseWriter.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/RuntimeLibResponseWriter.java
rename to solr/core/src/test-files/runtimecode/RuntimeLibResponseWriter.java.txt
diff --git a/solr/core/src/test-files/runtimecode/RuntimeLibSearchComponent.java b/solr/core/src/test-files/runtimecode/RuntimeLibSearchComponent.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/RuntimeLibSearchComponent.java
rename to solr/core/src/test-files/runtimecode/RuntimeLibSearchComponent.java.txt
diff --git a/solr/core/src/test-files/runtimecode/TestURP.java b/solr/core/src/test-files/runtimecode/TestURP.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/TestURP.java
rename to solr/core/src/test-files/runtimecode/TestURP.java.txt
diff --git a/solr/core/src/test-files/runtimecode/TestVersionedURP.java b/solr/core/src/test-files/runtimecode/TestVersionedURP.java.txt
similarity index 100%
rename from solr/core/src/test-files/runtimecode/TestVersionedURP.java
rename to solr/core/src/test-files/runtimecode/TestVersionedURP.java.txt
diff --git a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
index e38b563..89879e5 100644
--- a/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
+++ b/solr/core/src/test/org/apache/hadoop/fs/FileUtil.java
@@ -76,7 +76,8 @@
 public class FileUtil {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
-  private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class);
+  // Apparently the Hadoop code expects upper-case LOG, so...
+  private static final Logger LOG = LoggerFactory.getLogger(FileUtil.class); //nowarn
 
   /* The error code is defined in winutils to indicate insufficient
    * privilege to create symbolic links. This value need to keep in
@@ -597,12 +598,7 @@
       File[] allFiles = dir.listFiles();
       if(allFiles != null) {
         for (int i = 0; i < allFiles.length; i++) {
-          boolean isSymLink;
-          try {
-            isSymLink = org.apache.commons.io.FileUtils.isSymlink(allFiles[i]);
-          } catch(IOException ioe) {
-            isSymLink = true;
-          }
+          boolean isSymLink = org.apache.commons.io.FileUtils.isSymlink(allFiles[i]);
           if(!isSymLink) {
             size += getDU(allFiles[i]);
           }
diff --git a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
index 9cba3fb..c75ef8d 100644
--- a/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
+++ b/solr/core/src/test/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/BlockPoolSlice.java
@@ -81,7 +81,8 @@
 public class BlockPoolSlice {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
-  static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class);
+  // Apparently the Hadoop code expects upper-case LOG, so...
+  static final Logger LOG = LoggerFactory.getLogger(BlockPoolSlice.class); //nowarn
 
   private final String bpid;
   private final FsVolumeImpl volume; // volume to which this BlockPool belongs to
diff --git a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
index 0767d4f..274dd45 100644
--- a/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
+++ b/solr/core/src/test/org/apache/hadoop/http/HttpServer2.java
@@ -118,7 +118,8 @@
 public final class HttpServer2 implements FilterContainer {
   public static final Object SOLR_HACK_FOR_CLASS_VERIFICATION = new Object();
 
-  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class);
+  // Apparently the Hadoop code expects upper-case LOG, so...
+  public static final Logger LOG = LoggerFactory.getLogger(HttpServer2.class); //nowarn
 
   public static final String HTTP_SCHEME = "http";
   public static final String HTTPS_SCHEME = "https";
diff --git a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java
index bf35932..33afa70 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ChaosMonkeySafeLeaderWithPullReplicasTest.java
@@ -206,7 +206,7 @@
 
     if (log.isInfoEnabled()) {
       log.info("control docs:{}\n\n", controlClient.query(new SolrQuery("*:*")).getResults().getNumFound());
-      log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // logOk
+      log.info("collection state: {}", printClusterStateInfo(DEFAULT_COLLECTION)); // nowarn
     }
     
     waitForReplicationFromReplicas(DEFAULT_COLLECTION, cloudClient.getZkStateReader(), new TimeOut(30, TimeUnit.SECONDS, TimeSource.NANO_TIME));
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
index 99c4064..e6d26f9 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerRolesTest.java
@@ -99,7 +99,7 @@
   private void logOverseerState() throws KeeperException, InterruptedException {
     if (log.isInfoEnabled()) {
       log.info("Overseer: {}", getLeaderNode(zkClient()));
-      log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // logOk
+      log.info("Election queue: {}", getSortedElectionNodes(zkClient(), "/overseer_elect/election")); // nowarn
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index f3e73e2..f6c07c1 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -1166,14 +1166,14 @@
     Snapshot snapshot = timer.getSnapshot();
     if (log.isInfoEnabled()) {
       log.info("\t avgRequestsPerSecond: {}", timer.getMeanRate());
-      log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // logOk
-      log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // logOk
-      log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // logOk
-      log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // logOk
-      log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // logOk
-      log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // logOk
-      log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // logOk
-      log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // logOk
+      log.info("\t 5minRateRequestsPerSecond: {}", timer.getFiveMinuteRate()); // nowarn
+      log.info("\t 15minRateRequestsPerSecond: {}", timer.getFifteenMinuteRate()); // nowarn
+      log.info("\t avgTimePerRequest: {}", nsToMs(snapshot.getMean())); // nowarn
+      log.info("\t medianRequestTime: {}", nsToMs(snapshot.getMedian())); // nowarn
+      log.info("\t 75thPcRequestTime: {}", nsToMs(snapshot.get75thPercentile())); // nowarn
+      log.info("\t 95thPcRequestTime: {}", nsToMs(snapshot.get95thPercentile())); // nowarn
+      log.info("\t 99thPcRequestTime: {}", nsToMs(snapshot.get99thPercentile())); // nowarn
+      log.info("\t 999thPcRequestTime: {}", nsToMs(snapshot.get999thPercentile())); // nowarn
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
index 164db25..8941e0d 100644
--- a/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/RollingRestartTest.java
@@ -83,7 +83,7 @@
     boolean sawLiveDesignate = false;
     int numRestarts = 1 + random().nextInt(TEST_NIGHTLY ? 12 : 2);
     for (int i = 0; i < numRestarts; i++) {
-      log.info("Rolling restart #{}", i + 1); // logOk
+      log.info("Rolling restart #{}", i + 1); // nowarn
       for (CloudJettyRunner cloudJetty : designateJettys) {
         log.info("Restarting {}", cloudJetty);
         chaosMonkey.stopJetty(cloudJetty);
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
index 6fea2cb..48f0dc8 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPI.java
@@ -35,6 +35,7 @@
 import java.security.Principal;
 import java.util.Arrays;
 import java.util.Collection;
+import java.util.Collections;
 import java.util.Deque;
 import java.util.HashSet;
 import java.util.Iterator;
@@ -68,6 +69,7 @@
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.client.solrj.response.CollectionAdminResponse;
 import org.apache.solr.client.solrj.response.ConfigSetAdminResponse;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.cloud.ZkConfigManager;
@@ -136,51 +138,84 @@
   @Test
   public void testCreateErrors() throws Exception {
     final String baseUrl = cluster.getJettySolrRunners().get(0).getBaseUrl().toString();
-    final SolrClient solrClient = getHttpSolrClient(baseUrl);
-    zkConfigManager.uploadConfigDir(configset("configset-2"), "configSet");
+    try (final SolrClient solrClient = getHttpSolrClient(baseUrl)) {
+      zkConfigManager.uploadConfigDir(configset("configset-2"), "configSet");
 
-    // no action
-    CreateNoErrorChecking createNoAction = new CreateNoErrorChecking();
-    createNoAction.setAction(null);
-    verifyException(solrClient, createNoAction, "action");
+      // no action
+      CreateNoErrorChecking createNoAction = new CreateNoErrorChecking();
+      createNoAction.setAction(null);
+      verifyException(solrClient, createNoAction, "action");
 
-    // no ConfigSet name
-    CreateNoErrorChecking create = new CreateNoErrorChecking();
-    verifyException(solrClient, create, NAME);
+      // no ConfigSet name
+      CreateNoErrorChecking create = new CreateNoErrorChecking();
+      verifyException(solrClient, create, NAME);
 
-    // set ConfigSet
-    create.setConfigSetName("configSetName");
+      // set ConfigSet
+      create.setConfigSetName("configSetName");
 
-    // ConfigSet already exists
-    Create alreadyExists = new Create();
-    alreadyExists.setConfigSetName("configSet").setBaseConfigSetName("baseConfigSet");
-    verifyException(solrClient, alreadyExists, "ConfigSet already exists");
+      // ConfigSet already exists
+      Create alreadyExists = new Create();
+      alreadyExists.setConfigSetName("configSet").setBaseConfigSetName("baseConfigSet");
+      verifyException(solrClient, alreadyExists, "ConfigSet already exists");
 
-    // Base ConfigSet does not exist
-    Create baseConfigNoExists = new Create();
-    baseConfigNoExists.setConfigSetName("newConfigSet").setBaseConfigSetName("baseConfigSet");
-    verifyException(solrClient, baseConfigNoExists, "Base ConfigSet does not exist");
-
-    solrClient.close();
+      // Base ConfigSet does not exist
+      Create baseConfigNoExists = new Create();
+      baseConfigNoExists.setConfigSetName("newConfigSet").setBaseConfigSetName("baseConfigSet");
+      verifyException(solrClient, baseConfigNoExists, "Base ConfigSet does not exist");
+    }
   }
 
   @Test
   public void testCreate() throws Exception {
     // no old, no new
-    verifyCreate(null, "configSet1", null, null);
+    verifyCreate(null, "configSet1", null, null, "solr");
 
     // no old, new
     verifyCreate("baseConfigSet2", "configSet2",
-        null, ImmutableMap.<String, String>of("immutable", "true", "key1", "value1"));
+        null, ImmutableMap.<String, String>of("immutable", "true", "key1", "value1"), "solr");
 
     // old, no new
     verifyCreate("baseConfigSet3", "configSet3",
-        ImmutableMap.<String, String>of("immutable", "false", "key2", "value2"), null);
+        ImmutableMap.<String, String>of("immutable", "false", "key2", "value2"), null, "solr");
 
     // old, new
     verifyCreate("baseConfigSet4", "configSet4",
         ImmutableMap.<String, String>of("immutable", "true", "onlyOld", "onlyOldValue"),
-        ImmutableMap.<String, String>of("immutable", "false", "onlyNew", "onlyNewValue"));
+        ImmutableMap.<String, String>of("immutable", "false", "onlyNew", "onlyNewValue"), "solr");
+  }
+
+  @Test
+  public void testCreateWithTrust() throws Exception {
+    String configsetName = "regular";
+    String configsetSuffix = "testCreateWithTrust";
+    String configsetSuffix2 = "testCreateWithTrust2";
+    uploadConfigSetWithAssertions(configsetName, configsetSuffix, "solr");
+    uploadConfigSetWithAssertions(configsetName, configsetSuffix2, null);
+    try (SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(),
+            AbstractZkTestCase.TIMEOUT, 45000, null)) {
+      assertTrue(isTrusted(zkClient, configsetName, configsetSuffix));
+      assertFalse(isTrusted(zkClient, configsetName, configsetSuffix2));
+      try {
+        ignoreException("unauthenticated request");
+        // trusted -> untrusted
+        createConfigSet(configsetName + configsetSuffix, "foo", Collections.emptyMap(), cluster.getSolrClient(), null);
+        fail("Expecting exception");
+      } catch (SolrException e) {
+        assertEquals(SolrException.ErrorCode.UNAUTHORIZED.code, e.code());
+        unIgnoreException("unauthenticated request");
+      }
+      // trusted -> trusted
+      verifyCreate(configsetName + configsetSuffix, "foo2", Collections.emptyMap(), Collections.emptyMap(), "solr");
+      assertTrue(isTrusted(zkClient, "foo2", ""));
+
+      // untrusted -> untrusted
+      verifyCreate(configsetName + configsetSuffix2, "bar", Collections.emptyMap(), Collections.emptyMap(), null);
+      assertFalse(isTrusted(zkClient, "bar", ""));
+
+      // untrusted -> trusted
+      verifyCreate(configsetName + configsetSuffix2, "bar2", Collections.emptyMap(), Collections.emptyMap(), "solr");
+      assertFalse(isTrusted(zkClient, "bar2", ""));
+    }
   }
 
   private void setupBaseConfigSet(String baseConfigSetName, Map<String, String> oldProps) throws Exception {
@@ -196,33 +231,40 @@
   }
 
   private void verifyCreate(String baseConfigSetName, String configSetName,
-      Map<String, String> oldProps, Map<String, String> newProps) throws Exception {
+      Map<String, String> oldProps, Map<String, String> newProps, String username) throws Exception {
     final String baseUrl = cluster.getJettySolrRunners().get(0).getBaseUrl().toString();
-    final SolrClient solrClient = getHttpSolrClient(baseUrl);
-    setupBaseConfigSet(baseConfigSetName, oldProps);
+    try (final SolrClient solrClient = getHttpSolrClient(baseUrl)) {
+      setupBaseConfigSet(baseConfigSetName, oldProps);
 
-    SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(),
-        AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null);
-    try {
-      ZkConfigManager configManager = new ZkConfigManager(zkClient);
-      assertFalse(configManager.configExists(configSetName));
+      SolrZkClient zkClient = new SolrZkClient(cluster.getZkServer().getZkAddress(),
+              AbstractZkTestCase.TIMEOUT, AbstractZkTestCase.TIMEOUT, null);
+      try {
+        ZkConfigManager configManager = new ZkConfigManager(zkClient);
+        assertFalse(configManager.configExists(configSetName));
 
-      Create create = new Create();
-      create.setBaseConfigSetName(baseConfigSetName).setConfigSetName(configSetName);
-      if (newProps != null) {
-        Properties p = new Properties();
-        p.putAll(newProps);
-        create.setNewConfigSetProperties(p);
+        ConfigSetAdminResponse response = createConfigSet(baseConfigSetName, configSetName, newProps, solrClient, username);
+        assertNotNull(response.getResponse());
+        assertTrue(configManager.configExists(configSetName));
+
+        verifyProperties(configSetName, oldProps, newProps, zkClient);
+      } finally {
+        zkClient.close();
       }
-      ConfigSetAdminResponse response = create.process(solrClient);
-      assertNotNull(response.getResponse());
-      assertTrue(configManager.configExists(configSetName));
-
-      verifyProperties(configSetName, oldProps, newProps, zkClient);
-    } finally {
-      zkClient.close();
     }
-    solrClient.close();
+  }
+
+  private ConfigSetAdminResponse createConfigSet(String baseConfigSetName, String configSetName, Map<String, String> newProps, SolrClient solrClient, String username) throws SolrServerException, IOException {
+    Create create = new Create();
+    create.setBaseConfigSetName(baseConfigSetName).setConfigSetName(configSetName);
+    if (newProps != null) {
+      Properties p = new Properties();
+      p.putAll(newProps);
+      create.setNewConfigSetProperties(p);
+    }
+    if (username != null) {
+      create.addHeader("user", username);
+    }
+    return create.process(solrClient);
   }
 
   @SuppressWarnings({"rawtypes"})
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java
index 20a88e0..bb75e7b 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIExclusivity.java
@@ -17,6 +17,7 @@
 package org.apache.solr.cloud;
 
 import java.lang.invoke.MethodHandles;
+import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.LinkedList;
 import java.util.List;
@@ -92,6 +93,8 @@
 
   private void setupBaseConfigSet(String baseConfigSetName) throws Exception {
     solrCluster.uploadConfigSet(configset("configset-2"), baseConfigSetName);
+    //Make configset untrusted
+    solrCluster.getZkClient().setData("/configs/" + baseConfigSetName, "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), true);
   }
 
   private Exception getFirstExceptionOrNull(List<Exception> list) {
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java
index 692c82a..e23f71a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestConfigSetsAPIZkFailure.java
@@ -137,6 +137,7 @@
           getConfigSetProps(oldProps), StandardCharsets.UTF_8);
     }
     solrCluster.uploadConfigSet(tmpConfigDir.toPath(), baseConfigSetName);
+    solrCluster.getZkClient().setData("/configs/" + baseConfigSetName, "{\"trusted\": false}".getBytes(StandardCharsets.UTF_8), true);
   }
 
   private StringBuilder getConfigSetProps(Map<String, String> map) {
diff --git a/solr/core/src/test/org/apache/solr/core/MockInfoBean.java b/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
index ce73a02..1217283 100644
--- a/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
+++ b/solr/core/src/test/org/apache/solr/core/MockInfoBean.java
@@ -50,7 +50,7 @@
   @Override
   public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
     solrMetricsContext = parentContext.getChildContext(this);
-    MetricsMap metricsMap = new MetricsMap((detailed, map) -> {
+    MetricsMap metricsMap = new MetricsMap(map -> {
       map.put("Integer", 123);
       map.put("Double",567.534);
       map.put("Long", 32352463l);
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index d37a544..c673809 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -51,7 +51,6 @@
 import org.junit.After;
 import org.junit.Before;
 import org.noggit.JSONParser;
-import org.restlet.ext.servlet.ServerServlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -122,9 +121,6 @@
     FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
 
     final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("enable.update.log", "false");
diff --git a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
index c538551..3c640da 100644
--- a/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
+++ b/solr/core/src/test/org/apache/solr/handler/TestReplicationHandler.java
@@ -752,7 +752,7 @@
           break;
         } catch (NumberFormatException | AssertionError notYet) {
           if (log.isInfoEnabled()) {
-            log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, followerDetails); // logOk
+            log.info("{}th attempt failure on {} details are {}", retries + 1, notYet, followerDetails); // nowarn
           }
           if (retries>9) {
             log.error("giving up: ", notYet);
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index 4ab33df..8f2bdba 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -18,10 +18,12 @@
 package org.apache.solr.handler.admin;
 
 import java.util.Arrays;
+import java.util.HashMap;
 import java.util.Map;
 
 import com.codahale.metrics.Counter;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
@@ -84,15 +86,17 @@
     assertNotNull(nl);
     Object o = nl.get("SEARCHER.new.errors");
     assertNotNull(o); // counter type
-    assertTrue(o instanceof Map);
+    assertTrue(o instanceof MapWriter);
     // response wasn't serialized so we get here whatever MetricUtils produced instead of NamedList
-    assertNotNull(((Map) o).get("count"));
-    assertEquals(0L, ((Map) nl.get("SEARCHER.new.errors")).get("count"));
+    assertNotNull(((MapWriter) o)._get("count", null));
+    assertEquals(0L, ((MapWriter) nl.get("SEARCHER.new.errors"))._get("count", null));
     nl = (NamedList) values.get("solr.node");
     assertNotNull(nl.get("CONTAINER.cores.loaded")); // int gauge
-    assertEquals(1, ((Map) nl.get("CONTAINER.cores.loaded")).get("value"));
+    assertEquals(1, ((MapWriter) nl.get("CONTAINER.cores.loaded"))._get("value", null));
     assertNotNull(nl.get("ADMIN./admin/authorization.clientErrors")); // timer type
-    assertEquals(5, ((Map) nl.get("ADMIN./admin/authorization.clientErrors")).size());
+    Map<String, Object> map = new HashMap<>();
+    ((MapWriter) nl.get("ADMIN./admin/authorization.clientErrors")).toMap(map);
+    assertEquals(5, map.size());
 
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm,jetty"), resp);
@@ -184,9 +188,9 @@
     values = (NamedList) values.get("solr.core.collection1");
     assertEquals(1, values.size());
     @SuppressWarnings({"rawtypes"})
-    Map m = (Map) values.get("CACHE.core.fieldCache");
-    assertNotNull(m);
-    assertNotNull(m.get("entries_count"));
+    MapWriter writer = (MapWriter) values.get("CACHE.core.fieldCache");
+    assertNotNull(writer);
+    assertNotNull(writer._get("entries_count", null));
 
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", MetricsHandler.COMPACT_PARAM, "false", CommonParams.WT, "json", "group", "jvm", "prefix", "CONTAINER.cores"), resp);
@@ -200,8 +204,8 @@
     values = resp.getValues();
     assertNotNull(values.get("metrics"));
     @SuppressWarnings({"rawtypes"})
-    SimpleOrderedMap map = (SimpleOrderedMap) values.get("metrics");
-    assertEquals(0, map.size());
+    SimpleOrderedMap map1 = (SimpleOrderedMap) values.get("metrics");
+    assertEquals(0, map1.size());
     handler.close();
   }
 
@@ -243,9 +247,9 @@
     assertNotNull(nl);
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
-      assertTrue(v instanceof Map);
-      @SuppressWarnings({"rawtypes"})
-      Map map = (Map) v;
+      assertTrue(v instanceof MapWriter);
+      Map<String, Object> map = new HashMap<>();
+      ((MapWriter) v).toMap(map);
       assertTrue(map.size() > 2);
     });
 
@@ -259,10 +263,10 @@
     assertNotNull(nl);
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
-      assertTrue(v instanceof Map);
-      @SuppressWarnings({"rawtypes"})
-      Map map = (Map) v;
-      assertEquals(2, map.size());
+      assertTrue(v instanceof MapWriter);
+      Map<String, Object> map = new HashMap<>();
+      ((MapWriter) v).toMap(map);
+      assertEquals("k=" + k + ", v=" + map, 2, map.size());
       assertNotNull(map.get("inserts"));
       assertNotNull(map.get("size"));
     });
@@ -281,15 +285,14 @@
     NamedList values = resp.getValues();
     Object val = values.findRecursive("metrics", key1);
     assertNotNull(val);
-    assertTrue(val instanceof Map);
-    assertTrue(((Map) val).size() >= 2);
+    assertTrue(val instanceof MapWriter);
+    assertTrue(((MapWriter)val)._size() >= 2);
 
     String key2 = "solr.core.collection1:CACHE.core.fieldCache:entries_count";
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, key2), resp);
-    values = resp.getValues();
-    val = values.findRecursive("metrics", key2);
+    val = resp.getValues()._get("metrics/" + key2, null);
     assertNotNull(val);
     assertTrue(val instanceof Number);
 
@@ -297,8 +300,8 @@
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, key3), resp);
-    values = resp.getValues();
-    val = values.findRecursive("metrics", key3);
+
+    val = resp.getValues()._get( "metrics/" + key3, null);
     assertNotNull(val);
     assertTrue(val instanceof Number);
     assertEquals(3, ((Number) val).intValue());
@@ -307,20 +310,20 @@
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, key1, MetricsHandler.KEY_PARAM, key2, MetricsHandler.KEY_PARAM, key3), resp);
-    values = resp.getValues();
-    val = values.findRecursive("metrics", key1);
+
+    val = resp.getValues()._get( "metrics/" + key1, null);
     assertNotNull(val);
-    val = values.findRecursive("metrics", key2);
+    val = resp.getValues()._get( "metrics/" + key2, null);
     assertNotNull(val);
-    val = values.findRecursive("metrics", key3);
+    val = resp.getValues()._get( "metrics/" + key3, null);
     assertNotNull(val);
 
     String key4 = "solr.core.collection1:QUERY./select.requestTimes:1minRate";
     resp = new SolrQueryResponse();
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, key4), resp);
-    values = resp.getValues();
-    val = values.findRecursive("metrics", key4);
+    // the key contains a slash, need explicit list of path elements
+    val = resp.getValues()._get(Arrays.asList("metrics", key4), null);
     assertNotNull(val);
     assertTrue(val instanceof Number);
 
@@ -460,7 +463,7 @@
     @Override
     public void initializeMetrics(SolrMetricsContext parentContext, String scope) {
       super.initializeMetrics(parentContext, scope);
-      MetricsMap metrics = new MetricsMap((detailed, map) -> map.putAll(gaugevals));
+      MetricsMap metrics = new MetricsMap(map -> gaugevals.forEach((k, v) -> map.putNoEx(k, v)));
       solrMetricsContext.gauge(
            metrics,  true, "dumphandlergauge", getCategory().toString(), scope);
 
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java b/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java
index a77830f..f0ce7ef 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestConfigsApi.java
@@ -27,7 +27,6 @@
 import org.apache.solr.response.SolrQueryResponse;
 
 import static org.apache.solr.client.solrj.SolrRequest.METHOD.DELETE;
-import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
 import static org.apache.solr.cloud.Overseer.QUEUE_OPERATION;
 import static org.apache.solr.handler.admin.TestCollectionAPIs.compareOutput;
 
@@ -58,9 +57,6 @@
 //      for (Api api : handler.getApis()) apiBag.register(api, EMPTY_MAP);
       compareOutput(apiBag, "/cluster/configs/sample", DELETE, null, null,
           "{name :sample, operation:delete}");
-
-      compareOutput(apiBag, "/cluster/configs", POST, "{create:{name : newconf, baseConfigSet: sample }}", null,
-          "{operation:create, name :newconf,  baseConfigSet: sample, immutable: false }");
     }
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/request/TestStreamBody.java b/solr/core/src/test/org/apache/solr/request/TestStreamBody.java
index ab4648d..181a008 100644
--- a/solr/core/src/test/org/apache/solr/request/TestStreamBody.java
+++ b/solr/core/src/test/org/apache/solr/request/TestStreamBody.java
@@ -32,7 +32,6 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -50,9 +49,6 @@
     FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
 
     final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("enable.update.log", "false");
diff --git a/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java b/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java
index 5ce6a9e..354d316 100644
--- a/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java
+++ b/solr/core/src/test/org/apache/solr/rest/SolrRestletTestBase.java
@@ -18,7 +18,6 @@
 import org.apache.solr.util.RestTestBase;
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.junit.BeforeClass;
-import org.restlet.ext.servlet.ServerServlet;
 
 import java.nio.file.Path;
 import java.util.Properties;
@@ -26,9 +25,8 @@
 import java.util.TreeMap;
 
 /**
- * Base class for Solr Restlet-based tests. Creates jetty and test harness
- * with solrconfig.xml and schema-rest.xml, including "extra" servlets for
- * all Solr Restlet Application subclasses.
+ * Base class for Solr Rest-oriented API tests. Creates jetty and test harness
+ * with solrconfig.xml and schema-rest.xml.
  *
  * Use RestTestBase instead if you need to specialize the solrconfig,
  * the schema, or jetty/test harness creation; otherwise you'll get
@@ -51,9 +49,6 @@
     System.setProperty("configSetBaseDir", TEST_HOME());
 
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrSchemaRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrSchemaRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrSchemaRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
     Properties props = new Properties();
     props.setProperty("name", DEFAULT_TEST_CORENAME);
diff --git a/solr/core/src/test/org/apache/solr/rest/TestRestManager.java b/solr/core/src/test/org/apache/solr/rest/TestRestManager.java
index b4aff0b..bfe1f11 100644
--- a/solr/core/src/test/org/apache/solr/rest/TestRestManager.java
+++ b/solr/core/src/test/org/apache/solr/rest/TestRestManager.java
@@ -19,146 +19,19 @@
 import java.io.File;
 import java.nio.file.Paths;
 import java.util.Arrays;
-import java.util.Locale;
-import java.util.Set;
 
-import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.rest.ManagedResourceStorage.StorageIO;
 import org.apache.solr.rest.schema.analysis.ManagedWordSetResource;
-import org.junit.Ignore;
 import org.junit.Test;
-import org.restlet.Request;
-import org.restlet.data.Reference;
 
 /**
  * Tests {@link RestManager} functionality, including resource registration,
  * and REST API requests and responses.
  */
 public class TestRestManager extends SolrRestletTestBase {
-  
-  private class BogusManagedResource extends ManagedResource {
-
-    protected BogusManagedResource(String resourceId,
-        SolrResourceLoader loader, StorageIO storageIO) throws SolrException {
-      super(resourceId, loader, storageIO);
-    }
-
-    @Override
-    protected void onManagedDataLoadedFromStorage(NamedList<?> managedInitArgs, Object managedData)
-        throws SolrException {}
-
-    @Override
-    protected Object applyUpdatesToManagedData(Object updates) {
-      return null;
-    }
-
-    @Override
-    public void doDeleteChild(BaseSolrResource endpoint, String childId) {}
-
-    @Override
-    public void doGet(BaseSolrResource endpoint, String childId) {}
-    
-  }
-  
-  private static class MockAnalysisComponent implements ManagedResourceObserver {
-
-    @Override
-    public void onManagedResourceInitialized(NamedList<?> args, ManagedResource res)
-        throws SolrException {
-      assertTrue(res instanceof ManagedWordSetResource);      
-    }
-  }
-  
-  /**
-   * Test RestManager initialization and handling of registered ManagedResources. 
-   */
-  @Test
-  @Ignore
-  public void testManagedResourceRegistrationAndInitialization() throws Exception {
-    // first, we need to register some ManagedResources, which is done with the registry
-    // provided by the SolrResourceLoader
-    SolrResourceLoader loader = new SolrResourceLoader(Paths.get("./"));
-    
-    RestManager.Registry registry = loader.getManagedResourceRegistry();
-    assertNotNull("Expected a non-null RestManager.Registry from the SolrResourceLoader!", registry);
-    
-    String resourceId = "/config/test/foo";
-    registry.registerManagedResource(resourceId, 
-                                     ManagedWordSetResource.class, 
-                                     new MockAnalysisComponent());
-    
-    // verify the two different components can register the same ManagedResource in the registry
-    registry.registerManagedResource(resourceId, 
-                                     ManagedWordSetResource.class, 
-                                     new MockAnalysisComponent());
-    
-    // verify we can register another resource under a different resourceId
-    registry.registerManagedResource("/config/test/foo2", 
-                                     ManagedWordSetResource.class, 
-                                     new MockAnalysisComponent());
-
-    ignoreException("REST API path .* already registered to instances of ");
-
-    String failureMessage = "Should not be able to register a different"
-                          + " ManagedResource implementation for {}";
-
-    // verify that some other hooligan cannot register another type
-    // of ManagedResource implementation under the same resourceId
-    try {
-      registry.registerManagedResource(resourceId, 
-                                       BogusManagedResource.class, 
-                                       new MockAnalysisComponent());
-      fail(String.format(Locale.ROOT, failureMessage, resourceId));
-    } catch (SolrException solrExc) {
-      // expected output
-    }
-
-    resetExceptionIgnores();
-
-    ignoreException("is a reserved endpoint used by the Solr REST API!");
-
-    failureMessage = "Should not be able to register reserved endpoint {}";
-
-    // verify that already-spoken-for REST API endpoints can't be registered
-    Set<String> reservedEndpoints = registry.getReservedEndpoints();
-    assertTrue(reservedEndpoints.size() > 2);
-    assertTrue(reservedEndpoints.contains(RestManager.SCHEMA_BASE_PATH + RestManager.MANAGED_ENDPOINT));
-    for (String endpoint : reservedEndpoints) {
-
-      try {
-        registry.registerManagedResource
-            (endpoint, BogusManagedResource.class, new MockAnalysisComponent());
-        fail(String.format(Locale.ROOT, failureMessage, endpoint));
-      } catch (SolrException solrExc) {
-        // expected output
-      }
-
-      // also try to register already-spoken-for REST API endpoints with a child segment
-      endpoint += "/kid";
-      try {
-        registry.registerManagedResource
-            (endpoint, BogusManagedResource.class, new MockAnalysisComponent());
-        fail(String.format(Locale.ROOT, failureMessage, endpoint));
-      } catch (SolrException solrExc) {
-        // expected output
-      }
-    }
-
-    resetExceptionIgnores();
-    
-    NamedList<String> initArgs = new NamedList<>();
-    RestManager restManager = new RestManager();
-    restManager.init(loader, initArgs, new ManagedResourceStorage.InMemoryStorageIO());
-    
-    ManagedResource res = restManager.getManagedResource(resourceId);
-    assertTrue(res instanceof ManagedWordSetResource);    
-    assertEquals(res.getResourceId(), resourceId);
-    
-    restManager.getManagedResource("/config/test/foo2"); // exception if it isn't registered
-  }
 
   /**
    * Tests {@link RestManager}'s responses to REST API requests on /config/managed
@@ -255,27 +128,15 @@
 
   @Test
   public void testResolveResourceId () throws Exception {
-    Request testRequest = new Request();
-    Reference rootRef = new Reference("http://solr.apache.org/");
-    testRequest.setRootRef(rootRef);
-
-    Reference resourceRef = new Reference("http://solr.apache.org/schema/analysis/synonyms/de");
-    testRequest.setResourceRef(resourceRef);
-
-    String resourceId = RestManager.ManagedEndpoint.resolveResourceId(testRequest);
+    String path = "http://solr.apache.org/schema/analysis/synonyms/de";
+    String resourceId = RestManager.ManagedEndpoint.resolveResourceId(path);
     assertEquals(resourceId, "/schema/analysis/synonyms/de");
   }
 
   @Test
   public void testResolveResourceIdDecodeUrlEntities () throws Exception {
-    Request testRequest = new Request();
-    Reference rootRef = new Reference("http://solr.apache.org/");
-    testRequest.setRootRef(rootRef);
-
-    Reference resourceRef = new Reference("http://solr.apache.org/schema/analysis/synonyms/de/%C3%84ndern");
-    testRequest.setResourceRef(resourceRef);
-
-    String resourceId = RestManager.ManagedEndpoint.resolveResourceId(testRequest);
+    String path = "http://solr.apache.org/schema/analysis/synonyms/de/%C3%84ndern";
+    String resourceId = RestManager.ManagedEndpoint.resolveResourceId(path);
     assertEquals(resourceId, "/schema/analysis/synonyms/de/Ändern");
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java b/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java
index f50d669..4750b0f 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/TestSerializedLuceneMatchVersion.java
@@ -19,7 +19,6 @@
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 
 import java.util.SortedMap;
 import java.util.TreeMap;
@@ -30,9 +29,6 @@
   @BeforeClass
   public static void init() throws Exception {
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
     createJettyAndHarness(TEST_HOME(), "solrconfig-minimal.xml", "schema-rest-lucene-match-version.xml",
                           "/solr", true, extraServlets);
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java
index 4950ac4..d411325 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedStopFilterFactory.java
@@ -28,7 +28,6 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 
 /**
  * Test the REST API for managing stop words, which is pretty basic:
@@ -49,9 +48,6 @@
     FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
 
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("enable.update.log", "false");
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
index 603249b..8740cec 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymFilterFactory.java
@@ -32,7 +32,6 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 
 import static org.apache.solr.common.util.Utils.toJSONString;
 
@@ -49,9 +48,6 @@
     FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
 
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");
 
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("enable.update.log", "false");
diff --git a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java
index 66e9efe..778b19a 100644
--- a/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java
+++ b/solr/core/src/test/org/apache/solr/rest/schema/analysis/TestManagedSynonymGraphFilterFactory.java
@@ -33,7 +33,6 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 
 import static org.apache.solr.common.util.Utils.toJSONString;
 
@@ -51,9 +50,6 @@
     FileUtils.copyDirectory(new File(TEST_HOME()), tmpSolrHome.getAbsoluteFile());
 
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");
 
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("enable.update.log", "false");
diff --git a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java
index 91a6be8..09b2a6d 100644
--- a/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java
+++ b/solr/core/src/test/org/apache/solr/schema/TestCloudSchemaless.java
@@ -34,7 +34,6 @@
 import org.eclipse.jetty.servlet.ServletHolder;
 import org.junit.After;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -66,9 +65,6 @@
   @Override
   public SortedMap<ServletHolder,String> getExtraServlets() {
     final SortedMap<ServletHolder,String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
     return extraServlets;
   }
 
diff --git a/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
index 9149c19..e384d84 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRandomCollapseQParserPlugin.java
@@ -16,9 +16,10 @@
  */
 package org.apache.solr.search;
 
-import java.util.List;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
 
 import org.apache.lucene.util.TestUtil;
 import org.apache.solr.CursorPagingTest;
@@ -27,14 +28,14 @@
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
 import org.apache.solr.client.solrj.response.QueryResponse;
 import org.apache.solr.common.SolrDocument;
+import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.params.SolrParams;
-import static org.apache.solr.search.CollapsingQParserPlugin.NULL_IGNORE;
-import static org.apache.solr.search.CollapsingQParserPlugin.NULL_COLLAPSE;
-import static org.apache.solr.search.CollapsingQParserPlugin.NULL_EXPAND;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
+import static org.hamcrest.core.StringContains.containsString;
+
 public class TestRandomCollapseQParserPlugin extends SolrTestCaseJ4 {
 
   /** Full SolrServer instance for arbitrary introspection of response data and adding fqs */
@@ -43,7 +44,9 @@
   public static List<String> ALL_COLLAPSE_FIELD_NAMES;
 
   private static String[] NULL_POLICIES
-    = new String[] {NULL_IGNORE, NULL_COLLAPSE, NULL_EXPAND};
+    = new String[] {CollapsingQParserPlugin.NullPolicy.IGNORE.getName(),
+                    CollapsingQParserPlugin.NullPolicy.COLLAPSE.getName(),
+                    CollapsingQParserPlugin.NullPolicy.EXPAND.getName()};
   
   @BeforeClass
   public static void buildIndexAndClient() throws Exception {
@@ -143,7 +146,7 @@
           "" : " size=" + TestUtil.nextInt(random(),1,10000);
 
         final String nullPolicy = randomNullPolicy();
-        final String nullPs = NULL_IGNORE.equals(nullPolicy)
+        final String nullPs =  nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.IGNORE.getName())
           // ignore is default, randomly be explicit about it
           ? (random().nextBoolean() ? "" : " nullPolicy=ignore")
           : (" nullPolicy=" + nullPolicy);
@@ -162,14 +165,14 @@
             final Object collapseVal = doc.getFieldValue(collapseField);
             
             if (null == collapseVal) {
-              if (NULL_EXPAND.equals(nullPolicy)) {
+              if (nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.EXPAND.getName())) {
                 // nothing to check for this doc, it's in its own group
                 continue;
               }
               
               assertFalse(groupHeadId + " has null collapseVal but nullPolicy==ignore; " + 
                           "mainP: " + mainP + ", collapseP: " + collapseP,
-                          NULL_IGNORE.equals(nullPolicy));
+                          nullPolicy.equals(CollapsingQParserPlugin.NullPolicy.IGNORE.getName()));
             }
             
             // workaround for SOLR-8082...
@@ -204,6 +207,40 @@
       }
     }
   }
+  
+  public void testParsedFilterQueryResponse() throws Exception {
+    String nullPolicy = randomNullPolicy();
+    String groupHeadSort = "'_version_ asc'";
+    String collapseSize = "5000";
+    String collapseHint = "top_fc";
+    String filterQuery = "{!collapse field=id sort=" + groupHeadSort + " nullPolicy=" + nullPolicy + " size=" +
+                                collapseSize + " hint=" + collapseHint + "}";
+    SolrParams solrParams = params("q", "*:*", "rows", "0", "debug", "true", "fq", filterQuery);
+
+    QueryResponse response = SOLR.query(solrParams);
+    // Query name is occurring twice, this should be handled in QueryParsing.toString
+    String expectedParsedFilterString = "CollapsingPostFilter(CollapsingPostFilter(field=id, " +
+        "nullPolicy=" + nullPolicy + ", GroupHeadSelector(selectorText=" + groupHeadSort.substring(1,
+        groupHeadSort.length() - 1) + ", type=SORT" +
+        "), hint=" + collapseHint + ", size=" + collapseSize + "))";
+    List<String> expectedParsedFilterQuery = Collections.singletonList(expectedParsedFilterString);
+    assertEquals(expectedParsedFilterQuery, response.getDebugMap().get("parsed_filter_queries"));
+    assertEquals(Collections.singletonList(filterQuery), response.getDebugMap().get("filter_queries"));
+  }
+
+  public void testNullPolicy() {
+    String nullPolicy = "xyz";
+    String groupHeadSort = "'_version_ asc'";
+    String filterQuery = "{!collapse field=id sort=" + groupHeadSort + " nullPolicy=" + nullPolicy + "}";
+    SolrParams solrParams = params("q", "*:*", "fq", filterQuery);
+
+    SolrException e = expectThrows(SolrException.class, () -> SOLR.query(solrParams));
+    assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
+    assertThat(e.getMessage(), containsString("Invalid nullPolicy: " + nullPolicy));
+
+    // valid nullPolicy
+    assertQ(req("q", "*:*", "fq", "{!collapse field=id nullPolicy=" + randomNullPolicy() + "}"));
+  }
 
   private String randomNullPolicy() {
     return NULL_POLICIES[ TestUtil.nextInt(random(), 0, NULL_POLICIES.length-1) ];
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 9b977ee..c23a4f0 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -265,7 +265,7 @@
         assertTrue(obj.containsKey("memory"));
       } catch (Exception e) {
         log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}"
-            , e, baos.toString(StandardCharsets.UTF_8.name())); // logOk
+            , e, baos.toString(StandardCharsets.UTF_8.name())); // nowarn
       }
 
       SolrParams params = new MapSolrParams(Collections.singletonMap("q", "*:*"));
diff --git a/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java b/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java
new file mode 100644
index 0000000..115f8f6
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/servlet/SecurityHeadersTest.java
@@ -0,0 +1,93 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.servlet;
+
+import java.net.URI;
+import java.util.Arrays;
+import java.util.Map;
+
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.params.SolrParams;
+
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpGet;
+
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+/**
+ * Confirm that the expected security headers are returned when making requests to solr,
+ * regardless of wether the request is interanlly forwared to another node.
+ */
+@org.apache.lucene.util.LuceneTestCase.AwaitsFix(bugUrl="https://issues.apache.org/jira/browse/SOLR-14903")
+public class SecurityHeadersTest extends SolrCloudTestCase {
+
+  private static final String COLLECTION = "xxx" ;
+
+  private static final int NODE_COUNT = 2;
+
+  /* A quick and dirty mapping of the headers/values we expect to find */
+  private static final SolrParams EXPECTED_HEADERS
+    = params("Content-Security-Policy", "default-src 'none'; base-uri 'none'; connect-src 'self'; form-action 'self'; font-src 'self'; frame-ancestors 'none'; img-src 'self'; media-src 'self'; style-src 'self' 'unsafe-inline'; script-src 'self'; worker-src 'self';",
+             "X-Content-Type-Options", "nosniff",
+             "X-Frame-Options", "SAMEORIGIN",
+             "X-XSS-Protection", "1; mode=block");
+  
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+
+    configureCluster(NODE_COUNT).configure();
+
+    // create a 1 shard x 1 node collection
+    CollectionAdminRequest.createCollection(COLLECTION, null, 1, 1)
+        .process(cluster.getSolrClient());
+
+  }
+
+  @Test
+  public void testHeaders() throws Exception {
+    // it shouldn't matter what node our lone replica/core wound up on, headers should be the same...
+    for (JettySolrRunner jetty : cluster.getJettySolrRunners()) {
+      try (SolrClient solrClient = jetty.newClient()) {
+        final HttpClient client = ((HttpSolrClient) solrClient).getHttpClient();
+
+        // path shouldn't matter -- even if bogus / 404
+        for (String path : Arrays.asList("/select", "/bogus")) {
+          final HttpResponse resp = client.execute
+            (new HttpGet(URI.create(jetty.getBaseUrl().toString() + "/" + COLLECTION + path)));
+
+          for (Map.Entry<String,String[]> entry : EXPECTED_HEADERS) {
+            // these exact arrays (of 1 element each) should be *ALL* of the header instances...
+            // no more, no less.
+            assertEquals(entry.getValue(),
+                         resp.getHeaders(entry.getKey()));
+            
+          }
+        }
+      }
+    }
+    
+  }
+
+  
+}
+
diff --git a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
index 2214f88..baf6320 100644
--- a/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
+++ b/solr/core/src/test/org/apache/solr/update/TestInPlaceUpdatesDistrib.java
@@ -943,7 +943,7 @@
 
     if (log.isInfoEnabled()) {
       log.info("Non leader 0: {}", ((HttpSolrClient) NONLEADERS.get(0)).getBaseURL());
-      log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // logOk
+      log.info("Non leader 1: {}", ((HttpSolrClient) NONLEADERS.get(1)).getBaseURL()); // nowarn
     }
     
     SolrDocument doc0 = NONLEADERS.get(0).getById(String.valueOf(0), params("distrib", "false"));
@@ -1034,7 +1034,7 @@
       if (log.isInfoEnabled()) {
         log.info("Testing client (Fetch missing test): {}", ((HttpSolrClient) client).getBaseURL());
         log.info("Version at {} is: {}"
-            , ((HttpSolrClient) client).getBaseURL(), getReplicaValue(client, 1, "_version_")); // logOk
+            , ((HttpSolrClient) client).getBaseURL(), getReplicaValue(client, 1, "_version_")); // nowarn
       }
       assertReplicaValue(client, 1, "inplace_updatable_float", (newinplace_updatable_float + 2.0f),
           "inplace_updatable_float didn't match for replica at client: " + ((HttpSolrClient) client).getBaseURL());
@@ -1344,7 +1344,7 @@
       if (log.isInfoEnabled()) {
         log.info("Testing client (testDBQUsingUpdatedFieldFromDroppedUpdate): {}", ((HttpSolrClient) client).getBaseURL());
         log.info("Version at {} is: {}", ((HttpSolrClient) client).getBaseURL(),
-            getReplicaValue(client, 1, "_version_")); // logOk
+            getReplicaValue(client, 1, "_version_")); // nowarn
       }
       assertNull(client.getById("1", params("distrib", "false")));
     }
diff --git a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java
index 618013e..9e3d6b8 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/CategoryRoutedAliasUpdateProcessorTest.java
@@ -76,7 +76,7 @@
     //log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
-      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk
+      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java
index 59aced6..ef516de 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/DimensionalRoutedAliasUpdateProcessorTest.java
@@ -74,7 +74,7 @@
     //log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
-      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk
+      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java
index c3214f4..5141601 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/TimeRoutedAliasUpdateProcessorTest.java
@@ -91,7 +91,7 @@
     //log this to help debug potential causes of problems
     if (log.isInfoEnabled()) {
       log.info("SolrClient: {}", solrClient);
-      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // logOk
+      log.info("ClusterStateProvider {}", solrClient.getClusterStateProvider()); // nowarn
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
index 5721ecd..c15abed 100644
--- a/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
+++ b/solr/core/src/test/org/apache/solr/util/TestSolrCLIRunExample.java
@@ -359,7 +359,7 @@
         assertEquals("it should be ok "+tool+" "+Arrays.toString(toolArgs),0, status);
       } catch (Exception e) {
         log.error("RunExampleTool failed due to: {}; stdout from tool prior to failure: {}"
-            , e , baos.toString(StandardCharsets.UTF_8.name())); // logOk
+            , e , baos.toString(StandardCharsets.UTF_8.name())); // nowarn
         throw e;
       }
   
diff --git a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
index f9595e1..e95bea4 100644
--- a/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
+++ b/solr/core/src/test/org/apache/solr/util/stats/MetricUtilsTest.java
@@ -31,8 +31,11 @@
 import com.codahale.metrics.Snapshot;
 import com.codahale.metrics.Timer;
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.metrics.AggregateMetric;
+import org.apache.solr.metrics.MetricsMap;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.junit.Test;
 
 public class MetricUtilsTest extends SolrTestCaseJ4 {
@@ -48,8 +51,8 @@
     }
     // obtain timer metrics
     Map<String,Object> map = new HashMap<>();
-    MetricUtils.convertTimer("", timer, MetricUtils.PropertyFilter.ALL, false, false, ".", (k, v) -> {
-      map.putAll((Map<String,Object>)v);
+    MetricUtils.convertTimer("", timer, MetricUtils.ALL_PROPERTIES, false, false, ".", (k, v) -> {
+      ((MapWriter) v).toMap(map);
     });
     @SuppressWarnings({"rawtypes"})
     NamedList lst = new NamedList(map);
@@ -103,10 +106,26 @@
     registry.register("gauge", gauge);
     Gauge<Long> error = () -> {throw new InternalError("Memory Pool not found error");};
     registry.register("memory.expected.error", error);
+
+    MetricsMap metricsMapWithMap = new MetricsMap((detailed, map) -> {
+      map.put("foo", "bar");
+    });
+    registry.register("mapWithMap", metricsMapWithMap);
+    MetricsMap metricsMap = new MetricsMap(map -> {
+      map.putNoEx("foo", "bar");
+    });
+    registry.register("map", metricsMap);
+
+    SolrMetricManager.GaugeWrapper<Map<String,Object>> gaugeWrapper = new SolrMetricManager.GaugeWrapper<>(metricsMap, "foo-tag");
+    registry.register("wrappedGauge", gaugeWrapper);
+
     MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
-        MetricUtils.PropertyFilter.ALL, false, false, false, false, (k, o) -> {
+        MetricUtils.ALL_PROPERTIES, false, false, false, false, (k, o) -> {
       @SuppressWarnings({"rawtypes"})
-      Map v = (Map)o;
+      Map<String, Object> v = new HashMap<>();
+      if (o != null) {
+        ((MapWriter) o).toMap(v);
+      }
       if (k.startsWith("counter")) {
         assertEquals(1L, v.get("count"));
       } else if (k.startsWith("gauge")) {
@@ -139,12 +158,16 @@
         assertEquals(0D, v.get("max"));
         assertEquals(0D, v.get("mean"));
       } else if (k.startsWith("memory.expected.error")) {
-        assertNull(v);
+        assertTrue(v.isEmpty());
+      } else if (k.startsWith("map") || k.startsWith("wrapped")) {
+        assertNotNull(v.toString(), v.get("value"));
+        assertTrue(v.toString(), v.get("value") instanceof Map);
+        assertEquals(v.toString(), "bar", ((Map) v.get("value")).get("foo"));
       }
     });
     // test compact format
     MetricUtils.toMaps(registry, Collections.singletonList(MetricFilter.ALL), MetricFilter.ALL,
-        MetricUtils.PropertyFilter.ALL, false, false, true, false, (k, o) -> {
+        MetricUtils.ALL_PROPERTIES, false, false, true, false, (k, o) -> {
           if (k.startsWith("counter")) {
             assertTrue(o instanceof Long);
             assertEquals(1L, o);
@@ -152,25 +175,25 @@
             assertTrue(o instanceof String);
             assertEquals("foobar", o);
           } else if (k.startsWith("timer")) {
-            assertTrue(o instanceof Map);
-            @SuppressWarnings({"rawtypes"})
-            Map v = (Map)o;
+            assertTrue(o instanceof MapWriter);
+            Map<String, Object> v = new HashMap<>();
+            ((MapWriter) o).toMap(v);
             assertEquals(1L, v.get("count"));
             assertTrue(((Number)v.get("min_ms")).intValue() > 100);
           } else if (k.startsWith("meter")) {
-            assertTrue(o instanceof Map);
-            @SuppressWarnings({"rawtypes"})
-            Map v = (Map)o;
+            assertTrue(o instanceof MapWriter);
+            Map<String, Object> v = new HashMap<>();
+            ((MapWriter) o).toMap(v);
             assertEquals(1L, v.get("count"));
           } else if (k.startsWith("histogram")) {
-            assertTrue(o instanceof Map);
-            @SuppressWarnings({"rawtypes"})
-            Map v = (Map)o;
+            assertTrue(o instanceof MapWriter);
+            Map<String, Object> v = new HashMap<>();
+            ((MapWriter) o).toMap(v);
             assertEquals(1L, v.get("count"));
           } else if (k.startsWith("aggregate1")) {
-            assertTrue(o instanceof Map);
-            @SuppressWarnings({"rawtypes"})
-            Map v = (Map)o;
+            assertTrue(o instanceof MapWriter);
+            Map<String, Object> v = new HashMap<>();
+            ((MapWriter) o).toMap(v);
             assertEquals(4, v.get("count"));
             Map<String, Object> values = (Map<String, Object>)v.get("values");
             assertNotNull(values);
@@ -182,9 +205,9 @@
             assertEquals(-2, update.get("value"));
             assertEquals(2, update.get("updateCount"));
           } else if (k.startsWith("aggregate2")) {
-            assertTrue(o instanceof Map);
-            @SuppressWarnings({"rawtypes"})
-            Map v = (Map)o;
+            assertTrue(o instanceof MapWriter);
+            Map<String, Object> v = new HashMap<>();
+            ((MapWriter) o).toMap(v);
             assertEquals(2, v.get("count"));
             Map<String, Object> values = (Map<String, Object>)v.get("values");
             assertNotNull(values);
@@ -197,9 +220,15 @@
             assertEquals(1, update.get("updateCount"));
           } else if (k.startsWith("memory.expected.error")) {
             assertNull(o);
+          } else if (k.startsWith("map") || k.startsWith("wrapped")) {
+            assertTrue(o instanceof MapWriter);
+            MapWriter writer = (MapWriter) o;
+            assertEquals(1, writer._size());
+            assertEquals("bar", writer._get("foo", null));
           } else {
-            @SuppressWarnings({"rawtypes"})
-            Map v = (Map)o;
+            assertTrue(o instanceof MapWriter);
+            Map<String, Object> v = new HashMap<>();
+            ((MapWriter) o).toMap(v);
             assertEquals(1L, v.get("count"));
           }
         });
diff --git a/solr/example/films/README.md b/solr/example/films/README.md
index 8bd0ce7..d82322c 100644
--- a/solr/example/films/README.md
+++ b/solr/example/films/README.md
@@ -69,29 +69,6 @@
      
        http://localhost:8983/solr/films/query?q=*:*&facet=true&facet.field=genre
 
-     - Browse the indexed films in a traditional browser search interface:
-     
-       http://localhost:8983/solr/films/browse
-
-       Now browse including the genre field as a facet:
-       
-       http://localhost:8983/solr/films/browse?facet.field=genre
-
-       If you want to set a facet for /browse to keep around for every request add the facet.field into the "facets"
-       param set (which the /browse handler is already configured to use):
-
-        ```
-        curl http://localhost:8983/solr/films/config/params -H 'Content-type:application/json'  -d '{
-          "update" : {
-            "facets": {
-              "facet.field":"genre"
-            }
-          }
-        }'
-        ```
-       
-        And now http://localhost:8983/solr/films/browse will display the _genre_ facet automatically.
-
 Exploring the data further - 
 
   * Increase the MAX_ITERATIONS value, put in your freebase API_KEY and run the film_data_generator.py script using Python 3.
@@ -133,29 +110,6 @@
     }
 }'
 bin/post -c films example/films/films.json
-curl http://localhost:8983/solr/films/config/params -H 'Content-type:application/json'  -d '{
-"update" : {
-  "facets": {
-    "facet.field":"genre"
-    }
-  }
-}'
 
 # END_OF_SCRIPT
 ```
-
-Additional fun -
-
-```
-Add highlighting:
-curl http://localhost:8983/solr/films/config/params -H 'Content-type:application/json'  -d '{
-"set" : {
-  "browse": {
-    "hl":"on",
-    "hl.fl":"name"
-    }
-  }
-}'
-```
-
-try http://localhost:8983/solr/films/browse?q=batman now, and you'll see "batman" highlighted in the results
diff --git a/solr/licenses/commons-io-2.6.jar.sha1 b/solr/licenses/commons-io-2.6.jar.sha1
deleted file mode 100644
index 9fa55f5..0000000
--- a/solr/licenses/commons-io-2.6.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-815893df5f31da2ece4040fe0a12fd44b577afaf
diff --git a/solr/licenses/commons-io-2.8.0.jar.sha1 b/solr/licenses/commons-io-2.8.0.jar.sha1
new file mode 100644
index 0000000..3a5fc71
--- /dev/null
+++ b/solr/licenses/commons-io-2.8.0.jar.sha1
@@ -0,0 +1 @@
+92999e26e6534606b5678014e66948286298a35c
diff --git a/solr/licenses/org.restlet-2.4.3.jar.sha1 b/solr/licenses/org.restlet-2.4.3.jar.sha1
deleted file mode 100644
index 67a7f5c..0000000
--- a/solr/licenses/org.restlet-2.4.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-fb9441cfe1e17b04976bed9b0dfd8c4a39c41b78
diff --git a/solr/licenses/org.restlet-LICENSE-ASL.txt b/solr/licenses/org.restlet-LICENSE-ASL.txt
deleted file mode 100644
index 261eeb9..0000000
--- a/solr/licenses/org.restlet-LICENSE-ASL.txt
+++ /dev/null
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/solr/licenses/org.restlet-NOTICE.txt b/solr/licenses/org.restlet-NOTICE.txt
deleted file mode 100644
index c7839b5..0000000
--- a/solr/licenses/org.restlet-NOTICE.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-This product includes software developed by
-the Restlet project (http://www.restlet.org).
\ No newline at end of file
diff --git a/solr/licenses/org.restlet.ext.servlet-2.4.3.jar.sha1 b/solr/licenses/org.restlet.ext.servlet-2.4.3.jar.sha1
deleted file mode 100644
index b0aa84b..0000000
--- a/solr/licenses/org.restlet.ext.servlet-2.4.3.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-5e805b9c6c07cd21958288805451236895316f56
diff --git a/solr/licenses/org.restlet.ext.servlet-LICENSE-ASL.txt b/solr/licenses/org.restlet.ext.servlet-LICENSE-ASL.txt
deleted file mode 100644
index 261eeb9..0000000
--- a/solr/licenses/org.restlet.ext.servlet-LICENSE-ASL.txt
+++ /dev/null
@@ -1,201 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/solr/licenses/org.restlet.ext.servlet-NOTICE.txt b/solr/licenses/org.restlet.ext.servlet-NOTICE.txt
deleted file mode 100644
index 154ac0a..0000000
--- a/solr/licenses/org.restlet.ext.servlet-NOTICE.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-This product includes software developed by
-the SimpleXML project (http://simple.sourceforge.net).
\ No newline at end of file
diff --git a/solr/server/solr/configsets/_default/conf/solrconfig.xml b/solr/server/solr/configsets/_default/conf/solrconfig.xml
index 61a09b2..0b7a912 100644
--- a/solr/server/solr/configsets/_default/conf/solrconfig.xml
+++ b/solr/server/solr/configsets/_default/conf/solrconfig.xml
@@ -735,90 +735,26 @@
 
   <!-- Request Handlers
 
-       http://wiki.apache.org/solr/SolrRequestHandler
+       https://lucene.apache.org/solr/guide/requesthandlers-and-searchcomponents-in-solrconfig.html
 
-       Incoming queries will be dispatched to a specific handler by name
-       based on the path specified in the request.
+       Incoming queries will be dispatched to a specific handler by name based on the path specified in the request.
 
-       If a Request Handler is declared with startup="lazy", then it will
-       not be initialized until the first request that uses it.
+       All handlers (Search Handlers, Update Request Handlers, and other specialized types) can have default parameters (defaults, appends and invariants).
 
+       Search Handlers can also append, prepend, or even replace the default or defined Search Components.
+
+       Update Request Handlers can leverage Update Request Processors to pre-process documents after they are loaded
+       and before they are indexed/stored.
+
+       Not all Request Handlers are defined in the solrconfig.xml, many are implicit.
     -->
-  <!-- SearchHandler
 
-       http://wiki.apache.org/solr/SearchHandler
-
-       For processing Search Queries, the primary Request Handler
-       provided with Solr is "SearchHandler" It delegates to a sequent
-       of SearchComponents (see below) and supports distributed
-       queries across multiple shards
-    -->
+  <!-- Primary search handler, expected by most clients, examples and UI frameworks -->
   <requestHandler name="/select" class="solr.SearchHandler">
-    <!-- default values for query parameters can be specified, these
-         will be overridden by parameters in the request
-      -->
     <lst name="defaults">
       <str name="echoParams">explicit</str>
       <int name="rows">10</int>
-      <!-- Default search field
-         <str name="df">text</str> 
-        -->
-      <!-- Change from JSON to XML format (the default prior to Solr 7.0)
-         <str name="wt">xml</str> 
-        -->
     </lst>
-    <!-- In addition to defaults, "appends" params can be specified
-         to identify values which should be appended to the list of
-         multi-val params from the query (or the existing "defaults").
-      -->
-    <!-- In this example, the param "fq=instock:true" would be appended to
-         any query time fq params the user may specify, as a mechanism for
-         partitioning the index, independent of any user selected filtering
-         that may also be desired (perhaps as a result of faceted searching).
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "appends" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
-    <!--
-       <lst name="appends">
-         <str name="fq">inStock:true</str>
-       </lst>
-      -->
-    <!-- "invariants" are a way of letting the Solr maintainer lock down
-         the options available to Solr clients.  Any params values
-         specified here are used regardless of what values may be specified
-         in either the query, the "defaults", or the "appends" params.
-
-         In this example, the facet.field and facet.query params would
-         be fixed, limiting the facets clients can use.  Faceting is
-         not turned on by default - but if the client does specify
-         facet=true in the request, these are the only facets they
-         will be able to see counts for; regardless of what other
-         facet.field or facet.query params they may specify.
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "invariants" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
-    <!--
-       <lst name="invariants">
-         <str name="facet.field">cat</str>
-         <str name="facet.field">manu_exact</str>
-         <str name="facet.query">price:[* TO 500]</str>
-         <str name="facet.query">price:[500 TO *]</str>
-       </lst>
-      -->
-    <!-- If the default list of SearchComponents is not desired, that
-         list can either be overridden completely, or components can be
-         prepended or appended to the default list.  (see below)
-      -->
-    <!--
-       <arr name="components">
-         <str>nameOfCustomComponent1</str>
-         <str>nameOfCustomComponent2</str>
-       </arr>
-      -->
   </requestHandler>
 
   <!-- A request handler that returns indented JSON by default -->
@@ -830,62 +766,19 @@
     </lst>
   </requestHandler>
 
+  <!-- Shared parameters for multiple Request Handlers -->
   <initParams path="/update/**,/query,/select,/spell">
     <lst name="defaults">
       <str name="df">_text_</str>
     </lst>
   </initParams>
 
-  <!-- Search Components
-
-       Search components are registered to SolrCore and used by
-       instances of SearchHandler (which can access them by name)
-
-       By default, the following components are available:
-
-       <searchComponent name="query"     class="solr.QueryComponent" />
-       <searchComponent name="facet"     class="solr.FacetComponent" />
-       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
-       <searchComponent name="highlight" class="solr.HighlightComponent" />
-       <searchComponent name="stats"     class="solr.StatsComponent" />
-       <searchComponent name="terms"     class="solr.TermsComponent" />
-       <searchComponent name="debug"     class="solr.DebugComponent" />
-
-       Default configuration in a requestHandler would look like:
-
-       <arr name="components">
-         <str>query</str>
-         <str>facet</str>
-         <str>mlt</str>
-         <str>highlight</str>
-         <str>stats</str>
-         <str>debug</str>
-       </arr>
-
-       If you register a searchComponent to one of the standard names,
-       that will be used instead of the default.
-
-       To insert components before or after the 'standard' components, use:
-
-       <arr name="first-components">
-         <str>myFirstComponentName</str>
-       </arr>
-
-       <arr name="last-components">
-         <str>myLastComponentName</str>
-       </arr>
-
-       NOTE: The component registered with the name "debug" will
-       always be executed after the "last-components"
-
-     -->
-
   <!-- Spell Check
 
        The spell check component can return a list of alternative spelling
        suggestions.
 
-       http://wiki.apache.org/solr/SpellCheckComponent
+       https://lucene.apache.org/solr/guide/spell-checking.html
     -->
   <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
 
@@ -942,7 +835,7 @@
        IN OTHER WORDS, THERE IS REALLY GOOD CHANCE THE SETUP BELOW IS
        NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
 
-       See http://wiki.apache.org/solr/SpellCheckComponent for details
+       See https://lucene.apache.org/solr/guide/spell-checking.html for details
        on the request parameters.
     -->
   <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
@@ -969,7 +862,7 @@
 
   <!-- Highlighting Component
 
-       http://wiki.apache.org/solr/HighlightingParameters
+       https://lucene.apache.org/solr/guide/highlighting.html
     -->
   <searchComponent class="solr.HighlightComponent" name="highlight">
     <highlighting>
@@ -1073,29 +966,28 @@
     </highlighting>
   </searchComponent>
 
-  <!-- Update Processors
+  <!-- Update Request Processors
+       https://lucene.apache.org/solr/guide/update-request-processors.html
 
-       Chains of Update Processor Factories for dealing with Update
-       Requests can be declared, and then used by name in Update
-       Request Processors
-
-       http://wiki.apache.org/solr/UpdateRequestProcessor
-
+       Chains or individual Update Request Processor Factories can be declared and referenced
+       to preprocess documents sent to Update Request Handlers.
     -->
 
   <!-- Add unknown fields to the schema
 
-       Field type guessing update processors that will
+       Field type guessing update request processors that will
        attempt to parse string-typed field values as Booleans, Longs,
        Doubles, or Dates, and then add schema fields with the guessed
-       field types. Text content will be indexed as "text_general" as
+       field types. Text content will be indexed as "text_general" as
        well as a copy to a plain string version in *_str.
+       See the updateRequestProcessorChain defined later for the order they are executed in.
 
        These require that the schema is both managed and mutable, by
        declaring schemaFactory as ManagedIndexSchemaFactory, with
        mutable specified as true.
 
-       See http://wiki.apache.org/solr/GuessingFieldTypes
+       See https://lucene.apache.org/solr/guide/schemaless-mode.html for further explanation.
+
     -->
   <updateProcessor class="solr.UUIDUpdateProcessorFactory" name="uuid"/>
   <updateProcessor class="solr.RemoveBlankFieldUpdateProcessorFactory" name="remove-blank"/>
@@ -1157,7 +1049,7 @@
 
   <!-- Deduplication
 
-       An example dedup update processor that creates the "id" field
+       An example dedup update request processor chain that creates the "id" field
        on the fly based on the hash code of some other fields.  This
        example has overwriteDupes set to false since we are using the
        id field as the signatureField and Solr will maintain
diff --git a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
index e8fda27..4395070 100644
--- a/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
+++ b/solr/server/solr/configsets/sample_techproducts_configs/conf/solrconfig.xml
@@ -738,87 +738,34 @@
 
   <!-- Request Handlers
 
-       http://wiki.apache.org/solr/SolrRequestHandler
+       https://lucene.apache.org/solr/guide/requesthandlers-and-searchcomponents-in-solrconfig.html
 
-       Incoming queries will be dispatched to a specific handler by name
-       based on the path specified in the request.
+       Incoming queries will be dispatched to a specific handler by name based on the path specified in the request.
 
-       If a Request Handler is declared with startup="lazy", then it will
-       not be initialized until the first request that uses it.
+       All handlers (Search Handlers, Update Request Handlers, and other specialized types) can have default parameters (defaults, appends and invariants).
 
+       Search Handlers can also append, prepend or even replace the default or defined Search Components.
+
+       Update Request Handlers can leverage Update Request Processors to pre-process documents after they are loaded
+       and before they are indexed/stored.
+
+       Not all Request Handlers are defined in the solrconfig.xml, many are implicit.
     -->
-  <!-- SearchHandler
 
-       http://wiki.apache.org/solr/SearchHandler
-
-       For processing Search Queries, the primary Request Handler
-       provided with Solr is "SearchHandler" It delegates to a sequent
-       of SearchComponents (see below) and supports distributed
-       queries across multiple shards
-    -->
+  <!-- Primary search handler, expected by most clients, examples and UI frameworks -->
   <requestHandler name="/select" class="solr.SearchHandler">
-    <!-- default values for query parameters can be specified, these
-         will be overridden by parameters in the request
-      -->
      <lst name="defaults">
        <str name="echoParams">explicit</str>
        <int name="rows">10</int>
-       <!-- Default search field
-          <str name="df">text</str>
-         -->
-       <!-- Change from JSON to XML format (the default prior to Solr 7.0)
-          <str name="wt">xml</str>
-         -->
-       <!-- Controls the distribution of a query to shards other than itself.
-            Consider making 'preferLocalShards' true when:
-              1) more than 1 replica may be located on a node
-              2) Number of shards > 1
-              3) CloudSolrClient or LbHttpSolrServer is used by clients.
-            Without this option, every core broadcasts the distributed query to
-            a replica of each shard where the replicas are chosen randomly.
-            This option directs the cores to prefer cores hosted locally, thus
-            preventing network delays between machines.
-            This behavior also immunizes a bad/slow machine from slowing down all
-            the good machines (if those good machines were querying this bad one).
-
-            Specify this option=false for clients connecting through HttpSolrServer
-       -->
        <bool name="preferLocalShards">false</bool>
      </lst>
-    <!-- In addition to defaults, "appends" params can be specified
-         to identify values which should be appended to the list of
-         multi-val params from the query (or the existing "defaults").
-      -->
-    <!-- In this example, the param "fq=instock:true" would be appended to
-         any query time fq params the user may specify, as a mechanism for
-         partitioning the index, independent of any user selected filtering
-         that may also be desired (perhaps as a result of faceted searching).
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "appends" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
+    <!-- "appends" example to ensure only products in stock are returned when using this Search Handler -->
     <!--
        <lst name="appends">
          <str name="fq">inStock:true</str>
        </lst>
       -->
-    <!-- "invariants" are a way of letting the Solr maintainer lock down
-         the options available to Solr clients.  Any params values
-         specified here are used regardless of what values may be specified
-         in either the query, the "defaults", or the "appends" params.
-
-         In this example, the facet.field and facet.query params would
-         be fixed, limiting the facets clients can use.  Faceting is
-         not turned on by default - but if the client does specify
-         facet=true in the request, these are the only facets they
-         will be able to see counts for; regardless of what other
-         facet.field or facet.query params they may specify.
-
-         NOTE: there is *absolutely* nothing a client can do to prevent these
-         "invariants" values from being used, so don't use this mechanism
-         unless you are sure you always want it.
-      -->
+    <!-- "invariants" example that will completely lock down the facets fields and queries if the facets are turned on -->
     <!--
        <lst name="invariants">
          <str name="facet.field">cat</str>
@@ -827,16 +774,6 @@
          <str name="facet.query">price:[500 TO *]</str>
        </lst>
       -->
-    <!-- If the default list of SearchComponents is not desired, that
-         list can either be overridden completely, or components can be
-         prepended or appended to the default list.  (see below)
-      -->
-    <!--
-       <arr name="components">
-         <str>nameOfCustomComponent1</str>
-         <str>nameOfCustomComponent2</str>
-       </arr>
-      -->
     </requestHandler>
 
   <!-- A request handler that returns indented JSON by default -->
@@ -855,7 +792,7 @@
     </lst>
   </initParams>
 
-  <!-- The following are implicitly added
+  <!-- The following are some of the implicit Update Request Handlers
   <requestHandler name="/update/json" class="solr.UpdateRequestHandler">
         <lst name="invariants">
          <str name="stream.contentType">application/json</str>
@@ -870,7 +807,7 @@
 
   <!-- Solr Cell Update Request Handler
 
-       http://wiki.apache.org/solr/ExtractingRequestHandler
+       https://lucene.apache.org/solr/guide/uploading-data-with-solr-cell-using-apache-tika.html
 
     -->
   <requestHandler name="/update/extract"
@@ -887,56 +824,12 @@
     </lst>
   </requestHandler>
 
-  <!-- Search Components
-
-       Search components are registered to SolrCore and used by
-       instances of SearchHandler (which can access them by name)
-
-       By default, the following components are available:
-
-       <searchComponent name="query"     class="solr.QueryComponent" />
-       <searchComponent name="facet"     class="solr.FacetComponent" />
-       <searchComponent name="mlt"       class="solr.MoreLikeThisComponent" />
-       <searchComponent name="highlight" class="solr.HighlightComponent" />
-       <searchComponent name="stats"     class="solr.StatsComponent" />
-       <searchComponent name="terms"     class="solr.TermsComponent" />
-       <searchComponent name="debug"     class="solr.DebugComponent" />
-
-       Default configuration in a requestHandler would look like:
-
-       <arr name="components">
-         <str>query</str>
-         <str>facet</str>
-         <str>mlt</str>
-         <str>highlight</str>
-         <str>stats</str>
-         <str>debug</str>
-       </arr>
-
-       If you register a searchComponent to one of the standard names,
-       that will be used instead of the default.
-
-       To insert components before or after the 'standard' components, use:
-
-       <arr name="first-components">
-         <str>myFirstComponentName</str>
-       </arr>
-
-       <arr name="last-components">
-         <str>myLastComponentName</str>
-       </arr>
-
-       NOTE: The component registered with the name "debug" will
-       always be executed after the "last-components"
-
-     -->
-
    <!-- Spell Check
 
         The spell check component can return a list of alternative spelling
         suggestions.
 
-        http://wiki.apache.org/solr/SpellCheckComponent
+        https://lucene.apache.org/solr/guide/spell-checking.html
      -->
   <searchComponent name="spellcheck" class="solr.SpellCheckComponent">
 
@@ -1030,7 +923,7 @@
        IN OTHER WORDS, THERE IS REALLY GOOD CHANCE THE SETUP BELOW IS
        NOT WHAT YOU WANT FOR YOUR PRODUCTION SYSTEM!
 
-       See http://wiki.apache.org/solr/SpellCheckComponent for details
+       See https://lucene.apache.org/solr/guide/spell-checking.html for details
        on the request parameters.
     -->
   <requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
@@ -1061,9 +954,7 @@
        As with the rest of this solrconfig.xml file, the configuration of this component is purely
        an example that applies specifically to this configset and example documents.
 
-       More information about this component and other configuration options are described in the
-       "Suggester" section of the reference guide available at
-       http://archive.apache.org/dist/lucene/solr/ref-guide
+       See: https://lucene.apache.org/solr/guide/suggester.html
     -->
   <searchComponent name="suggest" class="solr.SuggestComponent">
     <lst name="suggester">
@@ -1091,7 +982,7 @@
 
   <!-- Term Vector Component
 
-       http://wiki.apache.org/solr/TermVectorComponent
+       https://lucene.apache.org/solr/guide/the-term-vector-component.html
     -->
   <searchComponent name="tvComponent" class="solr.TermVectorComponent"/>
 
@@ -1129,7 +1020,6 @@
       * org.carrot2.clustering.lingo.LingoClusteringAlgorithm
       * org.carrot2.clustering.stc.STCClusteringAlgorithm
       * org.carrot2.clustering.kmeans.BisectingKMeansClusteringAlgorithm
-    See http://project.carrot2.org/algorithms.html for more information.
 
     Commercial algorithm Lingo3G (needs to be installed separately):
       * com.carrotsearch.lingo3g.Lingo3GClusteringAlgorithm
@@ -1203,7 +1093,7 @@
 
   <!-- Query Elevation Component
 
-       http://wiki.apache.org/solr/QueryElevationComponent
+       https://lucene.apache.org/solr/guide/the-query-elevation-component.html
 
        a search component that enables you to configure the top
        results for a given query regardless of the normal lucene
@@ -1227,7 +1117,7 @@
 
   <!-- Highlighting Component
 
-       http://wiki.apache.org/solr/HighlightingParameters
+       https://lucene.apache.org/solr/guide/highlighting.html
     -->
   <searchComponent class="solr.HighlightComponent" name="highlight">
     <highlighting>
@@ -1331,18 +1221,15 @@
     </highlighting>
   </searchComponent>
 
-  <!-- Update Processors
+  <!-- Update Request Processors
+       https://lucene.apache.org/solr/guide/update-request-processors.html
 
-       Chains of Update Processor Factories for dealing with Update
-       Requests can be declared, and then used by name in Update
-       Request Processors
-
-       http://wiki.apache.org/solr/UpdateRequestProcessor
-
+       Chains or individual Update Request Processor Factories can be declared and referenced
+       to preprocess documents sent to Update Request Handlers.
     -->
   <!-- Deduplication
 
-       An example dedup update processor that creates the "id" field
+       An example dedup update request processor chain that creates the "id" field
        on the fly based on the hash code of some other fields.  This
        example has overwriteDupes set to false since we are using the
        id field as the signatureField and Solr will maintain
@@ -1365,13 +1252,14 @@
 
   <!-- Language identification
 
-       This example update chain identifies the language of the incoming
+       This example update request processor chain identifies the language of the incoming
        documents using the langid contrib. The detected language is
        written to field language_s. No field name mapping is done.
        The fields used for detection are text, title, subject and description,
        making this example suitable for detecting languages form full-text
        rich documents injected via ExtractingRequestHandler.
-       See more about langId at http://wiki.apache.org/solr/LanguageDetection
+
+       See more about langId at https://lucene.apache.org/solr/guide/detecting-languages-during-indexing.html
     -->
     <!--
      <updateRequestProcessorChain name="langid">
diff --git a/solr/solr-ref-guide/build.gradle b/solr/solr-ref-guide/build.gradle
index 0a4bf84..e9f1908 100644
--- a/solr/solr-ref-guide/build.gradle
+++ b/solr/solr-ref-guide/build.gradle
@@ -15,28 +15,6 @@
  * limitations under the License.
  */
 
-// TODO 1: the separation of sources between tools and refGuide is awkward; it'd be
-// better to separate the refGuideTools as a plain Java module and then depend on
-// it as a project dependency. This would enable this module to *not* be a java module at all
-// and inherit from base, adding just refGuide-related tasks.
-// OR (better) one could rewrite those tools in Groovy (or Kotlin) and use them directly, without
-// an additional compilation phase.
-
-// TODO 2: property expansion via ant properties is awkward in gradle. We can do cleaner than going
-// through ant -- we could use gradle's expand when copying or at least use some more humane
-// property names.
-
-// TODO 3: currently buildscript dependencies are hardcoded (asciidoctor) because they can't be resolved
-// using Palantir's plugin. This is another reason to switch to gradle-based tools -- then
-// only the build script dependencies would be needed and asciidoctorj would be removed from version
-// properties entirely (it is only used locally in this build file).
-
-import java.time.*
-import java.time.format.*
-import java.nio.file.*
-import org.asciidoctor.*
-import groovy.json.StringEscapeUtils
-
 buildscript {
     repositories {
         mavenCentral()
@@ -50,7 +28,7 @@
 
 plugins {
     id 'java'
-    id 'com.github.jruby-gradle.base' version '2.0.0-alpha.7'
+    id 'com.github.jruby-gradle.base' version '2.0.0'
 }
 
 // This project does not contribute anything to main dependencies.
@@ -146,13 +124,11 @@
         htmlLuceneJavadocs = "https://lucene.apache.org/core/${solrGuideVersionPath}_0/"
     }
 
-    // ivy* props will be set in setupLazyProps
-    // (because they need to be computed after evaluation is complete).
     templateProps = [
       javadocLink: "https://docs.oracle.com/en/java/javase/11/docs/api/",
                       
       solrGuideDraftStatus: propertyOrDefault("solrGuideDraft", "true").toBoolean() ? "DRAFT" : "",
-      solrRootPath: StringEscapeUtils.escapeJava(project(':solr').projectDir.toString() + File.separator),
+      solrRootPath: project(':solr').projectDir.toString() + File.separator,
       
       solrDocsVersion: solrDocsVersion,
       solrGuideVersionPath: solrGuideVersionPath,
@@ -163,31 +139,38 @@
     ]   
 }
 
-task setupLazyProps {
+task prepareSources(type: Sync) {
+    dependsOn configurations.depVer
+    
+    // If replaceable properties change, we have to rerun the task.
+    inputs.properties templateProps
+
+    final def escapedProps = [:] // must be final and only contents may change in doFirst, otherwise it's not picked up by expand.
     doFirst {
+        // Copy over template properties and add dependency versions resolved during execution phase.
+        final def props = templateProps.clone()
+
         // These properties have to be resolved after the configuration phase is complete (palantir's constraint)
-        // so we can't use them as input for caches.
+        // so we can't use them as input for caches. But as this task depends on the configuration, it's used correctly
         [
-                ["ivyCommonsCodec", "commons-codec", "commons-codec"],
-                ["ivyDropwizardMetrics", "io.dropwizard.metrics", "metrics-core"],
-                ["ivyLog4j", "org.apache.logging.log4j", "log4j-core"],
-                ["ivyOpennlpTools", "org.apache.opennlp", "opennlp-tools"],
-                ["ivyTika", "org.apache.tika", "tika-core"],
-                ["ivyZookeeper", "org.apache.zookeeper", "zookeeper"],
+            ["ivyCommonsCodec", "commons-codec", "commons-codec"],
+            ["ivyDropwizardMetrics", "io.dropwizard.metrics", "metrics-core"],
+            ["ivyLog4j", "org.apache.logging.log4j", "log4j-core"],
+            ["ivyOpennlpTools", "org.apache.opennlp", "opennlp-tools"],
+            ["ivyTika", "org.apache.tika", "tika-core"],
+            ["ivyZookeeper", "org.apache.zookeeper", "zookeeper"],
         ].each { p, depGroup, depId ->
-            templateProps[p] = getVersion(depGroup, depId, configurations.depVer)
+            props[p] = getVersion(depGroup, depId, configurations.depVer)
         }
 
         // Emit info about properties for clarity.
-        logger.warn("Building ref guide with:\n" + templateProps.collect({ k, v -> "  ${k} -> ${v}" }).join('\n'))
+        logger.lifecycle('Building ref guide with:\n{}', props.collect({ k, v -> "  ${k} -> ${v}" }).join('\n'))
+
+        // Escape all the properties, so they can be inserted into YAML templates.
+        props.each{ k, v ->
+          escapedProps[k] = v.replace("'","''")
+        }
     }
-}
-
-task prepareSources(type: Sync) {
-    dependsOn setupLazyProps
-
-    // If replaceable properties change, we have to rerun the task.
-    inputs.properties templateProps
 
     from(file("src"), {
        exclude '**/*.template'
@@ -197,7 +180,7 @@
         include '**/*.template'
         rename '(.+)\\.template', '$1'
         filteringCharset = 'UTF-8'
-        expand(templateProps)
+        expand(escapedProps)
     })
 
     into buildContentDir
@@ -211,8 +194,8 @@
     workingDir = buildContentDir
 
     args([
-            "${buildContentDir}",
-            "${mainPage}"
+        "${buildContentDir}",
+        "${mainPage}"
     ])
 
     doFirst {
@@ -259,5 +242,5 @@
 // Hook up custom tasks with standard tasks.
 check.dependsOn buildSite
 
-// Do not hook site building to assemble, at least for now.
-// assemble.dependsOn buildSite
+// Hook site building to assemble.
+assemble.dependsOn buildSiteJekyll
diff --git a/solr/solr-ref-guide/src/_config.yml.template b/solr/solr-ref-guide/src/_config.yml.template
index 2791b83..4553577 100755
--- a/solr/solr-ref-guide/src/_config.yml.template
+++ b/solr/solr-ref-guide/src/_config.yml.template
@@ -7,7 +7,7 @@
 #
 
 # Gems that are included for building the site. jekyll-asciidoc allows Jekyll to use Asciidoctor for variables and settings
-gems: [jekyll-asciidoc]
+plugins: [jekyll-asciidoc]
 
 destination: ../html-site
 
@@ -68,21 +68,21 @@
 # NOTE: If you add any attributes here for use in adoc files, you almost certainly need to also add
 # them to the <asciidoctor:convert/> ant task for precommit validation as well.
 solr-attributes: &solr-attributes-ref
-  solr-root-path: "${solrRootPath}"
-  solr-guide-draft-status: "${solrGuideDraftStatus}"
-  solr-guide-version-path: "${solrGuideVersionPath}"
-  solr-docs-version: "${solrDocsVersion}"
-  java-javadocs: "${javadocLink}"
-  solr-javadocs: "${htmlSolrJavadocs}"
-  lucene-javadocs: "${htmlLuceneJavadocs}"
-  build-date: "${buildDate}"
-  build-year: "${buildYear}"
-  ivy-commons-codec-version: "${ivyCommonsCodec}"
-  ivy-dropwizard-version: "${ivyDropwizardMetrics}"
-  ivy-log4j-version: "${ivyLog4j}"
-  ivy-opennlp-version: "${ivyOpennlpTools}"
-  ivy-tika-version: "${ivyTika}"
-  ivy-zookeeper-version: "${ivyZookeeper}"
+  solr-root-path: '${solrRootPath}'
+  solr-guide-draft-status: '${solrGuideDraftStatus}'
+  solr-guide-version-path: '${solrGuideVersionPath}'
+  solr-docs-version: '${solrDocsVersion}'
+  java-javadocs: '${javadocLink}'
+  solr-javadocs: '${htmlSolrJavadocs}'
+  lucene-javadocs: '${htmlLuceneJavadocs}'
+  build-date: '${buildDate}'
+  build-year: '${buildYear}'
+  ivy-commons-codec-version: '${ivyCommonsCodec}'
+  ivy-dropwizard-version: '${ivyDropwizardMetrics}'
+  ivy-log4j-version: '${ivyLog4j}'
+  ivy-opennlp-version: '${ivyOpennlpTools}'
+  ivy-tika-version: '${ivyTika}'
+  ivy-zookeeper-version: '${ivyZookeeper}'
 
 asciidoctor:
   safe: 0
diff --git a/solr/solr-ref-guide/src/analytics.adoc b/solr/solr-ref-guide/src/analytics.adoc
index 7cd452e..573858b 100644
--- a/solr/solr-ref-guide/src/analytics.adoc
+++ b/solr/solr-ref-guide/src/analytics.adoc
@@ -57,7 +57,7 @@
 == Request Syntax
 
 An Analytics request is passed to Solr with the parameter `analytics` in a request sent to the
-<<requesthandlers-and-searchcomponents-in-solrconfig.adoc#searchhandlers,Search Handler>>.
+<<requesthandlers-and-searchcomponents-in-solrconfig.adoc#search-handlers,Search Handler>>.
 Since the analytics request is sent inside of a search handler request, it will compute results based on the result set determined by the search handler.
 
 For example, this curl command encodes and POSTs a simple analytics request to the the search handler:
diff --git a/solr/solr-ref-guide/src/collection-management.adoc b/solr/solr-ref-guide/src/collection-management.adoc
index bb71351..701a223 100644
--- a/solr/solr-ref-guide/src/collection-management.adoc
+++ b/solr/solr-ref-guide/src/collection-management.adoc
@@ -77,7 +77,10 @@
 This parameter is ignored if `createNodeSet` is not also specified.
 
 `collection.configName`::
-Defines the name of the configuration (which *must already be stored in ZooKeeper*) to use for this collection. If not provided, Solr will use the configuration of `_default` configset to create a new (and mutable) configset named `<collectionName>.AUTOCREATED` and will use it for the new collection. When such a collection (that uses a copy of the _default configset) is deleted, the autocreated configset is not deleted by default.
+Defines the name of the configuration (which *must already be stored in ZooKeeper*) to use for this collection.
++
+If not provided, Solr will use the configuration of `_default` configset to create a new (and mutable) configset named `<collectionName>.AUTOCREATED` and will use it for the new collection.
+When such a collection is deleted, its autocreated configset will be deleted by default when it is not in use by any other collection.
 
 `router.field`::
 If this parameter is specified, the router will look at the value of the field in an input document to compute the hash and identify a shard instead of looking at the `uniqueKey` field. If the field specified is null in the document, the document will be rejected.
diff --git a/solr/solr-ref-guide/src/common-query-parameters.adoc b/solr/solr-ref-guide/src/common-query-parameters.adoc
index c101a8b..a9d7a8d 100644
--- a/solr/solr-ref-guide/src/common-query-parameters.adoc
+++ b/solr/solr-ref-guide/src/common-query-parameters.adoc
@@ -18,7 +18,7 @@
 
 Several query parsers share supported query parameters.
 
-The following sections describe Solr's common query parameters, which are supported by the <<requesthandlers-and-searchcomponents-in-solrconfig#searchhandlers,Search RequestHandlers>>.
+The following sections describe Solr's common query parameters, which are supported by the <<requesthandlers-and-searchcomponents-in-solrconfig#search-handlers,Search RequestHandlers>>.
 
 == defType Parameter
 
@@ -307,11 +307,13 @@
 
 The `echoParams` parameter accepts the following values:
 
-* `explicit`: This is the default value. Only parameters included in the actual request, plus the `_` parameter (which is a 64-bit numeric timestamp) will be added to the `params` section of the response header.
+* `explicit`: Only parameters included in the actual request will be added to the `params` section of the response header.
 * `all`: Include all request parameters that contributed to the query. This will include everything defined in the request handler definition found in `solrconfig.xml` as well as parameters included with the request, plus the `_` parameter. If a parameter is included in the request handler definition AND the request, it will appear multiple times in the response header.
 * `none`: Entirely removes the `params` section of the response header. No information about the request parameters will be available in the response.
 
-Here is an example of a JSON response where the echoParams parameter was not included, so the default of `explicit` is active. The request URL that created this response included three parameters - `q`, `wt`, and `indent`:
+The default value is `none`, though many `solrconfig.xml` handlers set the default to `explicit`.
+Here is an example of a JSON response where the echoParams parameter was set in that SearchHandler's default,
+so it itself was not echoed, but only three parameters from the request itself - `q`, `wt`, and `indent`:
 
 [source,json]
 ----
diff --git a/solr/solr-ref-guide/src/configsets-api.adoc b/solr/solr-ref-guide/src/configsets-api.adoc
index 3110189..6c8a162 100644
--- a/solr/solr-ref-guide/src/configsets-api.adoc
+++ b/solr/solr-ref-guide/src/configsets-api.adoc
@@ -94,13 +94,13 @@
 
 The `upload` command takes the following parameters:
 
-name::
+`name`::
 The configset to be created when the upload is complete. This parameter is required.
 
-overwrite::
+`overwrite`::
 If set to `true`, Solr will overwrite an existing configset with the same name (if false, the request will fail). Default is `false`.
 
-cleanup::
+`cleanup`::
 When overwriting an existing configset (`overwrite=true`), this parameter tells Solr to delete the files in ZooKeeper that existed in the old configset but not in the one being uploaded. Default is `false`.
 
 The body of the request should be a zip file that contains the configset. The zip file must be created from within the `conf` directory (i.e., `solrconfig.xml` must be the top level entry in the zip file).
@@ -132,13 +132,13 @@
 
 The following parameters are supported when creating a configset.
 
-name::
+`name`::
 The configset to be created. This parameter is required.
 
-baseConfigSet::
+`baseConfigSet`::
 The name of the configset to copy as a base. This defaults to `_default`
 
-configSetProp._property_=_value_::
+`configSetProp._property_=_value_`::
 A configset property from the base configset to override in the copied configset.
 
 For example, to create a configset named "myConfigset" based on a previously defined "predefinedTemplate" configset, overriding the immutable property to false.
@@ -192,7 +192,7 @@
 
 The `delete` command removes a configset. It does not remove any collections that were created with the configset.
 
-name::
+`name`::
 The configset to be deleted. This parameter is required.
 
 To delete a configset named "myConfigSet":
diff --git a/solr/solr-ref-guide/src/faceting.adoc b/solr/solr-ref-guide/src/faceting.adoc
index dd0eafa..65b24cb 100644
--- a/solr/solr-ref-guide/src/faceting.adoc
+++ b/solr/solr-ref-guide/src/faceting.adoc
@@ -20,6 +20,8 @@
 
 Searchers are presented with the indexed terms, along with numerical counts of how many matching documents were found for each term. Faceting makes it easy for users to explore search results, narrowing in on exactly the results they are looking for.
 
+See also <<json-facet-api.adoc#json-facet-api, JSON Facet API>> for an alternative approach to this.
+
 == General Facet Parameters
 
 There are two general parameters for controlling faceting.
diff --git a/solr/solr-ref-guide/src/highlighting.adoc b/solr/solr-ref-guide/src/highlighting.adoc
index a1ddd5b..1992d83 100644
--- a/solr/solr-ref-guide/src/highlighting.adoc
+++ b/solr/solr-ref-guide/src/highlighting.adoc
@@ -141,7 +141,7 @@
 
 == Choosing a Highlighter
 
-Solr provides a `HighlightComponent` (a <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#search-components,`SearchComponent`>>) and it's in the default list of components for search handlers. It offers a somewhat unified API over multiple actual highlighting implementations (or simply "highlighters") that do the business of highlighting.
+Solr provides a `HighlightComponent` (a <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#defining-search-components,`SearchComponent`>>) and it's in the default list of components for search handlers. It offers a somewhat unified API over multiple actual highlighting implementations (or simply "highlighters") that do the business of highlighting.
 
 There are many parameters supported by more than one highlighter, and sometimes the implementation details and semantics will be a bit different, so don't expect identical results when switching highlighters. You should use the `hl.method` parameter to choose a highlighter but it's also possible to explicitly configure an implementation by class name in `solrconfig.xml`.
 
diff --git a/solr/solr-ref-guide/src/index-replication.adoc b/solr/solr-ref-guide/src/index-replication.adoc
index e635ca1..d993cac 100644
--- a/solr/solr-ref-guide/src/index-replication.adoc
+++ b/solr/solr-ref-guide/src/index-replication.adoc
@@ -82,7 +82,7 @@
 In addition to `ReplicationHandler` configuration options specific to the leader/follower roles, there are a few special configuration options that are generally supported (even when using SolrCloud).
 
 * `maxNumberOfBackups` an integer value dictating the maximum number of backups this node will keep on disk as it receives `backup` commands.
-* Similar to most other request handlers in Solr you may configure a set of <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#searchhandlers,defaults, invariants, and/or appends>> parameters corresponding with any request parameters supported by the `ReplicationHandler` when <<HTTP API Commands for the ReplicationHandler,processing commands>>.
+* Similar to most other request handlers in Solr you may configure a set of <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#search-handlers,defaults, invariants, and/or appends>> parameters corresponding with any request parameters supported by the `ReplicationHandler` when <<HTTP API Commands for the ReplicationHandler,processing commands>>.
 
 === Configuring the Replication RequestHandler on a Leader Server
 
diff --git a/solr/solr-ref-guide/src/initparams-in-solrconfig.adoc b/solr/solr-ref-guide/src/initparams-in-solrconfig.adoc
index 0429783..aa21f5f 100644
--- a/solr/solr-ref-guide/src/initparams-in-solrconfig.adoc
+++ b/solr/solr-ref-guide/src/initparams-in-solrconfig.adoc
@@ -31,7 +31,7 @@
 
 [source,xml]
 ----
-<initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell,/browse">
+<initParams path="/update/**,/query,/select,/tvrh,/elevate,/spell">
   <lst name="defaults">
     <str name="df">_text_</str>
   </lst>
diff --git a/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc b/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
index f59f5e5..b989126 100644
--- a/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
+++ b/solr/solr-ref-guide/src/major-changes-in-solr-9.adoc
@@ -125,6 +125,10 @@
 
 * SOLR-14783: Data Import Handler (DIH) has been removed from Solr. The community package is available at: https://github.com/rohitbemax/dataimporthandler (Alexandre Rafalovitch)
 
+* SOLR-14792: VelocityResponseWriter has been removed from Solr.  This encompasses all previous included `/browse` and
+  `wt=velocity` examples.  This feature has been migrated to an installable package at
+  https://github.com/erikhatcher/solritas
+
 * SOLR-14510: The `writeStartDocumentList` in `TextResponseWriter` now receives an extra boolean parameter representing the "exactness" of the numFound value (exact vs approximation).
   Any custom response writer extending `TextResponseWriter` will need to implement this abstract method now (instead previous with the same name but without the new boolean parameter).
 
diff --git a/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc b/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc
index 10676ae..c4c99a6 100644
--- a/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc
+++ b/solr/solr-ref-guide/src/requesthandlers-and-searchcomponents-in-solrconfig.adoc
@@ -1,4 +1,4 @@
-= RequestHandlers and SearchComponents in SolrConfig
+= Request Handlers and Search Components in SolrConfig
 // Licensed to the Apache Software Foundation (ASF) under one
 // or more contributor license agreements.  See the NOTICE file
 // distributed with this work for additional information
@@ -18,34 +18,84 @@
 
 After the `<query>` section of `solrconfig.xml`, request handlers and search components are configured.
 
-A _request handler_ processes requests coming to Solr. These might be query requests or index update requests. You will likely need several of these defined, depending on how you want Solr to handle the various requests you will make.
+A _request handler_ processes requests coming to Solr. These might be query requests, index update requests or specialized interactions such as <<ping.adoc#ping,ping>>.
 
-A _search component_ is a feature of search, such as highlighting or faceting. The search component is defined in `solrconfig.xml` separate from the request handlers, and then registered with a request handler as needed.
+Not all handlers are defined explicitly in `solrconfig.xml`, many essential ones are actually defined <<implicit-requesthandlers.adoc#implicit-requesthandlers,implicitly>>.
 
-These are often referred to as "requestHandler" and "searchComponent", which is how they are defined in `solrconfig.xml`.
+Additionally, handlers can be defined - or even overridden - in `configoverlay.json` by using <<config-api.adoc#config-api,Config API>>.
+Finally, independent parameter sets can be also defined by <<request-parameters-api.adoc#request-parameters-api,Request Parameters API>>.
+They will be stored in `params.json` file and referenced with <<#paramsets-and-useparams,useParams>>.
 
-== Request Handlers
+All of this multi-layered configuration may be verified via the <<config-api.adoc#config-api,Config API>>.
 
-Every request handler is defined with a name and a class. The name of the request handler is referenced with the request to Solr, typically as a path. For example, if Solr is installed at `\http://localhost:8983/solr/` and you have a collection named "gettingstarted", you can make a request that looks like this:
+Defining your own request handlers is often a useful way to provide defaults and advanced configuration to support business cases and simplify the client API.
+At the same time, using every single option explained in this guide will most certainly cause some confusion about which parameter is actually used when.
+
+== Defining and Calling Request Handlers
+
+Every request handler is defined with a name and a class. The name of the request handler is referenced with the request to Solr, typically as a path.
+For example, if Solr is installed at `\http://localhost:8983/solr/`, and you have a collection named "gettingstarted", you can make a request that looks like this:
 
 [source,text]
 ----
 http://localhost:8983/solr/gettingstarted/select?q=solr
 ----
 
-This query will be processed by the request handler with the name `/select`. We've only used the "q" parameter here, which includes our query term, a simple keyword of "solr". If the request handler has more parameters defined, those will be used with any query we send to this request handler unless they are over-ridden by the client (or user) in the query itself.
+This query will be processed by the request handler with the name `/select`. We've only used the "q" parameter here, which includes our query term, a simple keyword of "solr".
+If the request handler has more default parameters defined, those will be used with any query we send to that request handler unless they are overridden by the client (or user) in the query itself.
 
-If you have another request handler defined, you would send your request with that name. For example, `/update` is a request handler that handles index updates (i.e., sending new documents to the index). By default, `/select` is a request handler that handles query requests.
+If you have another request handler defined, you could send your request with that name.
+For example, `/update` is an implicit request handler that handles index updates (i.e., sending new documents to the index).
+By default, `/select` is a request handler that handles query requests and one expected by most examples and tools.
 
-Request handlers can also process requests for nested paths of their names, for example, a request using `/myhandler/extrapath` may be processed by a request handler registered with the name `/myhandler`. If a request handler is explicitly defined by the name `/myhandler/extrapath`, that would take precedence over the nested path. This assumes you are using the request handler classes included with Solr; if you create your own request handler, you should make sure it includes the ability to handle nested paths if you want to use them with your custom request handler.
+Request handlers can also process requests for nested paths in their names,
+for example, a request using `/myhandler/extrapath` may be processed by a request handler registered with the name `/myhandler`.
+If a request handler is explicitly defined by the name `/myhandler/extrapath`, that would take precedence over the nested path.
+This assumes you are using the request handler classes included with Solr; if you create your own request handler,
+you should make sure it includes the ability to handle nested paths if you want to use them with your custom request handler.
 
-It is also possible to configure defaults for request handlers with a section called `initParams`. These defaults can be used when you want to have common properties that will be used by each separate handler. For example, if you intend to create several request handlers that will all request the same list of fields in the response, you can configure an `initParams` section with your list of fields. For more information about `initParams`, see the section <<initparams-in-solrconfig.adoc#initparams-in-solrconfig,InitParams in SolrConfig>>.
+If a request handler is not expected to be used very often, it can be marked with `startup="lazy"` to avoid loading until needed.
 
-=== SearchHandlers
+[source,xml]
+----
+<requestHandler name="/spell" class="solr.SearchHandler" startup="lazy">
+ ...
+</requestHandler>
+----
 
-The primary request handler defined with Solr by default is the "SearchHandler", which handles search queries. The request handler is defined, and then a list of defaults for the handler are defined with a `defaults` list.
+== Configuring Request Handlers
+There are three ways to configure request handlers inside their definitions and another three ways to configure them elsewhere.
 
-For example, in the default `solrconfig.xml`, the first request handler defined looks like this:
+=== Request Parameters (GET and POST)
+The easiest and most flexible way is to provide parameters with standard GET or POST requests.
+
+Here is an example of sending parameters `id`, `fl`, and `wt` to `/select` Search Handler.
+Notice the URL-encoded space (as +) for the values of `fl` parameter.
+
+[source,text]
+----
+http://localhost:8983/solr/techproducts/select?q=id:SP2514N&fl=id+name&wt=xml
+----
+
+And here is an example of parameters being sent through the POST form to `/query` Search Handler using <<json-request-api.adoc#json-request-api,JSON Request API>>.
+
+[source,bash]
+----
+curl http://localhost:8983/solr/techproducts/query -d '
+{
+  "query" : "memory",
+  "filter" : "inStock:true"
+}'
+----
+
+Either way, the parameters are extracted and combined with other options explained below.
+
+=== Defaults, Appends, and Invariants
+
+==== Defaults
+
+The most common way to configure request handlers is by providing `defaults` section.
+The parameters there are used unless they are overridden by any other method.
 
 [source,xml]
 ----
@@ -57,24 +107,44 @@
 </requestHandler>
 ----
 
-This example defines the `rows` parameter, which defines how many search results to return, to "10". The `echoParams` parameter defines that the parameters defined in the query should be returned when debug information is returned. Note also that the way the defaults are defined in the list varies if the parameter is a string, an integer, or another type.
+This example defines a useful troubleshooting parameter, <<common-query-parameters.adoc#echoparams-parameter,echoParams>>, with a value that returns only the params defined in the request itself (no defaults); set it to `all` for more information.
+It also defines the `rows` parameter, which controls how many results to return per page (10 is in fact the built-in default, so this definition is redundant unless you intend to modify it).
 
-All of the parameters described in the section  <<searching.adoc#searching,Searching>> can be defined as defaults for any of the SearchHandlers.
+Note also that the way the defaults are defined in the list varies if the parameter is a string, an integer, or another type.
 
-Besides `defaults`, there are other options for the SearchHandler, which are:
+Here is how some other primitive types are represented:
 
-* `appends`: This allows definition of parameters that are added to the user query. These might be <<common-query-parameters.adoc#fq-filter-query-parameter,filter queries>>, or other query rules that should be added to each query. There is no mechanism in Solr to allow a client to override these additions, so you should be absolutely sure you always want these parameters applied to queries.
-+
+[source,xml]
+----
+  <lst name="defaults">
+    <float name="hl.regex.slop">0.5</float>
+    <bool name="default">true</bool>
+  </lst>
+----
+
+Other specialized types may exist; they are explained in the sections for the relevant components.
+
+==== Appends
+
+In the `appends` section, you can define parameters that are added to those already defined elsewhere.
+These are useful when the same parameter may be meaningfully defined multiple times, such as for <<common-query-parameters.adoc#fq-filter-query-parameter,filter queries>>.
+There is no mechanism in Solr to allow a client to override these additions, so you should be absolutely sure you always want these parameters applied to queries.
+
 [source,xml]
 ----
 <lst name="appends">
   <str name="fq">inStock:true</str>
 </lst>
 ----
-+
-In this example, the filter query "inStock:true" will always be added to every query.
-* `invariants`: This allows definition of parameters that cannot be overridden by a client. The values defined in an `invariants` section will always be used regardless of the values specified by the user, by the client, in `defaults` or in `appends`.
-+
+
+In this example, the filter query `inStock:true` will always be added to every query, enforcing that only available "products" are returned.
+
+==== Invariants
+
+In the `invariants` section, you can define parameters that cannot be overridden by a client.
+The values defined in the `invariants` section will always be used regardless of the values specified by the user, by the client, in `defaults` or in `appends`.
+
+
 [source,xml]
 ----
 <lst name="invariants">
@@ -84,42 +154,77 @@
   <str name="facet.query">price:[500 TO *]</str>
 </lst>
 ----
-+
-In this example, facet fields have been defined which limits the facets that will be returned by Solr. If the client requests facets, the facets defined with a configuration like this are the only facets they will see.
 
-The final section of a request handler definition is `components`, which defines a list of search components that can be used with a request handler. They are only registered with the request handler. How to define a search component is discussed further on in the section on <<Search Components>> below. The `components` element can only be used with a request handler that is a SearchHandler.
+In this example, the `facet.field` and `facet.query` params would be fixed, limiting the facets clients can use.
+Faceting is not turned on by default - but if the client does specify `facet=true` in the request,
+these are the only facets they will be able to see counts for; regardless of what other `facet.field` or `facet.query` params they may specify.
 
-The `solrconfig.xml` file includes many other examples of SearchHandlers that can be used or modified as needed.
+=== InitParams
+It is also possible to configure defaults for request handlers with a section called `initParams`.
+These defaults can be used when you want to have common properties that will be used by each separate handler.
+For example, if you intend to create several request handlers that will all request the same list of fields in the response, you can configure an `initParams` section with your list of fields.
+For more information about `initParams`, see the section <<initparams-in-solrconfig.adoc#initparams-in-solrconfig,InitParams in SolrConfig>>.
 
-=== UpdateRequestHandlers
+=== Paramsets and UseParams
+If you are expecting to change the parameters often, or if you want to define sets of parameters that you can apply on the fly,
+you can define them with the <<request-parameters-api.adoc#request-parameters-api,Request Parameters API>> and then invoke them
+by providing one or more of them in the `useParams` setting, either in the handler definition itself or as a query parameter.
 
-The UpdateRequestHandlers are request handlers which process updates to the index.
+[source,xml]
+----
+<requestHandler name="/terms" class="solr.SearchHandler" useParams="myQueries">
 
-In this guide, we've covered these handlers in detail in the section <<uploading-data-with-index-handlers.adoc#uploading-data-with-index-handlers,Uploading Data with Index Handlers>>.
+...
+</requestHandler>
+----
 
-=== ShardHandlers
+[source,text]
+----
+http://localhost/solr/techproducts/select?useParams=myFacets,myQueries
+----
 
-It is possible to configure a request handler to search across shards of a cluster, used with distributed search. More information about distributed search and how to configure the shardHandler is in the section <<distributed-search-with-index-sharding.adoc#distributed-search-with-index-sharding,Distributed Search with Index Sharding>>.
+If a paramset is referenced but not defined, it is ignored.
+This allows most <<implicit-requesthandlers.adoc#implicit-requesthandlers,implicit request handlers>> to reference specific paramsets
+that you can define later, as needed.
 
-=== Implicit Request Handlers
 
-Solr includes many out-of-the-box request handlers that are not configured in `solrconfig.xml`, and so are referred to as "implicit" - see <<implicit-requesthandlers.adoc#implicit-requesthandlers,Implicit RequestHandlers>>.
+== Search Handlers
 
-== Search Components
+Search Handlers are very important to Solr, as the data is indexed (roughly) once but is searched many times.
+The whole design of Solr (and Lucene) is oriented toward optimizing data for searching, and a Search Handler is the flexible gateway to that.
 
-Search components define the logic that is used by the SearchHandler to perform queries for users.
+The following sections are allowed within a Search Handler:
 
-=== Default Components
+[source,xml]
+----
+<requestHandler name="/select" class="solr.SearchHandler">
+... defaults/appends/invariants
+... first-components/last-components or components
+... shardHandlerFactory
+</requestHandler>
+----
 
-There are several default search components that work with all SearchHandlers without any additional configuration. If no components are defined (with the exception of `first-components` and `last-components` - see below), these are executed by default, in the following order:
+All the blocks are optional, especially since parameters can also be provided with `initParams` and `useParams`.
 
-// TODO: Change column width to %autowidth.spread when https://github.com/asciidoctor/asciidoctor-pdf/issues/599 is fixed
+The defaults/appends/invariants blocks were described <<#defaults-appends-and-invariants,earlier>> on this page. All the parameters described in the section <<searching.adoc#searching,Searching>> can be defined as parameters for any of the Search Handlers.
+
+The Search Components blocks are described next, and <<distributed-requests.adoc#configuring-the-shardhandlerfactory,shardHandlerFactory>> is for fine-tuning of the SolrCloud distributed requests.
+
+=== Defining Search Components
+The search components themselves are defined outside of the Request Handlers and then are referenced from various Search Handlers that want to use them.
+Most Search Handlers use the default - implicit - stack of Search Components and only sometimes need to augment them with additional components prepended or appended.
+It is quite rare - and somewhat brittle - to completely override the component stack, though it is used in examples to clearly demonstrate the effect of a specific Search Component.
+
+==== Default Components
+
+As you can see below, what we see as a search experience is mostly a sequence of components defined below. They are called in the order listed.
 
 [cols="20,40,40",options="header"]
 |===
 |Component Name |Class Name |More Information
 |query |`solr.QueryComponent` |Described in the section <<query-syntax-and-parsing.adoc#query-syntax-and-parsing,Query Syntax and Parsing>>.
-|facet |`solr.FacetComponent` |Described in the section <<faceting.adoc#faceting,Faceting>>.
+|facet |`solr.FacetComponent` |Original parameter-based facet component, described in the section <<faceting.adoc#faceting,Faceting>>.
+|facet_module |`solr.facet.FacetModule` | JSON Faceting and Analytics module, described in the section <<json-facet-api.adoc#json-facet-api, JSON Facet API>>.
 |mlt |`solr.MoreLikeThisComponent` |Described in the section <<morelikethis.adoc#morelikethis,MoreLikeThis>>.
 |highlight |`solr.HighlightComponent` |Described in the section <<highlighting.adoc#highlighting,Highlighting>>.
 |stats |`solr.StatsComponent` |Described in the section <<the-stats-component.adoc#the-stats-component,The Stats Component>>.
@@ -128,53 +233,92 @@
 |debug |`solr.DebugComponent` |Described in the section on <<common-query-parameters.adoc#debug-parameter,Common Query Parameters>>.
 |===
 
-If you register a new search component with one of these default names, the newly defined component will be used instead of the default.
+==== Shipped Custom Components
+Apart from default components, Solr ships with a number of additional - very useful - components.
+They do need to be defined and referenced in `solrconfig.xml` to be actually used.
 
-=== First-Components and Last-Components
+* `AnalyticsComponent`, described in the section <<analytics.adoc#analytics,Analytics>>.
+* `ClusteringComponent`, described in the section <<result-clustering.adoc#result-clustering,Result Clustering>>.
+* `PhrasesIdentificationComponent`, used to identify & score "phrases" found in the input string, based on shingles in indexed fields, described in the {solr-javadocs}solr-core/org/apache/solr/handler/component/PhrasesIdentificationComponent.html[PhrasesIdentificationComponent] javadocs.
+* `QueryElevationComponent`, described in the section <<the-query-elevation-component.adoc#the-query-elevation-component,The Query Elevation Component>>.
+* `RealTimeGetComponent`, described in the section <<realtime-get.adoc#realtime-get,RealTime Get>>.
+* `ResponseLogComponent`, used to record which documents are returned to the user via the Solr log, described in the {solr-javadocs}solr-core/org/apache/solr/handler/component/ResponseLogComponent.html[ResponseLogComponent] javadocs.
+* `SpellCheckComponent`, described in the section <<spell-checking.adoc#spell-checking,Spell Checking>>.
+* `SuggestComponent`, described in the section <<suggester.adoc#suggester,Suggester>>.
+* `TermVectorComponent`, described in the section <<the-term-vector-component.adoc#the-term-vector-component,The Term Vector Component>>.
+
+Some third party components are also linked from https://solr.cool/ website.
+
+==== Defining Custom Search Components
+To define a custom component, the syntax is:
+
+[source,xml]
+----
+<searchComponent name="spellcheck" class="solr.SpellCheckComponent">
+  <lst name="spellchecker">
+    <str name="classname">solr.IndexBasedSpellChecker</str>
+    ...
+  </lst>
+</searchComponent>
+----
+
+Custom components often have configuration elements not described here. Check specific component's documentation/examples for details.
+
+Notice: If you register a new search component with one of the default names, the newly defined component will be used instead of the default.
+This allows you to override a specific component without having to worry so much about upgrading Solr.
+
+=== Referencing Search Components
 
 It's possible to define some components as being used before (with `first-components`) or after (with `last-components`) the default components listed above.
 
-[IMPORTANT]
-====
-`first-components` and/or `last-components` may only be used in conjunction with the default components. If you define your own `components`, the default components will not be executed, and `first-components` and `last-components` are disallowed.
-====
+[source,xml]
+----
+<searchComponent name="..." class="...">
+ <arr name="first-components">
+      <str>mycomponent</str>
+    </arr>
+    <arr name="last-components">
+      <str>spellcheck</str>
+    </arr>
+</searchComponent>
+----
+
+NOTE: The component registered with the name "debug" will always be executed after the "last-components".
+
+If you define `components` instead, the <<#default-components,default components (above)>> will not be executed, and `first-components` and `last-components` are disallowed.
+This should be considered as a last-resort option as the default list may change in a later Solr version.
 
 [source,xml]
 ----
-<arr name="first-components">
-  <str>mycomponent</str>
-</arr>
-<arr name="last-components">
-  <str>spellcheck</str>
-</arr>
+<searchComponent name="..." class="...">
+    <arr name="components">
+      <str>mycomponent</str>
+      <str>query</str>
+      <str>debug</str>
+    </arr>
+</searchComponent>
 ----
 
-=== Components
 
-If you define `components`, the default components (see above) will not be executed, and `first-components` and `last-components` are disallowed:
+== Update Request Handlers
+
+The Update Request Handlers are request handlers which process updates to the index. Most of the request handlers are <<implicit-requesthandlers.adoc#update-handlers,implicit>>
+and can be customized by defining properly named Paramsets.
+
+If you need to define an additional Update Request Handler, the syntax is:
 
 [source,xml]
 ----
-<arr name="components">
-  <str>mycomponent</str>
-  <str>query</str>
-  <str>debug</str>
-</arr>
+<requestHandler name="/update/json" class="solr.UpdateRequestHandler">
+... defaults/appends/invariants
+</requestHandler>
+
 ----
 
-=== Other Useful Components
+The full details are covered in the section <<uploading-data-with-index-handlers.adoc#uploading-data-with-index-handlers,Uploading Data with Index Handlers>>.
 
-Many of the other useful components are described in sections of this Guide for the features they support. These are:
+Similar to Search Components for Search Handlers, Solr has document-preprocessing plugins for Update Request Handlers,
+called <<update-request-processors.adoc#update-request-processors,Update Request Processors>>,
+which also allow for default and custom configuration chains.
 
-* `SpellCheckComponent`, described in the section <<spell-checking.adoc#spell-checking,Spell Checking>>.
-* `TermVectorComponent`, described in the section <<the-term-vector-component.adoc#the-term-vector-component,The Term Vector Component>>.
-* `QueryElevationComponent`, described in the section <<the-query-elevation-component.adoc#the-query-elevation-component,The Query Elevation Component>>.
-* `RealTimeGetComponent`, described in the section <<realtime-get.adoc#realtime-get,RealTime Get>>.
-* `ClusteringComponent`, described in the section <<result-clustering.adoc#result-clustering,Result Clustering>>.
-* `SuggestComponent`, described in the section <<suggester.adoc#suggester,Suggester>>.
-* `AnalyticsComponent`, described in the section <<analytics.adoc#analytics,Analytics>>.
-
-Other components that ship with Solr include:
-
-* `ResponseLogComponent`, used to record which documents are returned to the user via the Solr log, described in the {solr-javadocs}solr-core/org/apache/solr/handler/component/ResponseLogComponent.html[ResponseLogComponent] javadocs.
-* `PhrasesIdentificationComponent`, used to identify & score "phrases" found in the input string, based on shingles in indexed fields, described in the {solr-javadocs}solr-core/org/apache/solr/handler/component/PhrasesIdentificationComponent.html[PhrasesIdentificationComponent] javadocs.
+Note: Do not confuse Update Request Handlers with <<updatehandlers-in-solrconfig.adoc#updatehandlers-in-solrconfig,`updateHandler`>> section also defined in `solrconfig.xml`.
\ No newline at end of file
diff --git a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc
index 2863770..fc00166 100644
--- a/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc
+++ b/solr/solr-ref-guide/src/rule-based-authorization-plugin.adoc
@@ -240,8 +240,8 @@
 ** OVERSEERSTATUS
 ** CLUSTERSTATUS
 ** REQUESTSTATUS
-* *update*: this permission is allowed to perform any update action on any collection. This includes sending documents for indexing (using an <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#updaterequesthandlers,update request handler>>). This applies to all collections by default (`collection:"*"`).
-* *read*: this permission is allowed to perform any read action on any collection. This includes querying using search handlers (using <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#searchhandlers,request handlers>>) such as `/select`, `/get`, `/browse`, `/tvrh`, `/terms`, `/clustering`, `/elevate`, `/export`, `/spell`, `/clustering`, and `/sql`. This applies to all collections by default ( `collection:"*"` ).
+* *update*: this permission is allowed to perform any update action on any collection. This includes sending documents for indexing (using an <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#update-request-handlers,update request handler>>). This applies to all collections by default (`collection:"*"`).
+* *read*: this permission is allowed to perform any read action on any collection. This includes querying using search handlers (using <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#search-handlers,request handlers>>) such as `/select`, `/get`, `/tvrh`, `/terms`, `/clustering`, `/elevate`, `/export`, `/spell`, `/clustering`, and `/sql`. This applies to all collections by default ( `collection:"*"` ).
 * *all*: Any requests coming to Solr.
 
 === Permission Ordering and Resolution
diff --git a/solr/solr-ref-guide/src/solr-tutorial.adoc b/solr/solr-ref-guide/src/solr-tutorial.adoc
index 37380d9..6da43f5 100644
--- a/solr/solr-ref-guide/src/solr-tutorial.adoc
+++ b/solr/solr-ref-guide/src/solr-tutorial.adoc
@@ -751,11 +751,7 @@
 
 ==== Range Facets
 
-For numerics or dates, it's often desirable to partition the facet counts into ranges rather than discrete values. A prime example of numeric range faceting, using the example techproducts data from our previous exercise, is `price`.  In the `/browse` UI, it looks like this:
-
-.Range facets
-image::images/solr-tutorial/tutorial-range-facet.png[Solr Quick Start: Range facets]
-
+For numerics or dates, it's often desirable to partition the facet counts into ranges rather than discrete values. A prime example of numeric range faceting, using the example techproducts data from our previous exercise, is `price`.
 The films data includes the release date for films, and we could use that to create date range facets, which are another common use for range facets.
 
 The Solr Admin UI doesn't yet support range facet options, so you will need to use curl or similar command line tool for the following examples.
@@ -956,13 +952,6 @@
 
 Some of the example techproducts documents we indexed in Exercise 1 have locations associated with them to illustrate the spatial capabilities. To reindex this data, see <<index-the-techproducts-data,Exercise 1>>.
 
-Spatial queries can be combined with any other types of queries, such as in this example of querying for "ipod" within 10 kilometers from San Francisco:
-
-.Spatial queries and results
-image::images/solr-tutorial/tutorial-spatial.png[Solr Quick Start: spatial search]
-
-This is from Solr's example search UI (called `/browse`), which has a nice feature to show a map for each item and allow easy selection of the location to search near. You can see this yourself by going to <http://localhost:8983/solr/techproducts/browse?q=ipod&pt=37.7752%2C-122.4232&d=10&sfield=store&fq=%7B%21bbox%7D&queryOpts=spatial&queryOpts=spatial> in a browser.
-
 To learn more about Solr's spatial capabilities, see the section <<spatial-search.adoc#spatial-search,Spatial Search>>.
 
 == Wrapping Up
diff --git a/solr/solr-ref-guide/src/the-terms-component.adoc b/solr/solr-ref-guide/src/the-terms-component.adoc
index e53f469..11fdf73 100644
--- a/solr/solr-ref-guide/src/the-terms-component.adoc
+++ b/solr/solr-ref-guide/src/the-terms-component.adoc
@@ -22,7 +22,7 @@
 
 == Configuring the Terms Component
 
-Terms Component is one of  <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#search-components,the default search components>>
+Terms Component is one of <<requesthandlers-and-searchcomponents-in-solrconfig.adoc#defining-search-components,the default search components>>
 and does not need to be defined in `solrconfig.xml`.
 
 The definition is equivalent to:
diff --git a/solr/solr-ref-guide/src/uploading-data-with-index-handlers.adoc b/solr/solr-ref-guide/src/uploading-data-with-index-handlers.adoc
index 4cf0005..b868519 100644
--- a/solr/solr-ref-guide/src/uploading-data-with-index-handlers.adoc
+++ b/solr/solr-ref-guide/src/uploading-data-with-index-handlers.adoc
@@ -23,6 +23,11 @@
 
 A single unified update request handler supports XML, CSV, JSON, and javabin update requests, delegating to the appropriate `ContentStreamLoader` based on the `Content-Type` of the <<content-streams.adoc#content-streams,ContentStream>>.
 
+If you need to pre-process documents after they are loaded but before they are indexed (or even checked against the schema),
+Solr has document-preprocessing plugins for Update Request Handlers,
+called <<update-request-processors.adoc#update-request-processors,Update Request Processors>>,
+which allow for default and custom configuration chains.
+
 == UpdateRequestHandler Configuration
 
 The default configuration file has the update request handler configured by default.
diff --git a/solr/solrj/build.gradle b/solr/solrj/build.gradle
index e8c8a07..7dfd3cb 100644
--- a/solr/solrj/build.gradle
+++ b/solr/solrj/build.gradle
@@ -64,7 +64,6 @@
   testImplementation ('org.eclipse.jetty:jetty-alpn-java-server', {
     exclude group: "org.eclipse.jetty.alpn", module: "alpn-api"
   })
-  testImplementation 'org.restlet.jee:org.restlet.ext.servlet'
   testImplementation 'org.objenesis:objenesis'
   testImplementation('org.mockito:mockito-core', {
     exclude group: "net.bytebuddy", module: "byte-buddy-agent"
diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java b/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java
index c5f6164..5e6dfeb 100644
--- a/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java
+++ b/solr/solrj/src/java/org/apache/solr/common/MapWriterMap.java
@@ -51,6 +51,11 @@
 
 
   @Override
+  public int _size() {
+    return delegate.size();
+  }
+
+  @Override
   @SuppressWarnings("unchecked")
   public Map<String,Object> toMap(Map<String, Object> map) {
     return delegate;
diff --git a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java
index 22dca2b..4f88de5 100644
--- a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java
+++ b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java
@@ -86,4 +86,10 @@
     Object v = Utils.getObjectByPath(this, false, path);
     return v == null ? def : String.valueOf(v);
   }
+
+  default int _size() {
+    int[] size = new int[1];
+    _forEachEntry((k, v) -> size[0]++);
+    return size[0];
+  }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java b/solr/solrj/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java
index 8d185a2..7283f0c 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/NodesSysPropsCacher.java
@@ -154,10 +154,10 @@
           Thread.sleep(backOffTime);
         } catch (InterruptedException e1) {
           Thread.currentThread().interrupt();
-          log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // logOk
+          log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // nowarn
           break;
         }
-        log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // logOk
+        log.info("Exception on caching node:{} system.properties:{}, retry {}/{}", node, tags, i+1, NUM_RETRY, e); // nowarn
       }
     }
   }
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
index aa6404b..72dacf2 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkConfigManager.java
@@ -138,11 +138,7 @@
       for (String file : files) {
         List<String> children = zkClient.getChildren(fromZkPath + "/" + file, null, true);
         if (children.size() == 0) {
-          final String toZkFilePath = toZkPath + "/" + file;
-          log.info("Copying zk node {}/{} to {}", fromZkPath, file, toZkFilePath);
-          byte[] data = zkClient.getData(fromZkPath + "/" + file, null, null, true);
-          zkClient.makePath(toZkFilePath, data, true);
-          if (copiedToZkPaths != null) copiedToZkPaths.add(toZkFilePath);
+          copyData(copiedToZkPaths, fromZkPath + "/" + file, toZkPath + "/" + file);
         } else {
           copyConfigDirFromZk(fromZkPath + "/" + file, toZkPath + "/" + file, copiedToZkPaths);
         }
@@ -153,6 +149,13 @@
     }
   }
 
+  private void copyData(Set<String> copiedToZkPaths, String fromZkFilePath, String toZkFilePath) throws KeeperException, InterruptedException {
+    log.info("Copying zk node {} to {}", fromZkFilePath, toZkFilePath);
+    byte[] data = zkClient.getData(fromZkFilePath, null, null, true);
+    zkClient.makePath(toZkFilePath, data, true);
+    if (copiedToZkPaths != null) copiedToZkPaths.add(toZkFilePath);
+  }
+
   /**
    * Copy a config in ZooKeeper
    *
@@ -174,7 +177,15 @@
    * @throws IOException if an I/O error occurs
    */
   public void copyConfigDir(String fromConfig, String toConfig, Set<String> copiedToZkPaths) throws IOException {
-    copyConfigDirFromZk(CONFIGS_ZKNODE + "/" + fromConfig, CONFIGS_ZKNODE + "/" + toConfig, copiedToZkPaths);
+    String fromConfigPath = CONFIGS_ZKNODE + "/" + fromConfig;
+    String toConfigPath = CONFIGS_ZKNODE + "/" + toConfig;
+    try {
+      copyData(copiedToZkPaths, fromConfigPath, toConfigPath);
+    } catch (KeeperException | InterruptedException e) {
+      throw new IOException("Error copying config " + fromConfig + " to " + toConfig,
+              SolrZkClient.checkInterrupted(e));
+    }
+    copyConfigDirFromZk(fromConfigPath, toConfigPath, copiedToZkPaths);
   }
 
   // This method is used by configSetUploadTool and CreateTool to resolve the configset directory.
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java
index 9156619..e5ae8e2 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/NamedList.java
@@ -890,6 +890,11 @@
   }
 
   @Override
+  public int _size() {
+    return size();
+  }
+
+  @Override
   public void forEachEntry(BiConsumer<String, ? super T> fun) {
     forEach(fun);
   }
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index b4f1f47..86bef91 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -227,9 +227,49 @@
     @Override
     @SuppressWarnings({"rawtypes"})
     public void handleUnknownClass(Object o) {
+      // avoid materializing MapWriter / IteratorWriter to Map / List
+      // instead serialize them directly
       if (o instanceof MapWriter) {
-        Map m = ((MapWriter) o).toMap(new LinkedHashMap<>());
-        write(m);
+        MapWriter mapWriter = (MapWriter) o;
+        startObject();
+        final boolean[] first = new boolean[1];
+        first[0] = true;
+        int sz = mapWriter._size();
+        mapWriter._forEachEntry((k, v) -> {
+          if (first[0]) {
+            first[0] = false;
+          } else {
+            writeValueSeparator();
+          }
+          if (sz > 1) indent();
+          writeString(k.toString());
+          writeNameSeparator();
+          write(v);
+        });
+        endObject();
+      } else if (o instanceof IteratorWriter) {
+        IteratorWriter iteratorWriter = (IteratorWriter) o;
+        startArray();
+        final boolean[] first = new boolean[1];
+        first[0] = true;
+        try {
+          iteratorWriter.writeIter(new IteratorWriter.ItemWriter() {
+            @Override
+            public IteratorWriter.ItemWriter add(Object o) throws IOException {
+              if (first[0]) {
+                first[0] = false;
+              } else {
+                writeValueSeparator();
+              }
+              indent();
+              write(o);
+              return this;
+            }
+          });
+        } catch (IOException e) {
+          throw new RuntimeException("this should never happen", e);
+        }
+        endArray();
       } else {
         super.handleUnknownClass(o);
       }
@@ -239,13 +279,13 @@
   public static byte[] toJSON(Object o) {
     if (o == null) return new byte[0];
     CharArr out = new CharArr();
-    if (!(o instanceof List) && !(o instanceof Map)) {
-      if (o instanceof MapWriter) {
-        o = ((MapWriter) o).toMap(new LinkedHashMap<>());
-      } else if (o instanceof IteratorWriter) {
-        o = ((IteratorWriter) o).toList(new ArrayList<>());
-      }
-    }
+//    if (!(o instanceof List) && !(o instanceof Map)) {
+//      if (o instanceof MapWriter) {
+//        o = ((MapWriter) o).toMap(new LinkedHashMap<>());
+//      } else if (o instanceof IteratorWriter) {
+//        o = ((IteratorWriter) o).toList(new ArrayList<>());
+//      }
+//    }
     new MapWriterJSONWriter(out, 2).write(o); // indentation by default
     return toUTF8(out);
   }
@@ -800,7 +840,7 @@
     int statusCode = rsp.getStatusLine().getStatusCode();
     if (statusCode != 200) {
       try {
-        log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8)); // logOk
+        log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8)); // nowarn
       } catch (IOException e) {
         log.error("could not print error", e);
       }
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java
index fcb10d6..cb65455 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/request/SchemaTest.java
@@ -45,7 +45,6 @@
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
-import org.restlet.ext.servlet.ServerServlet;
 
 import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.equalTo;
@@ -107,9 +106,6 @@
     FileUtils.copyDirectory(new File(getFile("solrj/solr/collection1").getParent()), tmpSolrHome.getAbsoluteFile());
 
     final SortedMap<ServletHolder, String> extraServlets = new TreeMap<>();
-    final ServletHolder solrRestApi = new ServletHolder("SolrSchemaRestApi", ServerServlet.class);
-    solrRestApi.setInitParameter("org.restlet.application", "org.apache.solr.rest.SolrSchemaRestApi");
-    extraServlets.put(solrRestApi, "/schema/*");  // '/schema/*' matches '/schema', '/schema/', and '/schema/whatever...'
 
     System.setProperty("managed.schema.mutable", "true");
     System.setProperty("enable.update.log", "false");
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index 5c01018..0b3dce1 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -431,7 +431,7 @@
         if (useTlogReplicas()) {
           if (log.isInfoEnabled()) {
             log.info("create jetty {} in directory {} of type {} in shard {}"
-                , i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); // logOk
+                , i, jettyDir, Replica.Type.TLOG, ((currentI % sliceCount) + 1)); // nowarn
           }
           customThreadPool.submit(() -> {
             try {
@@ -463,7 +463,7 @@
         } else {
           if (log.isInfoEnabled()) {
             log.info("create jetty {} in directory {} of type {} for shard{}"
-                , i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); // logOk
+                , i, jettyDir, Replica.Type.NRT, ((currentI % sliceCount) + 1)); // nowarn
           }
           
           customThreadPool.submit(() -> {
@@ -492,7 +492,7 @@
           addedReplicas++;
         }
       } else {
-        log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); // logOk
+        log.info("create jetty {} in directory {} of type {} for shard{}", i, jettyDir, Replica.Type.PULL, ((currentI % sliceCount) + 1)); // nowarn
         customThreadPool.submit(() -> {
           try {
             JettySolrRunner j = createJetty(jettyDir, useJettyDataDir ? getDataDir(testDir + "/jetty"
diff --git a/solr/webapp/web/WEB-INF/web.xml b/solr/webapp/web/WEB-INF/web.xml
index 53ab57a..2599420 100644
--- a/solr/webapp/web/WEB-INF/web.xml
+++ b/solr/webapp/web/WEB-INF/web.xml
@@ -47,25 +47,11 @@
     <servlet-class>org.apache.solr.servlet.LoadAdminUiServlet</servlet-class>
   </servlet>
 
-  <servlet>
-    <servlet-name>SolrRestApi</servlet-name>
-    <servlet-class>org.restlet.ext.servlet.ServerServlet</servlet-class>
-    <init-param>
-      <param-name>org.restlet.application</param-name>
-      <param-value>org.apache.solr.rest.SolrSchemaRestApi</param-value>
-    </init-param>
-  </servlet>
-
   <servlet-mapping>
     <servlet-name>LoadAdminUI</servlet-name>
     <url-pattern>/index.html</url-pattern>
   </servlet-mapping>
 
-  <servlet-mapping>
-    <servlet-name>SolrRestApi</servlet-name>
-    <url-pattern>/schema/*</url-pattern>
-  </servlet-mapping>
-  
   <mime-mapping>
     <extension>.xsl</extension>
     <!-- per http://www.w3.org/TR/2006/PR-xslt20-20061121/ -->
diff --git a/versions.lock b/versions.lock
index 5c40e17..7291cea 100644
--- a/versions.lock
+++ b/versions.lock
@@ -34,7 +34,7 @@
 commons-cli:commons-cli:1.4 (1 constraints: a9041e2c)
 commons-codec:commons-codec:1.13 (1 constraints: d904f430)
 commons-collections:commons-collections:3.2.2 (1 constraints: 09050236)
-commons-io:commons-io:2.6 (1 constraints: ac04232c)
+commons-io:commons-io:2.8.0 (1 constraints: 0c050d36)
 commons-lang:commons-lang:2.6 (1 constraints: 2a0d520d)
 commons-logging:commons-logging:1.1.3 (2 constraints: c8149e7f)
 de.l3s.boilerpipe:boilerpipe:1.1.0 (1 constraints: 0405f335)
@@ -170,8 +170,6 @@
 org.locationtech.spatial4j:spatial4j:0.7 (1 constraints: ab041e2c)
 org.ow2.asm:asm:7.2 (2 constraints: 900e3e5e)
 org.ow2.asm:asm-commons:7.2 (1 constraints: ad042e2c)
-org.restlet.jee:org.restlet:2.4.3 (2 constraints: eb156ae7)
-org.restlet.jee:org.restlet.ext.servlet:2.4.3 (1 constraints: 0b050436)
 org.rrd4j:rrd4j:3.5 (1 constraints: ac04252c)
 org.slf4j:jcl-over-slf4j:1.7.24 (1 constraints: 4005473b)
 org.slf4j:slf4j-api:1.7.24 (15 constraints: a3ba2a7b)
diff --git a/versions.props b/versions.props
index c86546e..6855cf4 100644
--- a/versions.props
+++ b/versions.props
@@ -27,7 +27,7 @@
 commons-cli:commons-cli=1.4
 commons-codec:commons-codec=1.13
 commons-collections:commons-collections=3.2.2
-commons-io:commons-io=2.6
+commons-io:commons-io=2.8.0
 commons-logging:commons-logging=1.1.3
 de.l3s.boilerpipe:boilerpipe=1.1.0
 io.dropwizard.metrics:*=4.1.5
@@ -94,7 +94,6 @@
 org.mockito:mockito-core=2.23.4
 org.objenesis:objenesis=2.6
 org.ow2.asm:*=7.2
-org.restlet.jee:*=2.4.3
 org.rrd4j:rrd4j=3.5
 org.slf4j:*=1.7.24
 org.tallison:jmatio=1.5