Merge branch 'master' into jira/solr-15016
diff --git a/dev-tools/scripts/LUCENE-3753.patch.hack.pl b/dev-tools/scripts/LUCENE-3753.patch.hack.pl
deleted file mode 100644
index aa8860f..0000000
--- a/dev-tools/scripts/LUCENE-3753.patch.hack.pl
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/perl
-#
-# This script can be used to fix up paths that were moved as a result
-# of the structural changes committed as part of LUCENE-3753.
-#
-# Input is on STDIN, output is to STDOUT
-#
-# Example use:
-#
-#    perl LUCENE-3753.patch.hack.pl <my.pre-LUCENE-3753.patch >my.post-LUCENE-3753.patch
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-use strict;
-use warnings;
-
-my @moves = (
-
-    'lucene/src/java'
- => 'lucene/core/src/java',
-
-    'lucene/src/test'
- => 'lucene/core/src/test',
-
-    'lucene/src/resources'
- => 'lucene/core/src/resources',
-
-    'lucene/src/site'
- => 'lucene/site',
-
-    'lucene/src/test-framework/java'
- => 'lucene/test-framework/src/java',
-
-    'lucene/src/test-framework/resources'
- => 'lucene/test-framework/src/resources',
-
-    'lucene/src/tools/java'
- => 'lucene/tools/src/java',
-
-    'lucene/src/tools/javadoc'
- => 'lucene/tools/javadoc',
-
-    'lucene/src/tools/prettify'
- => 'lucene/tools/prettify',
-
-    'dev-tools/maven/lucene/src/pom.xml.template'
- => 'dev-tools/maven/lucene/core/pom.xml.template',
- 
-    'dev-tools/maven/lucene/src/test-framework/pom.xml.template'
- => 'dev-tools/maven/lucene/test-framework/pom.xml.template',
-);
-
-my @copies = ();
-
-my $diff;
-
-while (<>) {
-  if (/^Index/) {
-    my $next_diff = $_;
-    &fixup_paths if ($diff);
-    $diff = $next_diff;
-  } else {
-    $diff .= $_;
-  }
-}
-
-&fixup_paths; # Handle the final diff
-
-sub fixup_paths {
-  for (my $move_pos = 0 ; $move_pos < $#moves ; $move_pos += 2) {
-    my $source = $moves[$move_pos];
-    my $target = $moves[$move_pos + 1];
-    if ($diff =~ /^Index: \Q$source\E/) {
-      $diff =~ s/^Index: \Q$source\E/Index: $target/;
-      $diff =~ s/\n--- \Q$source\E/\n--- $target/;
-      $diff =~ s/\n\+\+\+ \Q$source\E/\n+++ $target/;
-      $diff =~ s/\nProperty changes on: \Q$source\E/\nProperty changes on: $target/;
-      last;
-    }
-  }
-  print $diff;
-
-  for (my $copy_pos = 0 ; $copy_pos < $#copies ; $copy_pos += 2) {
-    my $source = $copies[$copy_pos];
-    my $target = $copies[$copy_pos + 1];
-    if ($diff =~ /^Index: \Q$source\E/) {
-      my $new_diff = $diff;
-      $new_diff =~ s/^Index: \Q$source\E/Index: $target/;
-      $new_diff =~ s/\n--- \Q$source\E/\n--- $target/;
-      $new_diff =~ s/\n\+\+\+ \Q$source\E/\n+++ $target/;
-      $new_diff =~ s/\nProperty changes on: \Q$source\E/\nProperty changes on: $target/;
-      print $new_diff;
-      last;
-    }
-  }
-}
diff --git a/dev-tools/scripts/crawl.maven.release.dist.sh b/dev-tools/scripts/crawl.maven.release.dist.sh
deleted file mode 100755
index c59d451..0000000
--- a/dev-tools/scripts/crawl.maven.release.dist.sh
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/bin/sh
-#
-# Crawls all Maven release distribution artifacts at the given release RC URL
-# and downloads them to ./lucene/ and ./solr/ after first creating these
-# two directories in the current directory.
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-if [ -z "$1" ] ; then
-    echo "Usage: $0 <RC-URL>"
-    echo ""
-    echo "Example: $0 http://s.apache.org/lusolr36rc1"
-    exit 1;
-fi
-
-# Resolve redirects, e.g. from URL shortening, e.g. http://s.apache.org/lusolr36rc1
-# Also trim trailing slashes, if any, from the resolved URL.
-RC_URL=`(echo "Location: $1" ; wget -l 1 --spider "$1" 2>&1) \
-        | perl -ne '$url=$1 if (/Location:\s*(\S+)/); END { $url =~ s~/+$~~; print $url; }'`
-
-if [ -d lucene ] ; then
-    echo "Please remove directory ./lucene/ before running this script."
-    exit 1;
-elif [ -d solr ] ; then
-    echo "Please remove directory ./solr/ before running this script."
-    exit 1;
-fi
-
-mkdir lucene
-cd lucene
-
-# -r : recurse
-# -np : "no parents": only download below the given URL
-# -l 0 : infinite recursion (no limit on recursive crawling depth)
-# -nH : "no Hostname" output directory - use only path elements
-# -erobots=off : ignore robots.txt
-# --cut-dirs=5: Don't create output directories for the first 5 path elements, e.g.
-#    /~acct/staging_area/lucene-solr-X.Y.Z-RCM-revNNNNNNN/lucene/maven/org/apache/lucene/...
-#    1     2            3                                4      5     6   7      8      9
-#                                                                     ^- Dirs start here     
-wget -r -np -l 0 -nH -erobots=off --cut-dirs=5 \
-     --reject="*.md5,*.sha1,*.sha512,maven-metadata.xml*,index.html*" "${RC_URL}/lucene/maven/"
-
-cd ..
-
-mkdir solr
-cd solr
-
-wget -r -np -l 0 -nH -erobots=off --cut-dirs=5 \
-     --reject="*.md5,*.sha1,*.sha512,maven-metadata.xml*,index.html*" "${RC_URL}/solr/maven/"
-
-cd ..
-
diff --git a/dev-tools/scripts/gitignore-gen.sh b/dev-tools/scripts/gitignore-gen.sh
deleted file mode 100755
index cf703b2..0000000
--- a/dev-tools/scripts/gitignore-gen.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-rm -f .gitignore-new
-
-for dir in `find . -path '*.svn*' -prune -o -type d -print | grep -v  -e "/build"`; do
-  ruby -e 'printf("SVN dir: %-70s", ARGV[0])' "$dir" >&2
-  svn info $dir > /dev/null 2>&1
-  if [ "$?" -eq "0" ]; then
-   svn propget "svn:ignore" $dir | ruby -e 'while $stdin.gets; (puts ARGV[0].gsub(/^\./, "") + "/" + $_) unless $_.strip.empty?; end' "$dir" > .temp
-   if [ -s .temp ]; then
-     echo " OK" >&2
-     echo -e "\n\n# $dir" >> .gitignore-new
-     cat .temp            >> .gitignore-new
-   else
-     echo " --" >&2
-   fi
-   rm .temp
-  else
-   echo " NOT svn controlled." >&2
-  fi
-done
\ No newline at end of file
diff --git a/dev-tools/scripts/write.stage.maven.build.xml.pl b/dev-tools/scripts/write.stage.maven.build.xml.pl
deleted file mode 100755
index 21f09e8..0000000
--- a/dev-tools/scripts/write.stage.maven.build.xml.pl
+++ /dev/null
@@ -1,180 +0,0 @@
-#!/usr/bin/perl
-#
-# This script is called from lucene/build.xml and solr/build.xml, by target
-# stage-maven-artifacts, to populate an internal Maven repository created by
-# generate-maven-artifacts with Ant build files, one per POM.  The
-# stage-maven target is then called from each of these Ant build files.
-#
-# Command line parameters:
-#
-#  1. The directory in which to find Maven distribution POMs,
-#     jars, wars, and signatures.
-#  2. The pathname of the Ant build script to be built.
-#  3. The pathname of common-build.xml, which will be imported
-#     in the Ant build script to be built.
-#  4. Whether to prompt for credentials, rather than consulting
-#     settings.xml: boolean, e.g. "true" or "false"
-#  5. The ID of the target repository
-#  6. The URL to the target repository
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-use strict;
-use warnings;
-use File::Basename;
-use File::Find;
-use Cwd 'abs_path';
-use File::Path qw(make_path);
-
-my $num_artifacts = 0;
-my $maven_dist_dir = abs_path($ARGV[0]);
-my $output_build_xml_file = $ARGV[1];
-my $common_build_xml = $ARGV[2];
-my $m2_credentials_prompt = $ARGV[3];
-my $m2_repository_id = $ARGV[4];
-if ($^O eq 'cygwin') { # Make sure Cygwin Perl can find the output path
-  $output_build_xml_file = `cygpath -u "$output_build_xml_file"`;
-  $output_build_xml_file =~ s/\s+$//; # Trim trailing whitespace
-  $output_build_xml_file =~ s/^\s+//; # Trim leading whitespace
-}
-my ($output_file, $output_dir) = fileparse($output_build_xml_file);
-
-my @basepaths = ();
-my $grandparent_pom = '';
-my @parent_poms = ();
-sub find_poms;
-File::Find::find({follow => 1, wanted => \&find_poms}, $maven_dist_dir);
-
-my $parent_pom_targets = '';
-if (@parent_poms) {
-  $parent_pom_targets = "<parent-poms>\n";
-  if ($grandparent_pom) {
-    $parent_pom_targets .= qq!          <artifact:pom id="grandparent" file="$grandparent_pom"/>\n!;
-  }
-  my $n = 0;
-  for my $parent_pom (@parent_poms) {
-    $parent_pom_targets .= qq!          <artifact:pom id="parent.$n" file="$parent_pom"/>\n!;
-    ++$n;
-  }
-  $parent_pom_targets .= "        </parent-poms>\n";
-}
-
-make_path($output_dir);
-open my $output_build_xml, ">$output_build_xml_file"
-    or die "ERROR opening '$ARGV[1]' for writing: $!";
-
-print $output_build_xml qq!<?xml version="1.0"?>
-<project xmlns:artifact="antlib:org.apache.maven.artifact.ant">
-  <import file="${common_build_xml}"/>
-
-  <target name="stage-maven" depends="install-maven-tasks">
-    <sequential>
-!;
-
-my $credentials = '';
-if ($m2_credentials_prompt !~ /\A(?s:f(?:alse)?|no?)\z/) {
-  print $output_build_xml qq!
-      <input message="Enter $m2_repository_id username: >" addproperty="m2.repository.username"/>
-      <echo>WARNING: ON SOME PLATFORMS YOUR PASSPHRASE WILL BE ECHOED BACK\!\!\!\!\!</echo>
-      <input message="Enter $m2_repository_id password: >" addproperty="m2.repository.password">
-        <handler type="secure"/>
-      </input>\n!;
-
-  $credentials = q!<credentials>
-          <authentication username="${m2.repository.username}" password="${m2.repository.password}"/>
-        </credentials>!;
-}
-
-for my $basepath (@basepaths) {
-  output_deploy_stanza($basepath);
-}
-
-print $output_build_xml q!
-    </sequential>
-  </target>
-</project>
-!;
-
-close $output_build_xml;
-
-print "Wrote '$output_build_xml_file' to stage $num_artifacts Maven artifacts.\n";
-exit;
-
-sub find_poms {
-  /^(.*)\.pom\z/s && do {
-    my $pom_dir = $File::Find::dir;
-    if ($^O eq 'cygwin') { # Output windows-style paths on Windows
-      $pom_dir = `cygpath -w "$pom_dir"`;
-      $pom_dir =~ s/\s+$//; # Trim trailing whitespace
-      $pom_dir =~ s/^\s+//; # Trim leading whitespace
-    }
-    my $basefile = $_;
-    $basefile =~ s/\.pom\z//;
-    my $basepath = "$pom_dir/$basefile";
-    push @basepaths, $basepath;
-
-    if ($basefile =~ /grandparent/) {
-      $grandparent_pom = "$basepath.pom";
-    } elsif ($basefile =~ /parent/) {
-      push @parent_poms, "$basepath.pom";
-    }
-  }
-}
-
-sub output_deploy_stanza {
-  my $basepath = shift;
-  my $pom_file = "$basepath.pom";
-  my $jar_file = "$basepath.jar";
-  my $war_file = "$basepath.war";
-
-  if (-f $war_file) {
-    print $output_build_xml qq!
-      <m2-deploy pom.xml="${pom_file}" jar.file="${war_file}">
-        $parent_pom_targets
-        <artifact-attachments>
-          <attach file="${pom_file}.asc" type="pom.asc"/>
-          <attach file="${war_file}.asc" type="war.asc"/>
-        </artifact-attachments>
-        $credentials
-      </m2-deploy>\n!;
-  } elsif (-f $jar_file) {
-    print $output_build_xml qq!
-      <m2-deploy pom.xml="${pom_file}" jar.file="${jar_file}">
-        $parent_pom_targets
-        <artifact-attachments>
-          <attach file="${basepath}-sources.jar" classifier="sources"/>
-          <attach file="${basepath}-javadoc.jar" classifier="javadoc"/>
-          <attach file="${pom_file}.asc" type="pom.asc"/>
-          <attach file="${jar_file}.asc" type="jar.asc"/>
-          <attach file="${basepath}-sources.jar.asc" classifier="sources" type="jar.asc"/>
-          <attach file="${basepath}-javadoc.jar.asc" classifier="javadoc" type="jar.asc"/>
-        </artifact-attachments>
-        $credentials
-      </m2-deploy>\n!;
-  } else {
-    print $output_build_xml qq!
-      <m2-deploy pom.xml="${pom_file}">
-        $parent_pom_targets
-        <artifact-attachments>
-          <attach file="${pom_file}.asc" type="pom.asc"/>
-        </artifact-attachments>
-        $credentials
-      </m2-deploy>\n!;
-  }
-
-  ++$num_artifacts;
-}
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 2d3c279..8a482b7 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -84,6 +84,8 @@
 
 Improvements
 
+* LUCENE-9618: Do not call IntervalIterator.nextInterval after NO_MORE_DOCS is returned. (Haoyu Zhai)
+
 * LUCENE-9576: Improve ConcurrentMergeScheduler settings by default, assuming modern I/O.
   Previously Lucene was too conservative, jumping through hoops to detect if disks were SSD-backed.
   In many common modern cases (VMs, RAID arrays, containers, encrypted mounts, non-Linux OS),
@@ -186,6 +188,8 @@
 
 Other
 
+* LUCENE-9631: Properly override slice() on subclasses of OffsetRange. (Dawid Weiss)
+
 * LUCENE-9312: Allow gradle builds against arbitrary JVMs. (Tomoko Uchida, Dawid Weiss)
 
 * LUCENE-9391: Upgrade HPPC to 0.8.2. (Haoyu Zhai)
diff --git a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
index d022238..9534998 100644
--- a/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/FieldComparator.java
@@ -142,6 +142,17 @@
   public void setSingleSort() {
   }
 
+  /**
+   * Informs the comparator that the skipping of documents should be disabled.
+   * This function is called by TopFieldCollector in cases when the skipping functionality
+   * should not be applied or not necessary. An example could be when
+   * search sort is a part of the index sort, and can be already efficiently
+   * handled by TopFieldCollector, and doing extra work for skipping in the comparator
+   * is redundant.
+   */
+  public void disableSkipping() {
+  }
+
   /** Sorts by descending relevance.  NOTE: if you are
    *  sorting only by descending relevance and then
    *  secondarily by ascending docID, performance is faster
diff --git a/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java b/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
index 97f745c..6a93658 100644
--- a/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/LeafFieldComparator.java
@@ -131,5 +131,5 @@
    */
   default void setHitsThresholdReached() throws IOException{
   }
-
+  
 }
diff --git a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
index e529892..a47c7bb 100644
--- a/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
+++ b/lucene/core/src/java/org/apache/lucene/search/TopFieldCollector.java
@@ -46,38 +46,31 @@
   // always compare lower than a real hit; this would
   // save having to check queueFull on each insert
 
-  private static abstract class MultiComparatorLeafCollector implements LeafCollector {
+  private abstract class TopFieldLeafCollector implements LeafCollector  {
 
     final LeafFieldComparator comparator;
     final int reverseMul;
     Scorable scorer;
-
-    MultiComparatorLeafCollector(LeafFieldComparator[] comparators, int[] reverseMul) {
-      if (comparators.length == 1) {
-        this.reverseMul = reverseMul[0];
-        this.comparator = comparators[0];
-      } else {
-        this.reverseMul = 1;
-        this.comparator = new MultiLeafFieldComparator(comparators, reverseMul);
-      }
-    }
-
-    @Override
-    public void setScorer(Scorable scorer) throws IOException {
-      comparator.setScorer(scorer);
-      this.scorer = scorer;
-    }
-  }
-
-  private abstract class TopFieldLeafCollector extends MultiComparatorLeafCollector {
-
-    final boolean canEarlyTerminate;
     boolean collectedAllCompetitiveHits = false;
 
     TopFieldLeafCollector(FieldValueHitQueue<Entry> queue, Sort sort, LeafReaderContext context) throws IOException {
-      super(queue.getComparators(context), queue.getReverseMul());
-      final Sort indexSort = context.reader().getMetaData().getSort();
-      canEarlyTerminate = canEarlyTerminate(sort, indexSort);
+      // as all segments are sorted in the same way, enough to check only the 1st segment for indexSort
+      if (searchSortPartOfIndexSort == null) {
+        final Sort indexSort = context.reader().getMetaData().getSort();
+        searchSortPartOfIndexSort = canEarlyTerminate(sort, indexSort);
+        if (searchSortPartOfIndexSort) {
+          firstComparator.disableSkipping();
+        }
+      }
+      LeafFieldComparator[] comparators = queue.getComparators(context);
+      int[] reverseMuls = queue.getReverseMul();
+      if (comparators.length == 1) {
+        this.reverseMul = reverseMuls[0];
+        this.comparator = comparators[0];
+      } else {
+        this.reverseMul = 1;
+        this.comparator = new MultiLeafFieldComparator(comparators, reverseMuls);
+      }
     }
 
     void countHit(int doc) throws IOException {
@@ -100,7 +93,7 @@
         // since docs are visited in doc Id order, if compare is 0, it means
         // this document is largest than anything else in the queue, and
         // therefore not competitive.
-        if (canEarlyTerminate) {
+        if (searchSortPartOfIndexSort) {
           if (hitsThresholdChecker.isThresholdReached()) {
             totalHitsRelation = Relation.GREATER_THAN_OR_EQUAL_TO;
             throw new CollectionTerminatedException();
@@ -139,7 +132,8 @@
 
     @Override
     public void setScorer(Scorable scorer) throws IOException {
-      super.setScorer(scorer);
+      this.scorer = scorer;
+      comparator.setScorer(scorer);
       minCompetitiveScore = 0f;
       updateMinCompetitiveScore(scorer);
       if (minScoreAcc != null) {
@@ -154,8 +148,6 @@
 
   }
 
-  // TODO: remove this code when all bulk scores similar to {@code DefaultBulkScorer} use collectors' iterator,
-  // as early termination should be implemented in their respective comparators and removed from a collector
   static boolean canEarlyTerminate(Sort searchSort, Sort indexSort) {
     return canEarlyTerminateOnDocId(searchSort) ||
            canEarlyTerminateOnPrefix(searchSort, indexSort);
@@ -286,9 +278,11 @@
 
   final int numHits;
   final HitsThresholdChecker hitsThresholdChecker;
-  final FieldComparator.RelevanceComparator relevanceComparator;
+  final FieldComparator<?> firstComparator;
   final boolean canSetMinScore;
 
+  Boolean searchSortPartOfIndexSort = null; // shows if Search Sort if a part of the Index Sort
+
   // an accumulator that maintains the maximum of the segment's minimum competitive scores
   final MaxScoreAccumulator minScoreAcc;
   // the current local minimum competitive score already propagated to the underlying scorer
@@ -314,17 +308,15 @@
     this.numHits = numHits;
     this.hitsThresholdChecker = hitsThresholdChecker;
     this.numComparators = pq.getComparators().length;
-    FieldComparator<?> firstComparator = pq.getComparators()[0];
+    this.firstComparator = pq.getComparators()[0];
     int reverseMul = pq.reverseMul[0];
 
     if (firstComparator.getClass().equals(FieldComparator.RelevanceComparator.class)
             && reverseMul == 1 // if the natural sort is preserved (sort by descending relevance)
             && hitsThresholdChecker.getHitsThreshold() != Integer.MAX_VALUE) {
-      relevanceComparator = (FieldComparator.RelevanceComparator) firstComparator;
       scoreMode = ScoreMode.TOP_SCORES;
       canSetMinScore = true;
     } else {
-      relevanceComparator = null;
       canSetMinScore = false;
       if (hitsThresholdChecker.getHitsThreshold() != Integer.MAX_VALUE) {
         scoreMode = needsScores ? ScoreMode.TOP_DOCS_WITH_SCORES : ScoreMode.TOP_DOCS;
@@ -360,8 +352,8 @@
     if (canSetMinScore
           && queueFull
           && hitsThresholdChecker.isThresholdReached()) {
-      assert bottom != null && relevanceComparator != null;
-      float minScore = relevanceComparator.value(bottom.slot);
+      assert bottom != null;
+      float minScore = (float) firstComparator.value(bottom.slot);
       if (minScore > minCompetitiveScore) {
         scorer.setMinCompetitiveScore(minScore);
         minCompetitiveScore = minScore;
diff --git a/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java b/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java
index fa5f267..11b186b 100644
--- a/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java
+++ b/lucene/core/src/java/org/apache/lucene/search/comparators/NumericComparator.java
@@ -39,19 +39,19 @@
     protected final T missingValue;
     protected final String field;
     protected final boolean reverse;
-    protected final boolean primarySort;
     private final int bytesCount; // how many bytes are used to encode this number
 
     protected boolean topValueSet;
     protected boolean singleSort; // singleSort is true, if sort is based on a single sort field.
     protected boolean hitsThresholdReached;
     protected boolean queueFull;
+    private boolean canSkipDocuments;
 
     protected NumericComparator(String field, T missingValue, boolean reverse, int sortPos, int bytesCount) {
         this.field = field;
         this.missingValue = missingValue;
         this.reverse = reverse;
-        this.primarySort = (sortPos == 0);
+        this.canSkipDocuments = (sortPos == 0); // skipping functionality is only relevant for primary sort
         this.bytesCount = bytesCount;
     }
 
@@ -65,17 +65,22 @@
         singleSort = true;
     }
 
+    @Override
+    public void disableSkipping() {
+        canSkipDocuments = false;
+    }
+
     /**
      * Leaf comparator for {@link NumericComparator} that provides skipping functionality
      */
     public abstract class NumericLeafComparator implements LeafFieldComparator {
         protected final NumericDocValues docValues;
         private final PointValues pointValues;
-        private final boolean enableSkipping; // if skipping functionality should be enabled
+        private final boolean enableSkipping; // if skipping functionality should be enabled on this segment
         private final int maxDoc;
         private final byte[] minValueAsBytes;
         private final byte[] maxValueAsBytes;
-
+        
         private DocIdSetIterator competitiveIterator;
         private long iteratorCost;
         private int maxDocVisited = 0;
@@ -83,9 +88,9 @@
 
         public NumericLeafComparator(LeafReaderContext context) throws IOException {
             this.docValues = getNumericDocValues(context, field);
-            this.pointValues = primarySort ? context.reader().getPointValues(field) : null;
+            this.pointValues = canSkipDocuments ? context.reader().getPointValues(field) : null;
             if (pointValues != null) {
-                this.enableSkipping = true; // skipping is enabled on primarySort and when points are available
+                this.enableSkipping = true; // skipping is enabled when points are available
                 this.maxDoc = context.reader().maxDoc();
                 this.maxValueAsBytes = reverse == false ? new byte[bytesCount] : topValueSet ? new byte[bytesCount] : null;
                 this.minValueAsBytes = reverse ? new byte[bytesCount] : topValueSet ? new byte[bytesCount] : null;
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/MatchHighlighter.java b/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/MatchHighlighter.java
index d59af2a..277a324 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/MatchHighlighter.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/MatchHighlighter.java
@@ -163,6 +163,11 @@
       super(from, to);
       this.query = query;
     }
+
+    @Override
+    public QueryOffsetRange slice(int from, int to) {
+      return new QueryOffsetRange(query, from, to);
+    }
   }
 
   private static class DocHit {
diff --git a/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/Passage.java b/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/Passage.java
index 9a4dc4b..7fafedd 100644
--- a/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/Passage.java
+++ b/lucene/highlighter/src/java/org/apache/lucene/search/matchhighlight/Passage.java
@@ -32,6 +32,15 @@
     this.markers = markers;
   }
 
+  /**
+   * Passages can't be sliced as it could split previously determined
+   * highlight markers.
+   */
+  @Override
+  public OffsetRange slice(int from, int to) {
+    throw new RuntimeException("Passages.slice() does not make sense?");
+  }
+
   @Override
   public String toString() {
     return "[" + super.toString() + ", markers=" + markers + "]";
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/intervals/ConjunctionIntervalIterator.java b/lucene/queries/src/java/org/apache/lucene/queries/intervals/ConjunctionIntervalIterator.java
index bc58d9c..e813184 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/intervals/ConjunctionIntervalIterator.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/intervals/ConjunctionIntervalIterator.java
@@ -46,14 +46,18 @@
   @Override
   public int nextDoc() throws IOException {
     int doc = approximation.nextDoc();
-    reset();
+    if (doc != NO_MORE_DOCS) {
+      reset();
+    }
     return doc;
   }
 
   @Override
   public int advance(int target) throws IOException {
     int doc = approximation.advance(target);
-    reset();
+    if (doc != NO_MORE_DOCS) {
+      reset();
+    }
     return doc;
   }
 
diff --git a/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalIterator.java b/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalIterator.java
index c5fbf2a..d08aba8 100644
--- a/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalIterator.java
+++ b/lucene/queries/src/java/org/apache/lucene/queries/intervals/IntervalIterator.java
@@ -82,6 +82,12 @@
   /**
    * Advance the iterator to the next interval
    *
+   * Should not be called after {@link DocIdSetIterator#NO_MORE_DOCS} is returned by {@link DocIdSetIterator#nextDoc()} or
+   * {@link DocIdSetIterator#advance(int)}.
+   * If that's the case in some existing code, please consider opening an issue.
+   * However, after {@link IntervalIterator#NO_MORE_INTERVALS} is returned by this method, it might be
+   * called again.
+   *
    * @return the start of the next interval, or {@link IntervalIterator#NO_MORE_INTERVALS} if
    *         there are no more intervals on the current document
    */
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/intervals/OneTimeIntervalSource.java b/lucene/queries/src/test/org/apache/lucene/queries/intervals/OneTimeIntervalSource.java
new file mode 100644
index 0000000..e778564
--- /dev/null
+++ b/lucene/queries/src/test/org/apache/lucene/queries/intervals/OneTimeIntervalSource.java
@@ -0,0 +1,189 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.lucene.queries.intervals;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Collections;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.MatchesIterator;
+import org.apache.lucene.search.Query;
+import org.apache.lucene.search.QueryVisitor;
+
+/**
+ * A mock interval source that will only return a constant position for all documents
+ */
+public class OneTimeIntervalSource extends IntervalsSource {
+  @Override
+  public IntervalIterator intervals(String field, LeafReaderContext ctx) throws IOException {
+    return new IntervalIterator() {
+      int doc = -1;
+      boolean flag;
+      final int maxDoc = ctx.reader().maxDoc();
+      @Override
+      public int start() {
+        return 0;
+      }
+
+      @Override
+      public int end() {
+        return 0;
+      }
+
+      @Override
+      public int gaps() {
+        return 0;
+      }
+
+      /* only returns valid position every first time called per doc */
+      @Override
+      public int nextInterval() throws IOException {
+        if (doc != NO_MORE_DOCS) {
+          if (flag) {
+            flag = false;
+            return start();
+          } else {
+            return NO_MORE_INTERVALS;
+          }
+        }
+        throw new AssertionError("Called with docId == NO_MORE_DOCS");
+      }
+
+      @Override
+      public float matchCost() {
+        return 0;
+      }
+
+      @Override
+      public int docID() {
+        return doc;
+      }
+
+      @Override
+      public int nextDoc() throws IOException {
+        doc++;
+        if (doc >= maxDoc) {
+          doc = NO_MORE_DOCS;
+        }
+        flag = true;
+        return doc;
+      }
+
+      @Override
+      public int advance(int target) throws IOException {
+        doc = target;
+        if (doc >= maxDoc) {
+          doc = NO_MORE_DOCS;
+        }
+        flag = true;
+        return doc;
+      }
+
+      @Override
+      public long cost() {
+        return 0;
+      }
+    };
+  }
+
+  @Override
+  public IntervalMatchesIterator matches(String field, LeafReaderContext ctx, int doc) throws IOException {
+    return new IntervalMatchesIterator() {
+      boolean next = true;
+      @Override
+      public int gaps() {
+        return 0;
+      }
+
+      @Override
+      public int width() {
+        return 1;
+      }
+
+      @Override
+      public boolean next() throws IOException {
+        if (next) {
+          next = false;
+          return true;
+        }
+        return false;
+      }
+
+      @Override
+      public int startPosition() {
+        return 0;
+      }
+
+      @Override
+      public int endPosition() {
+        return 0;
+      }
+
+      @Override
+      public int startOffset() throws IOException {
+        return 0;
+      }
+
+      @Override
+      public int endOffset() throws IOException {
+        return 0;
+      }
+
+      @Override
+      public MatchesIterator getSubMatches() throws IOException {
+        return null;
+      }
+
+      @Override
+      public Query getQuery() {
+        return null;
+      }
+    };
+  }
+
+  @Override
+  public void visit(String field, QueryVisitor visitor) {
+
+  }
+
+  @Override
+  public int minExtent() {
+    return 0;
+  }
+
+  @Override
+  public Collection<IntervalsSource> pullUpDisjunctions() {
+    return Collections.singleton(this);
+  }
+
+  @Override
+  public int hashCode() {
+    return 0;
+  }
+
+  @Override
+  public boolean equals(Object other) {
+    return false;
+  }
+
+  @Override
+  public String toString() {
+    return "";
+  }
+}
diff --git a/lucene/queries/src/test/org/apache/lucene/queries/intervals/TestIntervalQuery.java b/lucene/queries/src/test/org/apache/lucene/queries/intervals/TestIntervalQuery.java
index 2ca79b3..a259534 100644
--- a/lucene/queries/src/test/org/apache/lucene/queries/intervals/TestIntervalQuery.java
+++ b/lucene/queries/src/test/org/apache/lucene/queries/intervals/TestIntervalQuery.java
@@ -331,4 +331,10 @@
     expectThrows(IllegalArgumentException.class, () -> new IntervalQuery(field, source, 1, -1f));
   }
 
+  public void testAdvanceBehavior() throws IOException {
+    Query q = new IntervalQuery(field,
+        Intervals.containing(Intervals.term("w1"), new OneTimeIntervalSource()));
+    checkHits(q, new int[]{ 0, 1, 2, 3});
+  }
+
 }
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index d2e095f..dcb2a31 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -49,6 +49,22 @@
   properly regardless of the mode (standalone, distributed). The API has been stripped of ancient, unused, interfaces
   and simplified. (Dawid Weiss)
 
+* SOLR-14972: Prometheus: Change default port of prometheus exporter to 8989
+  because it clashed with default embedded zookeeper port (janhoy)
+
+* SOLR-14001: Docker: Removed /var/solr initialization from the Dockerfile; depend on init_var_solr.sh instead.
+  This leads to more consistent behavior no matter how /var/solr is mounted.
+  * init_var_solr.sh is now invoked by docker-entrypoint.sh; not in a bunch of other places.
+  * as before, you can set NO_INIT_VAR_SOLR=1 to short-circuit this.
+  * init_var_solr.sh no longer echoes anything.  For verbosity, set VERBOSE=yes.
+  (David Smiley)
+
+* SOLR-14957: Docker, Prometheus: Add Prometheus Exporter to docker PATH. Fix classpath issues.
+  (Houston Putman)
+
+* SOLR-14949: Docker: Ability to customize the FROM image when building.
+  (Houston Putman)
+
 Other Changes
 ----------------------
 * SOLR-14656: Autoscaling framework removed (Ishan Chattopadhyaya, noble, Ilan Ginzburg)
@@ -151,18 +167,33 @@
 * SOLR-14035: Remove deprecated preferLocalShards=true support in favour of the shards.preference=replica.location:local alternative.
   (Alex Bulygin via Christine Poerschke)
 
+* SOLR-14934: Remove redundant deprecated "solr.solr.home" logic (hossman)
+
+* SOLR-14915: Prometheus: Reduced dependencies from Solr server down to just SolrJ.  Don't add WEB-INF/lib.
+  * Can run via gradle, "gradlew run"
+  * Has own log4j2.xml now
+  * Was missing some dependencies in lib/; now has all except SolrJ & logging.
+  (David Smiley, Houston Putman)
+
+* SOLR-14789: Docker: Migrate docker image creation from docker-solr repo to solr/docker.
+  (Houston Putman, Martijn Koster, Tim Potter, David Smiley, janhoy, Mike Drob)
+
 Bug Fixes
 ---------------------
 * SOLR-14546: Fix for a relatively hard to hit issue in OverseerTaskProcessor that could lead to out of order execution
   of Collection API tasks competing for a lock (Ilan Ginzburg).
 
-* SOLR-12182: Don't persist base_url in ZK as the URL scheme is variable, compute from node_name instead when reading
-  state back from ZK. (Timothy Potter)
-
 ==================  8.8.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
 
+Upgrade Notes
+---------------------
+
+* Internal logic for identifying 'Solr Home' has been refactored to make testing less error prone.  Plugin
+  developers using SolrPaths.locateSolrHome() or 'new SolrResourceLoader' should check deprecation warnings as
+  some existing functionality will be removed in 9.0.  See SOLR-14934 for more details.
+
 New Features
 ---------------------
 
@@ -183,6 +214,10 @@
 
 * SOLR-15015 : Add interleaving algorithm parameter support in Learning To Rank (Alessandro Benedetti)
 
+* SOLR-14965: metrics: Adds two metrics to the SolrCloud Overseer: solr_metrics_overseer_stateUpdateQueueSize
+  and solr_metrics_overseer_collectionWorkQueueSize with corresponding entries in the Prometheus exporter's
+  default/stock configuration.  (Saatchi Bhalla, Megan Carey, Andrzej Białecki, David Smiley)
+
 Optimizations
 ---------------------
 * SOLR-14975: Optimize CoreContainer.getAllCoreNames, getLoadedCoreNames and getCoreDescriptors. (Bruno Roustant)
@@ -213,6 +248,13 @@
 * SOLR-15017: Core lib directories were not being recognized unless the solrconfig included a <lib> directive.
   (Thomas Mortagne)
 
+* SOLR-14934: Refactored duplicate "Solr Home" logic into a single place to eliminate risk of tests using divergent values
+  for a single solr node.  (hossman)
+
+* SOLR-12182: Don't persist base_url in ZK as the URL scheme is variable, compute from node_name instead when reading
+  state back from ZK. (Timothy Potter)
+
+
 Other Changes
 ---------------------
 
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java
index e921bcb..8501944 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRReRankingPipeline.java
@@ -17,6 +17,7 @@
 package org.apache.solr.ltr;
 
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -57,7 +58,7 @@
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private static final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+  private static final SolrResourceLoader solrResourceLoader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
 
   private IndexSearcher getSearcher(IndexReader r) {
     // 'yes' to maybe wrapping in general
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java
index 973436f..71df1ed 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestLTRScoringQuery.java
@@ -17,6 +17,7 @@
 package org.apache.solr.ltr;
 
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
@@ -55,7 +56,7 @@
 
 public class TestLTRScoringQuery extends SolrTestCase {
 
-  public final static SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+  public final static SolrResourceLoader solrResourceLoader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
 
   private IndexSearcher getSearcher(IndexReader r) {
     final IndexSearcher searcher = newSearcher(r, false, false);
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
index 5dd7cf0..bf35d3d 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/TestRerankBase.java
@@ -21,6 +21,7 @@
 import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.nio.file.Files;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -53,7 +54,7 @@
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  protected static final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+  protected static final SolrResourceLoader solrResourceLoader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
 
   protected static File tmpSolrHome;
   protected static File tmpConfDir;
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
index 7627ae9..52e39b5 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestMinMaxNormalizer.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.ltr.norm;
 
+import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -28,7 +29,7 @@
 
 public class TestMinMaxNormalizer {
 
-  private final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+  private final SolrResourceLoader solrResourceLoader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
 
   private Normalizer implTestMinMax(Map<String,Object> params,
       float expectedMin, float expectedMax) {
@@ -123,7 +124,7 @@
 
     final Map<String,Object> params = n1.paramsToMap();
     final MinMaxNormalizer n2 = (MinMaxNormalizer) Normalizer.getInstance(
-        new SolrResourceLoader(),
+        new SolrResourceLoader(Paths.get("").toAbsolutePath()),
         MinMaxNormalizer.class.getName(),
         params);
     assertEquals(n1.getMin(), n2.getMin(), 1e-6);
diff --git a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
index 62e415f..bc94337 100644
--- a/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
+++ b/solr/contrib/ltr/src/test/org/apache/solr/ltr/norm/TestStandardNormalizer.java
@@ -16,6 +16,7 @@
  */
 package org.apache.solr.ltr.norm;
 
+import java.nio.file.Paths;
 import java.util.HashMap;
 import java.util.Map;
 
@@ -28,7 +29,7 @@
 
 public class TestStandardNormalizer {
 
-  private final SolrResourceLoader solrResourceLoader = new SolrResourceLoader();
+  private final SolrResourceLoader solrResourceLoader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
 
   private Normalizer implTestStandard(Map<String,Object> params,
       float expectedAvg, float expectedStd) {
@@ -129,7 +130,7 @@
 
     final Map<String, Object> params = n1.paramsToMap();
     final StandardNormalizer n2 = (StandardNormalizer) Normalizer.getInstance(
-        new SolrResourceLoader(),
+        new SolrResourceLoader(Paths.get("").toAbsolutePath()),
         StandardNormalizer.class.getName(),
         params);
     assertEquals(n1.getAvg(), n2.getAvg(), 1e-6);
diff --git a/solr/contrib/prometheus-exporter/CHANGES.md b/solr/contrib/prometheus-exporter/CHANGES.md
deleted file mode 100644
index 42deac6..0000000
--- a/solr/contrib/prometheus-exporter/CHANGES.md
+++ /dev/null
@@ -1,20 +0,0 @@
-This file lists release notes for this module.
-Prior to version 9, changes were in Solr's CHANGES.txt
-
-9.0.0
-======================
-
-Improvements
-----------------------
-* SOLR-14972: Change default port of prometheus exporter to 8989 
-  because it clashed with default embedded zookeeper port (janhoy)
-
-Other Changes
-----------------------
-* SOLR-14915: Reduced dependencies from Solr server down to just SolrJ.  Don't add WEB-INF/lib.
-  * Can run via gradle, "gradlew run"
-  * Has own log4j2.xml now
-  * Was missing some dependencies in lib/; now has all except SolrJ & logging.
-  (David Smiley, Houston Putman)
-
-* SOLR-14957: Add Prometheus Exporter to docker PATH. Fix classpath issues. (Houston Putman)
diff --git a/solr/contrib/prometheus-exporter/conf/solr-exporter-config.xml b/solr/contrib/prometheus-exporter/conf/solr-exporter-config.xml
index e20680c..bfcc54a 100644
--- a/solr/contrib/prometheus-exporter/conf/solr-exporter-config.xml
+++ b/solr/contrib/prometheus-exporter/conf/solr-exporter-config.xml
@@ -280,6 +280,40 @@
               value        : $value
             }
           </str>
+          <!--
+            overseer metrics
+          -->
+          <str>
+            .metrics | to_entries | .[] | select(.key | startswith("solr.overseer")) as $object |
+            $object.value as $value | $value | to_entries | .[]  |
+            select(.key | startswith("queue.") and endswith("collectionWorkQueueSize")) as $object |
+            $object.value as $value |
+            {
+            name         : "solr_metrics_overseer_collectionWorkQueueSize",
+            type         : "GAUGE",
+            help         : "See following URL: https://lucene.apache.org/solr/guide/metrics-reporting.html",
+            label_names  : [],
+            label_values : [],
+            value        : $value
+            }
+          </str>
+          <str>
+            .metrics | to_entries | .[] | select(.key | startswith("solr.overseer")) as $object |
+            $object.value as $value | $value | to_entries | .[]  |
+            select(.key | startswith("queue.") and endswith("stateUpdateQueueSize")) as $object |
+            $object.value as $value |
+            {
+            name         : "solr_metrics_overseer_stateUpdateQueueSize",
+            type         : "GAUGE",
+            help         : "See following URL: https://lucene.apache.org/solr/guide/metrics-reporting.html",
+            label_names  : [],
+            label_values : [],
+            value        : $value
+            }
+          </str>
+          <!--
+            node metrics
+          -->
           <str>
             .metrics["solr.node"] | to_entries | .[] | select(.key | endswith(".clientErrors")) as $object |
             $object.key | split(".")[0] as $category |
diff --git a/solr/core/src/java/org/apache/solr/cloud/Overseer.java b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
index fc0d0eb..5cd553c 100644
--- a/solr/core/src/java/org/apache/solr/cloud/Overseer.java
+++ b/solr/core/src/java/org/apache/solr/cloud/Overseer.java
@@ -66,9 +66,12 @@
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.CloudConfig;
 import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.handler.admin.CollectionsHandler;
 import org.apache.solr.handler.component.HttpShardHandler;
 import org.apache.solr.logging.MDCLoggingContext;
+import org.apache.solr.metrics.SolrMetricProducer;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.update.UpdateShardHandler;
 import org.apache.zookeeper.CreateMode;
 import org.apache.zookeeper.KeeperException;
@@ -143,6 +146,9 @@
   public static final int NUM_RESPONSES_TO_STORE = 10000;
   public static final String OVERSEER_ELECT = "/overseer_elect";
 
+  private SolrMetricsContext solrMetricsContext;
+  private volatile String metricTag = SolrMetricProducer.getUniqueMetricTag(this, null);
+
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   enum LeaderStatus {DONT_KNOW, NO, YES}
@@ -173,6 +179,8 @@
 
     private final Stats zkStats;
 
+    private SolrMetricsContext clusterStateUpdaterMetricContext;
+
     private boolean isClosed = false;
 
     public ClusterStateUpdater(final ZkStateReader reader, final String myId, Stats zkStats) {
@@ -185,6 +193,9 @@
       this.completedMap = getCompletedMap(zkClient);
       this.myId = myId;
       this.reader = reader;
+
+      clusterStateUpdaterMetricContext = solrMetricsContext.getChildContext(this);
+      clusterStateUpdaterMetricContext.gauge(() -> stateUpdateQueue.getZkStats().getQueueLength(), true, "stateUpdateQueueSize", "queue" );
     }
 
     public Stats getStateUpdateQueueStats() {
@@ -544,6 +555,7 @@
     @Override
       public void close() {
         this.isClosed = true;
+        clusterStateUpdaterMetricContext.unregister();
       }
 
   }
@@ -616,6 +628,8 @@
     this.zkController = zkController;
     this.stats = new Stats();
     this.config = config;
+
+    this.solrMetricsContext = new SolrMetricsContext(zkController.getCoreContainer().getMetricManager(), SolrInfoBean.Group.overseer.toString(), metricTag);
   }
 
   public synchronized void start(String id) {
@@ -636,7 +650,7 @@
     ThreadGroup ccTg = new ThreadGroup("Overseer collection creation process.");
 
     OverseerNodePrioritizer overseerPrioritizer = new OverseerNodePrioritizer(reader, getStateUpdateQueue(), adminPath, shardHandler.getShardHandlerFactory());
-    overseerCollectionConfigSetProcessor = new OverseerCollectionConfigSetProcessor(reader, id, shardHandler, adminPath, stats, Overseer.this, overseerPrioritizer);
+    overseerCollectionConfigSetProcessor = new OverseerCollectionConfigSetProcessor(reader, id, shardHandler, adminPath, stats, Overseer.this, overseerPrioritizer, solrMetricsContext);
     ccThread = new OverseerThread(ccTg, overseerCollectionConfigSetProcessor, "OverseerCollectionConfigSetProcessor-" + id);
     ccThread.setDaemon(true);
 
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionConfigSetProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionConfigSetProcessor.java
index 78ddc82..d3819d0 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionConfigSetProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionConfigSetProcessor.java
@@ -26,6 +26,7 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.handler.component.HttpShardHandler;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
+import org.apache.solr.metrics.SolrMetricsContext;
 
 /**
  * An {@link OverseerTaskProcessor} that handles:
@@ -37,7 +38,7 @@
    public OverseerCollectionConfigSetProcessor(ZkStateReader zkStateReader, String myId,
                                                final HttpShardHandler shardHandler,
                                                String adminPath, Stats stats, Overseer overseer,
-                                               OverseerNodePrioritizer overseerNodePrioritizer) {
+                                               OverseerNodePrioritizer overseerNodePrioritizer, SolrMetricsContext solrMetricsContext) {
     this(
         zkStateReader,
         myId,
@@ -49,7 +50,8 @@
         overseer.getCollectionQueue(zkStateReader.getZkClient(), stats),
         Overseer.getRunningMap(zkStateReader.getZkClient()),
         Overseer.getCompletedMap(zkStateReader.getZkClient()),
-        Overseer.getFailureMap(zkStateReader.getZkClient())
+        Overseer.getFailureMap(zkStateReader.getZkClient()),
+        solrMetricsContext
     );
   }
 
@@ -62,7 +64,7 @@
                                         OverseerTaskQueue workQueue,
                                         DistributedMap runningMap,
                                         DistributedMap completedMap,
-                                        DistributedMap failureMap) {
+                                        DistributedMap failureMap, SolrMetricsContext solrMetricsContext) {
     super(
         zkStateReader,
         myId,
@@ -73,7 +75,8 @@
         workQueue,
         runningMap,
         completedMap,
-        failureMap);
+        failureMap,
+        solrMetricsContext);
   }
 
   private static OverseerMessageHandlerSelector getOverseerMessageHandlerSelector(
diff --git a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
index 2464a46..e496d14 100644
--- a/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
+++ b/solr/core/src/java/org/apache/solr/cloud/OverseerTaskProcessor.java
@@ -46,6 +46,7 @@
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.logging.MDCLoggingContext;
 import org.apache.solr.common.util.SolrNamedThreadFactory;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.zookeeper.KeeperException;
 import org.apache.zookeeper.data.Stat;
 import org.slf4j.Logger;
@@ -94,6 +95,7 @@
   private boolean isClosed;
 
   private volatile Stats stats;
+  private SolrMetricsContext overseerTaskProcessorMetricsContext;
 
   /**
    * Set of tasks that have been picked up for processing but not cleaned up from zk work-queue.
@@ -134,13 +136,14 @@
   private String thisNode;
 
   public OverseerTaskProcessor(ZkStateReader zkStateReader, String myId,
-                                        Stats stats,
-                                        OverseerMessageHandlerSelector selector,
-                                        OverseerNodePrioritizer prioritizer,
-                                        OverseerTaskQueue workQueue,
-                                        DistributedMap runningMap,
-                                        DistributedMap completedMap,
-                                        DistributedMap failureMap) {
+                               Stats stats,
+                               OverseerMessageHandlerSelector selector,
+                               OverseerNodePrioritizer prioritizer,
+                               OverseerTaskQueue workQueue,
+                               DistributedMap runningMap,
+                               DistributedMap completedMap,
+                               DistributedMap failureMap,
+                               SolrMetricsContext solrMetricsContext) {
     this.zkStateReader = zkStateReader;
     this.myId = myId;
     this.stats = stats;
@@ -154,6 +157,9 @@
     this.runningTasks = ConcurrentHashMap.newKeySet();
     this.completedTasks = new ConcurrentHashMap<>();
     thisNode = Utils.getMDCNode();
+
+    overseerTaskProcessorMetricsContext = solrMetricsContext.getChildContext(this);
+    overseerTaskProcessorMetricsContext.gauge(() -> workQueue.getZkStats().getQueueLength(), true, "collectionWorkQueueSize", "queue");
   }
 
   @Override
@@ -386,6 +392,7 @@
 
   public void close() {
     isClosed = true;
+    overseerTaskProcessorMetricsContext.unregister();
     if (tpe != null) {
       if (!tpe.isShutdown()) {
         ExecutorUtil.shutdownAndAwaitTermination(tpe);
diff --git a/solr/core/src/java/org/apache/solr/core/NodeConfig.java b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
index 46f1807..bd2202f 100644
--- a/solr/core/src/java/org/apache/solr/core/NodeConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/NodeConfig.java
@@ -136,6 +136,8 @@
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
           "SolrCloud requires a value of at least 2 for coreLoadThreads (configured value = " + this.coreLoadThreads + ")");
     }
+    if (null == this.solrHome) throw new NullPointerException("solrHome");
+    if (null == this.loader) throw new NullPointerException("loader");
   }
 
   public String getNodeName() {
diff --git a/solr/core/src/java/org/apache/solr/core/SolrPaths.java b/solr/core/src/java/org/apache/solr/core/SolrPaths.java
index 9819798..b69ae5c 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrPaths.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrPaths.java
@@ -17,16 +17,11 @@
 
 package org.apache.solr.core;
 
-import javax.naming.Context;
-import javax.naming.InitialContext;
-import javax.naming.NamingException;
-import javax.naming.NoInitialContextException;
 import java.io.File;
 import java.lang.invoke.MethodHandles;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.Set;
-import java.util.concurrent.ConcurrentSkipListSet;
 
 import org.apache.commons.exec.OS;
 import org.apache.solr.common.SolrException;
@@ -39,69 +34,15 @@
 public final class SolrPaths {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  private static final Set<String> loggedOnce = new ConcurrentSkipListSet<>();
-
   private SolrPaths() {} // don't create this
 
   /**
-   * Finds the solrhome based on looking up the value in one of three places:
-   * <ol>
-   * <li>JNDI: via java:comp/env/solr/home</li>
-   * <li>The system property solr.solr.home</li>
-   * <li>Look in the current working directory for a solr/ directory</li>
-   * </ol>
-   * <p>
-   *
-   * @return the Solr home, absolute and normalized.
-   */
-  public static Path locateSolrHome() {
-
-    String home = null;
-    // Try JNDI
-    try {
-      Context c = new InitialContext();
-      home = (String) c.lookup("java:comp/env/solr/home");
-      logOnceInfo("home_using_jndi", "Using JNDI solr.home: " + home);
-    } catch (NoInitialContextException e) {
-      log.debug("JNDI not configured for solr (NoInitialContextEx)");
-    } catch (NamingException e) {
-      log.debug("No /solr/home in JNDI");
-    } catch (RuntimeException ex) {
-      log.warn("Odd RuntimeException while testing for JNDI: ", ex);
-    }
-
-    // Now try system property
-    if (home == null) {
-      String prop = "solr.solr.home";
-      home = System.getProperty(prop);
-      if (home != null) {
-        logOnceInfo("home_using_sysprop", "Using system property " + prop + ": " + home);
-      }
-    }
-
-    // if all else fails, try
-    if (home == null) {
-      home = "solr/";
-      logOnceInfo("home_default", "solr home defaulted to '" + home + "' (could not find system property or JNDI)");
-    }
-    return Paths.get(home).toAbsolutePath().normalize();
-  }
-
-  /**
    * Ensures a directory name always ends with a '/'.
    */
   public static String normalizeDir(String path) {
     return (path != null && (!(path.endsWith("/") || path.endsWith("\\")))) ? path + File.separator : path;
   }
 
-  // Logs a message only once per startup
-  private static void logOnceInfo(String key, String msg) {
-    if (!loggedOnce.contains(key)) {
-      loggedOnce.add(key);
-      log.info(msg);
-    }
-  }
-
   /**
    * Checks that the given path is relative to one of the allowPaths supplied. Typically this will be
    * called from {@link CoreContainer#assertPathAllowed(Path)} and allowPaths pre-filled with the node's
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index 609da78..fa183c6 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -129,10 +129,6 @@
     return schemaLoader;
   }
 
-  public SolrResourceLoader() {
-    this(SolrPaths.locateSolrHome(), null);
-  }
-
   /**
    * Creates a loader.
    * Note: we do NOT call {@link #reloadLuceneSPI()}.
@@ -152,6 +148,10 @@
   }
 
 
+  /**
+   * Creates a loader.
+   * @param instanceDir - base directory for this resource loader, must not be null
+   */
   public SolrResourceLoader(Path instanceDir) {
     this(instanceDir, null);
   }
@@ -160,18 +160,14 @@
    * This loader will delegate to Solr's classloader when possible,
    * otherwise it will attempt to resolve resources using any jar files
    * found in the "lib/" directory in the specified instance directory.
-   *
-   * @param instanceDir - base directory for this resource loader, if null locateSolrHome() will be used.
-   * @see SolrPaths#locateSolrHome()
    */
   public SolrResourceLoader(Path instanceDir, ClassLoader parent) {
     if (instanceDir == null) {
-      this.instanceDir = SolrPaths.locateSolrHome();
-      log.debug("new SolrResourceLoader for deduced Solr Home: '{}'", this.instanceDir);
-    } else {
-      this.instanceDir = instanceDir;
-      log.debug("new SolrResourceLoader for directory: '{}'", this.instanceDir);
+      throw new NullPointerException("SolrResourceLoader instanceDir must be non-null");
     }
+    
+    this.instanceDir = instanceDir;
+    log.debug("new SolrResourceLoader for directory: '{}'", this.instanceDir);
 
     if (parent == null) {
       parent = getClass().getClassLoader();
diff --git a/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java b/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java
index cd0ca7e..6d8cb92 100644
--- a/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java
+++ b/solr/core/src/java/org/apache/solr/core/XmlConfigFile.java
@@ -110,10 +110,9 @@
    */
   public XmlConfigFile(SolrResourceLoader loader, String name, InputSource is, String prefix, Properties substituteProps) throws ParserConfigurationException, IOException, SAXException
   {
-    if( loader == null ) {
-      loader = new SolrResourceLoader(SolrPaths.locateSolrHome());
-    }
+    if (null == loader) throw new NullPointerException("loader");
     this.loader = loader;
+    
     this.substituteProperties = substituteProps;
     this.name = name;
     this.prefix = (prefix != null && !prefix.endsWith("/"))? prefix + '/' : prefix;
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
index 23d3a93..9d6e038 100755
--- a/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCoreParser.java
@@ -44,11 +44,13 @@
 
   protected final SolrQueryRequest req;
 
-  public SolrCoreParser(String defaultField, Analyzer analyzer,
-      SolrQueryRequest req) {
+  public SolrCoreParser(String defaultField, Analyzer analyzer, SolrQueryRequest req) {
     super(defaultField, analyzer);
     queryFactory.addBuilder("LegacyNumericRangeQuery", new LegacyNumericRangeQueryBuilder());
     this.req = req;
+    if (null == req) {
+      throw new NullPointerException("req must not be null");
+    }
   }
 
   @Override
@@ -57,12 +59,7 @@
     if (initArgs == null || initArgs.size() == 0) {
       return;
     }
-    final SolrResourceLoader loader;
-    if (req == null) {
-      loader = new SolrResourceLoader();
-    } else {
-      loader = req.getCore().getResourceLoader();
-    }
+    final SolrResourceLoader loader = req.getCore().getResourceLoader();
 
     final Iterable<Map.Entry<String,Object>> args = initArgs;
     for (final Map.Entry<String,Object> entry : args) {
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index e29b016..0c983f1 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -16,6 +16,10 @@
  */
 package org.apache.solr.servlet;
 
+import javax.naming.Context;
+import javax.naming.InitialContext;
+import javax.naming.NamingException;
+import javax.naming.NoInitialContextException;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
 import javax.servlet.ReadListener;
@@ -73,7 +77,6 @@
 import org.apache.solr.core.NodeConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrInfoBean;
-import org.apache.solr.core.SolrPaths;
 import org.apache.solr.core.SolrXmlConfig;
 import org.apache.solr.metrics.AltBufferPoolMetricSet;
 import org.apache.solr.metrics.MetricsMap;
@@ -184,10 +187,8 @@
           excludePatterns.add(Pattern.compile(element));
         }
       }
-      
-      String solrHome = (String) config.getServletContext().getAttribute(SOLRHOME_ATTRIBUTE);
-      final Path solrHomePath = solrHome == null ? SolrPaths.locateSolrHome() : Paths.get(solrHome);
-      coresInit = createCoreContainer(solrHomePath, extraProperties);
+
+      coresInit = createCoreContainer(computeSolrHome(config), extraProperties);
       this.httpClient = coresInit.getUpdateShardHandler().getDefaultHttpClient();
       setupJvmMetrics(coresInit);
       
@@ -291,6 +292,59 @@
   }
 
   /**
+   * Returns the effective Solr Home to use for this node, based on looking up the value in this order:
+   * <ol>
+   * <li>attribute in the FilterConfig</li>
+   * <li>JNDI: via java:comp/env/solr/home</li>
+   * <li>The system property solr.solr.home</li>
+   * <li>Look in the current working directory for a solr/ directory</li>
+   * </ol>
+   * <p>
+   *
+   * @return the Solr home, absolute and normalized.
+   * @see #SOLRHOME_ATTRIBUTE
+   */
+  private static Path computeSolrHome(FilterConfig config) {
+
+    // start with explicit check of servlet config...
+    String source = "servlet config: " + SOLRHOME_ATTRIBUTE;
+    String home = (String) config.getServletContext().getAttribute(SOLRHOME_ATTRIBUTE);
+
+    if (null == home) {
+      final String lookup = "java:comp/env/solr/home";
+      // Try JNDI
+      source = "JNDI: " + lookup;
+      try {
+        Context c = new InitialContext();
+        home = (String) c.lookup(lookup);
+      } catch (NoInitialContextException e) {
+        log.debug("JNDI not configured for solr (NoInitialContextEx)");
+      } catch (NamingException e) {
+        log.debug("No /solr/home in JNDI");
+      } catch (RuntimeException ex) {
+        log.warn("Odd RuntimeException while testing for JNDI: ", ex);
+      }
+    }
+
+    if (null == home) {
+      // Now try system property
+      final String prop = "solr.solr.home";
+      source = "system property: " + prop;
+      home = System.getProperty(prop);
+    }
+
+    if (null == home) {
+      // if all else fails, assume default dir
+      home = "solr/";
+      source = "defaulted to '" + home + "' ... could not find system property or JNDI";
+    }
+    final Path solrHome = Paths.get(home).toAbsolutePath().normalize();
+    log.info("Solr Home: {} (source: {})", solrHome, source);
+    
+    return solrHome;
+  }
+  
+  /**
    * Override this to change CoreContainer initialization
    * @return a CoreContainer to hold this server's cores
    */
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java
index ee5caeb..6127e59 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerCollectionConfigSetProcessorTest.java
@@ -64,6 +64,7 @@
 import org.apache.solr.handler.component.HttpShardHandler;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
 import org.apache.solr.handler.component.ShardRequest;
+import org.apache.solr.metrics.SolrMetricsContext;
 import org.apache.solr.update.UpdateShardHandler;
 import org.apache.solr.util.TimeOut;
 import org.apache.zookeeper.CreateMode;
@@ -124,7 +125,8 @@
   private static HttpClient httpClientMock;
   @SuppressWarnings("rawtypes")
   private static PlacementPluginFactory placementPluginFactoryMock;
-  
+  private static SolrMetricsContext solrMetricsContextMock;
+
   private static ObjectCache objectCache;
   private Map<String, byte[]> zkClientData = new HashMap<>();
   private final Map<String, ClusterState.CollectionRef> collectionsSet = new HashMap<>();
@@ -147,8 +149,9 @@
         OverseerTaskQueue workQueue, DistributedMap runningMap,
         Overseer overseer,
         DistributedMap completedMap,
-        DistributedMap failureMap) {
-      super(zkStateReader, myId, shardHandlerFactory, adminPath, new Stats(), overseer, new OverseerNodePrioritizer(zkStateReader, overseer.getStateUpdateQueue(), adminPath, shardHandlerFactory), workQueue, runningMap, completedMap, failureMap);
+        DistributedMap failureMap,
+        SolrMetricsContext solrMetricsContext) {
+      super(zkStateReader, myId, shardHandlerFactory, adminPath, new Stats(), overseer, new OverseerNodePrioritizer(zkStateReader, overseer.getStateUpdateQueue(), adminPath, shardHandlerFactory), workQueue, runningMap, completedMap, failureMap, solrMetricsContext);
     }
     
     @Override
@@ -184,6 +187,7 @@
     updateShardHandlerMock = mock(UpdateShardHandler.class);
     httpClientMock = mock(HttpClient.class);
     placementPluginFactoryMock = mock(PlacementPluginFactory.class);
+    solrMetricsContextMock = mock(SolrMetricsContext.class);
   }
   
   @AfterClass
@@ -209,6 +213,7 @@
     updateShardHandlerMock = null;
     httpClientMock = null;
     placementPluginFactoryMock = null;
+    solrMetricsContextMock = null;
   }
   
   @Before
@@ -239,6 +244,7 @@
     reset(updateShardHandlerMock);
     reset(httpClientMock);
     reset(placementPluginFactoryMock);
+    reset(solrMetricsContextMock);
 
     zkClientData.clear();
     collectionsSet.clear();
@@ -501,7 +507,9 @@
           }}).when(distribStateManagerMock).makePath(anyString());
 
     zkClientData.put("/configs/myconfig", new byte[1]);
-    
+
+    when(solrMetricsContextMock.getChildContext(any(Object.class))).thenReturn(solrMetricsContextMock);
+
     return liveNodes;
   }
 
@@ -736,7 +744,7 @@
 
     underTest = new OverseerCollectionConfigSetProcessorToBeTested(zkStateReaderMock,
         "1234", shardHandlerFactoryMock, ADMIN_PATH, workQueueMock, runningMapMock,
-        overseerMock, completedMapMock, failureMapMock);
+        overseerMock, completedMapMock, failureMapMock, solrMetricsContextMock);
 
 
     if (log.isInfoEnabled()) {
diff --git a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
index 4a5346c..91d0f95 100644
--- a/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/OverseerTest.java
@@ -73,6 +73,7 @@
 import org.apache.solr.core.SolrResourceLoader;
 import org.apache.solr.handler.component.HttpShardHandler;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.update.UpdateShardHandler;
 import org.apache.solr.update.UpdateShardHandlerConfig;
 import org.apache.solr.util.TimeOut;
@@ -1410,8 +1411,10 @@
 
     CoreContainer mockAlwaysUpCoreContainer = mock(CoreContainer.class,
         Mockito.withSettings().defaultAnswer(Mockito.CALLS_REAL_METHODS));
+    SolrMetricManager mockMetricManager = mock(SolrMetricManager.class);
+    when(mockAlwaysUpCoreContainer.getMetricManager()).thenReturn(mockMetricManager);
     when(mockAlwaysUpCoreContainer.isShutDown()).thenReturn(testDone);  // Allow retry on session expiry
-    when(mockAlwaysUpCoreContainer.getResourceLoader()).thenReturn(new SolrResourceLoader());
+    when(mockAlwaysUpCoreContainer.getResourceLoader()).thenReturn(new SolrResourceLoader(createTempDir()));
     ClusterSingletons singletons = new ClusterSingletons(() -> true, r -> r.run());
     // don't wait for all singletons
     singletons.setReady();
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
index 4e5c39e..2ff0976 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestPullReplicaErrorHandling.java
@@ -111,6 +111,7 @@
     collectionName = suggestedCollectionName();
     expectThrows(SolrException.class, () -> getCollectionState(collectionName));
     cluster.getSolrClient().setDefaultCollection(collectionName);
+    cluster.waitForAllNodes(30);
   }
 
   @Override
@@ -236,8 +237,9 @@
     JettySolrRunner jetty = getJettyForReplica(s.getReplicas(EnumSet.of(Replica.Type.PULL)).get(0));
     SolrCore core = jetty.getCoreContainer().getCores().iterator().next();
 
-    for (int i = 0; i < 5; i++) {
+    for (int i = 0; i < (TEST_NIGHTLY ? 5 : 2); i++) {
       cluster.expireZkSession(jetty);
+      waitForState("Expecting node to be disconnected", collectionName, activeReplicaCount(1, 0, 0));
       waitForState("Expecting node to reconnect", collectionName, activeReplicaCount(1, 0, 1));
       // We have two active ReplicationHandler with two close hooks each, one for triggering recovery and one for doing interval polling
       assertEquals(5, core.getCloseHooks().size());
diff --git a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
index 603c414..f92039a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/ZkControllerTest.java
@@ -37,6 +37,7 @@
 import org.apache.solr.core.*;
 import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.handler.component.HttpShardHandlerFactory;
+import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.update.UpdateShardHandler;
 import org.apache.solr.update.UpdateShardHandlerConfig;
 import org.apache.solr.util.LogLevel;
@@ -48,6 +49,7 @@
 import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICA;
+import static org.mockito.Mockito.mock;
 
 @Slow
 @SolrTestCaseJ4.SuppressSSL
@@ -333,6 +335,7 @@
 
   private static class MockCoreContainer extends CoreContainer {
     UpdateShardHandler updateShardHandler = new UpdateShardHandler(UpdateShardHandlerConfig.DEFAULT);
+    SolrMetricManager metricManager;
 
     public MockCoreContainer() {
       super(SolrXmlConfig.fromString(TEST_PATH(), "<solr/>"));
@@ -340,6 +343,7 @@
       httpShardHandlerFactory.init(new PluginInfo("shardHandlerFactory", Collections.emptyMap()));
       this.shardHandlerFactory = httpShardHandlerFactory;
       this.coreAdminHandler = new CoreAdminHandler();
+      this.metricManager = mock(SolrMetricManager.class);
     }
 
     @Override
@@ -356,5 +360,10 @@
       updateShardHandler.close();
       super.shutdown();
     }
-  }    
+
+    @Override
+    public SolrMetricManager getMetricManager() {
+      return metricManager;
+    }
+  }
 }
diff --git a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java
index 2169cc4..366e499 100644
--- a/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java
+++ b/solr/core/src/test/org/apache/solr/core/ResourceLoaderTest.java
@@ -50,8 +50,9 @@
 public class ResourceLoaderTest extends SolrTestCaseJ4 {
 
   public void testInstanceDir() throws Exception {
-    try (SolrResourceLoader loader = new SolrResourceLoader()) {
-      assertThat(loader.getInstancePath(), is(Paths.get("solr").toAbsolutePath()));
+    final Path dir = createTempDir();
+    try (SolrResourceLoader loader = new SolrResourceLoader(dir.toAbsolutePath())) {
+      assertThat(loader.getInstancePath(), is(dir.toAbsolutePath()));
     }
   }
 
@@ -213,7 +214,7 @@
   public void testCacheWrongType() throws Exception {
     clearCache();
 
-    SolrResourceLoader loader = new SolrResourceLoader();
+    SolrResourceLoader loader = new SolrResourceLoader(TEST_PATH().resolve("collection1"));
     @SuppressWarnings({"rawtypes"})
     Class[] params = { Map.class };
     Map<String,String> args = Map.of("minGramSize", "1", "maxGramSize", "2");
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
index d05a917..9d0b15a 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreContainer.java
@@ -85,6 +85,28 @@
     return ret;
   }
 
+  public void testSolrHomeAndResourceLoader() throws Exception {
+    // regardless of what sys prop may be set, the CoreContainer's init arg should be the definitive
+    // solr home -- and nothing in the call stack should be "setting" the sys prop to make that work...
+    final Path fakeSolrHome = createTempDir().toAbsolutePath();
+    System.setProperty(SOLR_HOME_PROP, fakeSolrHome.toString());
+    final Path realSolrHome = createTempDir().toAbsolutePath();
+    final CoreContainer cc = init(realSolrHome, CONFIGSETS_SOLR_XML);
+    try {
+
+      // instance dir & resource loader for the CC
+      assertEquals(realSolrHome.toString(), cc.getSolrHome());
+      assertEquals(realSolrHome, cc.getResourceLoader().getInstancePath());
+
+    } finally {
+      cc.shutdown();
+    }
+    assertEquals("Nothing in solr should be overriding the solr home sys prop in order to work!",
+                 fakeSolrHome.toString(),
+                 System.getProperty(SOLR_HOME_PROP));
+  }
+
+  
   @Test
   public void testShareSchema() throws Exception {
     System.setProperty("shareSchema", "true");
diff --git a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
index 5c5ba8f..65d1f41 100644
--- a/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
+++ b/solr/core/src/test/org/apache/solr/core/TestCoreDiscovery.java
@@ -55,7 +55,6 @@
   private final Path solrHomeDirectory = createTempDir();
 
   private void setMeUp(String alternateCoreDir) throws Exception {
-    System.setProperty("solr.solr.home", solrHomeDirectory.toAbsolutePath().toString());
     String xmlStr = SOLR_XML;
     if (alternateCoreDir != null) {
       xmlStr = xmlStr.replace("<solr>", "<solr> <str name=\"coreRootDirectory\">" + alternateCoreDir + "</str> ");
@@ -114,7 +113,7 @@
   }
 
   private CoreContainer init() throws Exception {
-    final CoreContainer container = new CoreContainer(SolrPaths.locateSolrHome(), new Properties());
+    final CoreContainer container = new CoreContainer(solrHomeDirectory, new Properties());
     try {
       container.load();
     } catch (Exception e) {
diff --git a/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java b/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
index 16be62c..699c4df 100644
--- a/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/index/WrapperMergePolicyFactoryTest.java
@@ -27,7 +27,7 @@
 /** Unit tests for {@link WrapperMergePolicyFactory}. */
 public class WrapperMergePolicyFactoryTest extends SolrTestCaseJ4 {
 
-  private final SolrResourceLoader resourceLoader = new SolrResourceLoader();
+  private final SolrResourceLoader resourceLoader = new SolrResourceLoader(createTempDir());
 
   public void testReturnsDefaultMergePolicyIfNoneSpecified() {
     final MergePolicyFactoryArgs args = new MergePolicyFactoryArgs();
diff --git a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
index 1a2c79c..91ec618 100644
--- a/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
+++ b/solr/core/src/test/org/apache/solr/metrics/SolrMetricManagerTest.java
@@ -177,59 +177,61 @@
   @Test
   public void testReporters() throws Exception {
 
-    SolrResourceLoader loader = new SolrResourceLoader();
-    SolrMetricManager metricManager = new SolrMetricManager();
+    try (SolrResourceLoader loader = new SolrResourceLoader(createTempDir())) {
+      SolrMetricManager metricManager = new SolrMetricManager();
 
-    PluginInfo[] plugins = new PluginInfo[] {
+      PluginInfo[] plugins = new PluginInfo[] {
         createPluginInfo("universal_foo", null, null),
         createPluginInfo("multigroup_foo", "jvm, node, core", null),
         createPluginInfo("multiregistry_foo", null, "solr.node, solr.core.collection1"),
         createPluginInfo("specific_foo", null, "solr.core.collection1"),
         createPluginInfo("node_foo", "node", null),
         createPluginInfo("core_foo", "core", null)
-    };
-    String tag = "xyz";
-    metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.node);
-    Map<String, SolrMetricReporter> reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
-    assertEquals(4, reporters.size());
-    assertTrue(reporters.containsKey("universal_foo@" + tag));
-    assertTrue(reporters.containsKey("multigroup_foo@" + tag));
-    assertTrue(reporters.containsKey("node_foo@" + tag));
-    assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
-
-    metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.core, "collection1");
-    reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1"));
-    assertEquals(5, reporters.size());
-    assertTrue(reporters.containsKey("universal_foo@" + tag));
-    assertTrue(reporters.containsKey("multigroup_foo@" + tag));
-    assertTrue(reporters.containsKey("specific_foo@" + tag));
-    assertTrue(reporters.containsKey("core_foo@" + tag));
-    assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
-
-    metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.jvm);
-    reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
-    assertEquals(2, reporters.size());
-    assertTrue(reporters.containsKey("universal_foo@" + tag));
-    assertTrue(reporters.containsKey("multigroup_foo@" + tag));
-
-    metricManager.removeRegistry("solr.jvm");
-    reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
-    assertEquals(0, reporters.size());
-
-    metricManager.removeRegistry("solr.node");
-    reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
-    assertEquals(0, reporters.size());
-
-    metricManager.removeRegistry("solr.core.collection1");
-    reporters = metricManager.getReporters(
-        SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1"));
-    assertEquals(0, reporters.size());
-
+      };
+      String tag = "xyz";
+      metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.node);
+      Map<String, SolrMetricReporter> reporters = metricManager.getReporters
+        (SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
+          
+      assertEquals(4, reporters.size());
+      assertTrue(reporters.containsKey("universal_foo@" + tag));
+      assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+      assertTrue(reporters.containsKey("node_foo@" + tag));
+      assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
+      
+      metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.core, "collection1");
+      reporters = metricManager.getReporters
+        (SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1"));
+          
+      assertEquals(5, reporters.size());
+      assertTrue(reporters.containsKey("universal_foo@" + tag));
+      assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+      assertTrue(reporters.containsKey("specific_foo@" + tag));
+      assertTrue(reporters.containsKey("core_foo@" + tag));
+      assertTrue(reporters.containsKey("multiregistry_foo@" + tag));
+      
+      metricManager.loadReporters(plugins, loader, null, null, tag, SolrInfoBean.Group.jvm);
+      reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
+                                             
+      assertEquals(2, reporters.size());
+      assertTrue(reporters.containsKey("universal_foo@" + tag));
+      assertTrue(reporters.containsKey("multigroup_foo@" + tag));
+      
+      metricManager.removeRegistry("solr.jvm");
+      reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.jvm));
+                                             
+      assertEquals(0, reporters.size());
+      
+      metricManager.removeRegistry("solr.node");
+      reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.node));
+                                             
+      assertEquals(0, reporters.size());
+      
+      metricManager.removeRegistry("solr.core.collection1");
+      reporters = metricManager.getReporters(SolrMetricManager.getRegistryName(SolrInfoBean.Group.core, "collection1"));
+                                             
+      assertEquals(0, reporters.size());
+    }
   }
 
   @Test
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java
index 7a0e519..d82abdb 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrCoreParser.java
@@ -37,20 +37,24 @@
 import org.apache.lucene.search.spans.SpanOrQuery;
 import org.apache.lucene.search.spans.SpanQuery;
 import org.apache.lucene.search.spans.SpanTermQuery;
-import org.apache.solr.SolrTestCase;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.request.SolrQueryRequest;
+import org.junit.BeforeClass;
 
-public class TestSolrCoreParser extends SolrTestCase {
+public class TestSolrCoreParser extends SolrTestCaseJ4 {
 
+  @BeforeClass
+  public static void init() throws Exception {
+    initCore("solrconfig.xml","schema.xml");
+  }
+  
   private SolrCoreParser solrCoreParser;
 
   private CoreParser solrCoreParser() {
     if (solrCoreParser == null) {
       final String defaultField = "contents";
       final Analyzer analyzer = new MockAnalyzer(random(), MockTokenizer.WHITESPACE, true, MockTokenFilter.ENGLISH_STOPSET);
-      final SolrQueryRequest req = null;
-      solrCoreParser = new SolrCoreParser(defaultField, analyzer, req);
+      solrCoreParser = new SolrCoreParser(defaultField, analyzer, req());
       {
         final NamedList<String> args = new NamedList<>();
         args.add("HelloQuery", HelloQueryBuilder.class.getCanonicalName());
diff --git a/solr/core/src/test/org/apache/solr/search/TestXmlQParser.java b/solr/core/src/test/org/apache/solr/search/TestXmlQParser.java
index 76ed752..79207f0 100644
--- a/solr/core/src/test/org/apache/solr/search/TestXmlQParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestXmlQParser.java
@@ -21,18 +21,32 @@
 import org.apache.lucene.queryparser.xml.CoreParser;
 
 import org.apache.lucene.queryparser.xml.TestCoreParser;
+
 import org.apache.solr.util.StartupLoggingUtils;
+import org.apache.solr.util.TestHarness;
+
 import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+
 public class TestXmlQParser extends TestCoreParser {
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private CoreParser solrCoreParser;
-
+  private static TestHarness harness;
+    
+  @BeforeClass
+  public static void init() throws Exception {
+    // we just need to stub this out so we can construct a SolrCoreParser
+    harness = new TestHarness(TestHarness.buildTestNodeConfig(createTempDir()));
+  }
+  
   @AfterClass
   public static void shutdownLogger() throws Exception {
+    harness.close();
+    harness = null;
     StartupLoggingUtils.shutdown();
   }
 
@@ -42,7 +56,7 @@
       solrCoreParser = new SolrCoreParser(
           super.defaultField(),
           super.analyzer(),
-          null);
+          harness.getRequestFactory("/select", 0, 0).makeRequest());
     }
     return solrCoreParser;
   }
diff --git a/solr/core/src/test/org/apache/solr/security/MultiDestinationAuditLoggerTest.java b/solr/core/src/test/org/apache/solr/security/MultiDestinationAuditLoggerTest.java
index 6b3a51f..d20b6dd 100644
--- a/solr/core/src/test/org/apache/solr/security/MultiDestinationAuditLoggerTest.java
+++ b/solr/core/src/test/org/apache/solr/security/MultiDestinationAuditLoggerTest.java
@@ -17,6 +17,7 @@
 package org.apache.solr.security;
 
 import java.io.IOException;
+import java.nio.file.Paths;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
@@ -48,7 +49,7 @@
     plugins.add(conf2);
     config.put("plugins", plugins);
 
-    SolrResourceLoader loader = new SolrResourceLoader();
+    SolrResourceLoader loader = new SolrResourceLoader(Paths.get(""));
     al.inform(loader);
     al.init(config);
 
@@ -79,4 +80,4 @@
     assertEquals(1, config.size());
     al.close();
   }
-}
\ No newline at end of file
+}
diff --git a/solr/docker/CHANGES.md b/solr/docker/CHANGES.md
deleted file mode 100644
index 04a767c..0000000
--- a/solr/docker/CHANGES.md
+++ /dev/null
@@ -1,28 +0,0 @@
-This file lists release notes for this module.
-Prior to version 9, the module existed in another repository and changes were not tracked in this manner.
-You could browse the commit history here instead:
-https://github.com/docker-solr/docker-solr
- 
-9.0.0
-==================
-
-Improvements
-----------------------
-* SOLR-14001: Removed /var/solr initialization from the Dockerfile; depend on init_var_solr.sh instead.
-  This leads to more consistent behavior no matter how /var/solr is mounted.
-  * init_var_solr.sh is now invoked by docker-entrypoint.sh; not in a bunch of other places.
-  * as before, you can set NO_INIT_VAR_SOLR=1 to short-circuit this.
-  * init_var_solr.sh no longer echo's anything.  For verbosity, set VERBOSE=yes.
-  (David Smiley)
-
-* SOLR-14957: Add Prometheus Exporter to docker PATH. Fix classpath issues.
-  (Houston Putman)
-  
-* SOLR-14949: Ability to customize the FROM image when building.
-  (Houston Putman)
-
-Other Changes
-------------------
-* SOLR-14789: Migrate docker image creation from docker-solr repo to solr/docker. 
-  (Houston Putman, Martijn Koster, Tim Potter, David Smiley, janhoy, Mike Drob)
-
diff --git a/solr/solr-ref-guide/src/metrics-reporting.adoc b/solr/solr-ref-guide/src/metrics-reporting.adoc
index 9624165..ccc033f 100644
--- a/solr/solr-ref-guide/src/metrics-reporting.adoc
+++ b/solr/solr-ref-guide/src/metrics-reporting.adoc
@@ -57,6 +57,12 @@
 * System properties such as Java information, various installation directory paths, ports, and similar information. You can control what appears here by modifying `solr.xml`.
 // TODO for 7.0 fix this
 
+=== Overseer Registry
+
+This registry is returned at `solr.overseer` when run in SolrCloud mode and includes the following information. When making requests with the <<Metrics API>>, you can specify `&group=overseer` to limit to only these metrics.
+
+* size of the Overseer queues (collection work queue and cluster state update queue)
+
 === Node / CoreContainer Registry
 
 This registry is returned at `solr.node` and includes the following information. When making requests with the <<Metrics API>>, you can specify `&group=node` to limit to only these metrics.
diff --git a/solr/solr-ref-guide/src/solr-upgrade-notes.adoc b/solr/solr-ref-guide/src/solr-upgrade-notes.adoc
index a658af6..751f87c 100644
--- a/solr/solr-ref-guide/src/solr-upgrade-notes.adoc
+++ b/solr/solr-ref-guide/src/solr-upgrade-notes.adoc
@@ -24,7 +24,7 @@
 implementations. It is not a comprehensive list of all changes to Solr in any release.
 
 When planning your Solr upgrade, consider the customizations you have made to
-your system and review the {solr-javadocs}/changes//Changes.html[`CHANGES.txt`]
+your system and review the {solr-javadocs}/changes/Changes.html[`CHANGES.txt`]
 file found in your Solr package. That file includes all the changes and updates
 that may effect your existing implementation.
 
@@ -38,12 +38,20 @@
 
 If you are upgrading from 7.x, see the section <<Upgrading from 7.x Releases>> below.
 
+=== Solr 8.8
+
+*Removed Contribs*
+
+* The search results clustering contrib has been removed from 8.x Solr line due to lack
+  of Java 1.8 compatibility in the dependency providing on-line clustering of search results.
+  See SOLR-14981 for more details.
+
 === Solr 8.7
 
 See the https://cwiki.apache.org/confluence/display/SOLR/ReleaseNote87[8.7 Release Notes^]
 for an overview of the main new features of Solr 8.7.
 
-When upgrading to 8.6.x users should be aware of the following major changes from 8.6.
+When upgrading to 8.7.x users should be aware of the following major changes from 8.6.
 
 *Autoscaling*
 
@@ -128,9 +136,8 @@
 +
 Replace `localhost:8983` with your Solr endpoint.
 +
-```
+[source,text]
 curl -X POST -H 'Content-type:application/json'  -d '{set-cluster-policy : [], set-cluster-preferences : []}' http://localhost:8983/api/cluster/autoscaling
-```
 +
 This information is only relevant for users upgrading from 8.6.0. If upgrading from an earlier version to 8.6.1+, this warning can be ignored.
 
@@ -160,7 +167,9 @@
 
 *Autoscaling*
 
-* Solr now includes a default autoscaling policy.
+* **NOTE: The default autoscaling policy has been removed as of 8.6.1**
++
+Solr now includes a default autoscaling policy.
 This policy can be overridden with your custom rules or by specifying an empty policy to replace the default.
 
 * The ComputePlan action now supports a collection selector to identify collections based on collection properties to determine which collections should be operated on.
@@ -343,6 +352,7 @@
 be enabled with a system parameter passed at start up before it can be used.
 For details, please see the section <<package-manager.adoc#package-manager,Package Management>>.
 
+With this feature Solr's Blob Store functionality is now deprecated and will likely be removed in 9.0.
 
 *Security*
 
@@ -573,7 +583,7 @@
 
 == Upgrading from Pre-7.x Versions
 
-Users upgrading from versions of Solr prior to 7.x are strongly encouraged to consult {solr-javadocs}/changes//Changes.html[`CHANGES.txt`] for the details of _all_ changes since the version they are upgrading from.
+Users upgrading from versions of Solr prior to 7.x are strongly encouraged to consult {solr-javadocs}/changes/Changes.html[`CHANGES.txt`] for the details of _all_ changes since the version they are upgrading from.
 
 The upgrade from Solr 6.x to Solr 7.0 introduced several *major* changes that you should be aware of before upgrading. Please do a thorough review of the section <<major-changes-in-solr-7.adoc#major-changes-in-solr-7,Major Changes in Solr 7>> before starting your upgrade.
 
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
index 174b24a..7cfaaff 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/QueryResponseTest.java
@@ -21,6 +21,7 @@
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
 import java.time.Instant;
 import java.util.Date;
 import java.util.List;
@@ -49,7 +50,7 @@
   public void testRangeFacets() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
     NamedList<Object> response = null;
-    try (SolrResourceLoader loader = new SolrResourceLoader();
+    try (SolrResourceLoader loader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
          InputStream is = loader.openResource("solrj/sampleRangeFacetResponse.xml")) {
       assertNotNull(is);
 
@@ -111,7 +112,7 @@
   public void testGroupResponse() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
     NamedList<Object> response = null;
-    try (SolrResourceLoader loader = new SolrResourceLoader();
+    try (SolrResourceLoader loader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
          InputStream is = loader.openResource("solrj/sampleGroupResponse.xml")) {
       assertNotNull(is);
       try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
@@ -218,7 +219,7 @@
     XMLResponseParser parser = new XMLResponseParser();
     NamedList<Object> response = null;
 
-    try (SolrResourceLoader loader = new SolrResourceLoader();
+    try (SolrResourceLoader loader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
          InputStream is = loader.openResource("solrj/sampleSimpleGroupResponse.xml")) {
       assertNotNull(is);
       try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
@@ -262,7 +263,7 @@
   // commented out on: 24-Dec-2018   @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Sep-2018
   public void testIntervalFacetsResponse() throws Exception {
     XMLResponseParser parser = new XMLResponseParser();
-    try(SolrResourceLoader loader = new SolrResourceLoader()) {
+    try(SolrResourceLoader loader = new SolrResourceLoader(Paths.get("").toAbsolutePath())) {
       InputStream is = loader.openResource("solrj/sampleIntervalFacetsResponse.xml");
       assertNotNull(is);
       Reader in = new InputStreamReader(is, StandardCharsets.UTF_8);
@@ -308,7 +309,7 @@
     XMLResponseParser parser = new XMLResponseParser();
     NamedList<Object> response;
 
-    try (SolrResourceLoader loader = new SolrResourceLoader();
+    try (SolrResourceLoader loader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
          InputStream is = loader.openResource("solrj/sampleDebugResponse.xml")) {
           assertNotNull(is);
       try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
index 5dd3ab3..1c52035 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/response/TestClusteringResponse.java
@@ -19,6 +19,7 @@
 import java.io.InputStreamReader;
 import java.io.Reader;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.List;
 
@@ -40,7 +41,7 @@
     NamedList<Object> response = null;
 
     /*Load a simple XML with the clustering response encoded in an XML format*/
-    try (SolrResourceLoader loader = new SolrResourceLoader();
+    try (SolrResourceLoader loader = new SolrResourceLoader(Paths.get("").toAbsolutePath());
          InputStream is = loader.openResource("solrj/sampleClusteringResponse.xml")) {
       assertNotNull(is);
       try (Reader in = new InputStreamReader(is, StandardCharsets.UTF_8)) {
diff --git a/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java b/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
index 1a72981..6b27e5e 100644
--- a/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
+++ b/solr/solrj/src/test/org/apache/solr/common/util/ContentStreamTest.java
@@ -25,6 +25,7 @@
 import java.io.Reader;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Paths;
 import java.util.zip.GZIPInputStream;
 import java.util.zip.GZIPOutputStream;
 
@@ -47,7 +48,8 @@
 
   public void testFileStream() throws IOException {
     File file = new File(createTempDir().toFile(), "README");
-    try (SolrResourceLoader srl = new SolrResourceLoader(); InputStream is = srl.openResource("solrj/README");
+    try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath());
+         InputStream is = srl.openResource("solrj/README");
          FileOutputStream os = new FileOutputStream(file)) {
       assertNotNull(is);
       IOUtils.copy(is, os);
@@ -70,7 +72,8 @@
   public void testFileStreamGZIP() throws IOException {
     File file = new File(createTempDir().toFile(), "README.gz");
 
-    try (SolrResourceLoader srl = new SolrResourceLoader(); InputStream is = srl.openResource("solrj/README");
+    try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath());
+         InputStream is = srl.openResource("solrj/README");
          FileOutputStream os = new FileOutputStream(file);
          GZIPOutputStream zos = new GZIPOutputStream(os)) {
       IOUtils.copy(is, zos);
@@ -95,7 +98,8 @@
   public void testURLStream() throws IOException {
     File file = new File(createTempDir().toFile(), "README");
 
-    try (SolrResourceLoader srl = new SolrResourceLoader(); InputStream is = srl.openResource("solrj/README");
+    try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath());
+         InputStream is = srl.openResource("solrj/README");
          FileOutputStream os = new FileOutputStream(file)) {
       IOUtils.copy(is, os);
     }
@@ -124,7 +128,8 @@
   public void testURLStreamGZIP() throws IOException {
     File file = new File(createTempDir().toFile(), "README.gz");
 
-    try (SolrResourceLoader srl = new SolrResourceLoader(); InputStream is = srl.openResource("solrj/README");
+    try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath());
+         InputStream is = srl.openResource("solrj/README");
          FileOutputStream os = new FileOutputStream(file);
          GZIPOutputStream zos = new GZIPOutputStream(os)) {
       IOUtils.copy(is, zos);
@@ -149,7 +154,8 @@
   public void testURLStreamCSVGZIPExtention() throws IOException {
     File file = new File(createTempDir().toFile(), "README.CSV.gz");
 
-    try (SolrResourceLoader srl = new SolrResourceLoader(); InputStream is = srl.openResource("solrj/README");
+    try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath());
+         InputStream is = srl.openResource("solrj/README");
          FileOutputStream os = new FileOutputStream(file);
          GZIPOutputStream zos = new GZIPOutputStream(os)) {
       IOUtils.copy(is, zos);
@@ -174,7 +180,8 @@
   public void testURLStreamJSONGZIPExtention() throws IOException {
     File file = new File(createTempDir().toFile(), "README.json.gzip");
 
-    try (SolrResourceLoader srl = new SolrResourceLoader(); InputStream is = srl.openResource("solrj/README");
+    try (SolrResourceLoader srl = new SolrResourceLoader(Paths.get("").toAbsolutePath());
+         InputStream is = srl.openResource("solrj/README");
          FileOutputStream os = new FileOutputStream(file);
          GZIPOutputStream zos = new GZIPOutputStream(os)) {
       IOUtils.copy(is, zos);
diff --git a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
index 61af73b..7aa14f4 100644
--- a/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
+++ b/solr/test-framework/src/java/org/apache/solr/SolrTestCaseJ4.java
@@ -818,6 +818,7 @@
 
   public static CoreContainer createCoreContainer(Path solrHome, String solrXML) {
     testSolrHome = requireNonNull(solrHome);
+    System.setProperty("solr.solr.home", solrHome.toAbsolutePath().toString());
     h = new TestHarness(solrHome, solrXML);
     lrf = h.getRequestFactory("", 0, 20, CommonParams.VERSION, "2.2");
     return h.getCoreContainer();
@@ -840,6 +841,7 @@
 
   public static CoreContainer createDefaultCoreContainer(Path solrHome) {
     testSolrHome = requireNonNull(solrHome);
+    System.setProperty("solr.solr.home", solrHome.toAbsolutePath().toString());
     h = new TestHarness("collection1", initAndGetDataDir().getAbsolutePath(), "solrconfig.xml", "schema.xml");
     lrf = h.getRequestFactory("", 0, 20, CommonParams.VERSION, "2.2");
     return h.getCoreContainer();
diff --git a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
index a2f8177..2f18f26 100644
--- a/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
+++ b/solr/test-framework/src/java/org/apache/solr/util/TestHarness.java
@@ -21,6 +21,7 @@
 import java.io.StringWriter;
 import java.nio.charset.StandardCharsets;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -43,7 +44,6 @@
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
-import org.apache.solr.core.SolrPaths;
 import org.apache.solr.core.SolrXmlConfig;
 import org.apache.solr.handler.UpdateRequestHandler;
 import org.apache.solr.metrics.reporters.SolrJmxReporter;
@@ -133,6 +133,19 @@
       this(SolrTestCaseJ4.DEFAULT_TEST_CORENAME, dataDirectory, solrConfig, indexSchema);
   }
 
+  /** 
+   * Helper method to let us do some home sys prop check in delegated constructor.
+   * In "real" code SolrDispatchFilter takes care of checking this sys prop when building NodeConfig/CoreContainer
+   */
+  private static Path checkAndReturnSolrHomeSysProp() {
+    final String SOLR_HOME = "solr.solr.home";
+    final String home = System.getProperty(SOLR_HOME);
+    if (null == home) {
+      throw new IllegalStateException("This TestHarness constructor requires " + SOLR_HOME + " sys prop to be set by test first");
+    }
+    return Paths.get(home).toAbsolutePath().normalize();
+  }
+  
   /**
    * @param coreName to initialize
    * @param dataDir path for index data, will not be cleaned up
@@ -140,7 +153,7 @@
    * @param indexSchema schema resource name
    */
   public TestHarness(String coreName, String dataDir, String solrConfig, String indexSchema) {
-    this(buildTestNodeConfig(SolrPaths.locateSolrHome()),
+    this(buildTestNodeConfig(checkAndReturnSolrHomeSysProp()),
         new TestCoresLocator(coreName, dataDir, solrConfig, indexSchema));
     this.coreName = (coreName == null) ? SolrTestCaseJ4.DEFAULT_TEST_CORENAME : coreName;
   }
diff --git a/solr/test-framework/src/test/org/apache/solr/cloud/MiniSolrCloudClusterTest.java b/solr/test-framework/src/test/org/apache/solr/cloud/MiniSolrCloudClusterTest.java
index 37a2bfb..fcba6b4 100644
--- a/solr/test-framework/src/test/org/apache/solr/cloud/MiniSolrCloudClusterTest.java
+++ b/solr/test-framework/src/test/org/apache/solr/cloud/MiniSolrCloudClusterTest.java
@@ -19,6 +19,7 @@
 
 import java.io.IOException;
 import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.Properties;
 
@@ -27,6 +28,8 @@
 import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettyConfig;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.util.RevertDefaultThreadHandlerRule;
 import org.junit.ClassRule;
 import org.junit.Test;
@@ -34,6 +37,7 @@
 import org.junit.rules.TestRule;
 
 @LuceneTestCase.SuppressSysoutChecks(bugUrl = "Solr logs to JUL")
+@SolrTestCaseJ4.SuppressSSL(bugUrl = "https://issues.apache.org/jira/browse/SOLR-15026")
 public class MiniSolrCloudClusterTest extends SolrTestCaseJ4 {
 
   @ClassRule
@@ -99,6 +103,39 @@
     cluster.shutdown();
   }
 
+  public void testSolrHomeAndResourceLoaders() throws Exception {
+    final String SOLR_HOME_PROP = "solr.solr.home";
+    // regardless of what sys prop may be set, everything in the cluster should use solr home dirs under the 
+    // configured base dir -- and nothing in the call stack should be "setting" the sys prop to make that work...
+    final String fakeSolrHome = createTempDir().toAbsolutePath().toString();
+    System.setProperty(SOLR_HOME_PROP, fakeSolrHome);
+
+    // mock FS from createTempDir doesn't play nice using 'startsWith' when the solr stack reconstitutes the path from string
+    // so we have to go the string route here as well...
+    final Path workDir = Paths.get(createTempDir().toAbsolutePath().toString());
+    
+    final MiniSolrCloudCluster cluster = new MiniSolrCloudCluster(1, workDir, JettyConfig.builder().build());
+    try {
+      final JettySolrRunner jetty = cluster.getJettySolrRunners().get(0);
+      assertTrue(jetty.getCoreContainer().getSolrHome() + " vs " + workDir,
+                 // mock dirs from createTempDir() don't play nice with startsWith, so we have to use the string value
+                 Paths.get(jetty.getCoreContainer().getSolrHome()).startsWith(workDir));
+      assertEquals(jetty.getCoreContainer().getSolrHome(),
+                   jetty.getCoreContainer().getResourceLoader().getInstancePath().toAbsolutePath().toString());
+
+      assertTrue(CollectionAdminRequest.createCollection("test", 1,1).process(cluster.getSolrClient()).isSuccess());
+      final SolrCore core = jetty.getCoreContainer().getCores().get(0);
+      assertTrue(core.getInstancePath() + " vs " + workDir,
+                 core.getInstancePath().startsWith(workDir));
+      assertEquals(core.getInstancePath(),
+                   core.getResourceLoader().getInstancePath());
+    } finally {
+      cluster.shutdown();
+    }
+    assertEquals("There is no reason why anything should have set this sysprop",
+                 fakeSolrHome, System.getProperty(SOLR_HOME_PROP));
+  }
+  
   public void testMultipleClustersDiffZk() throws Exception {
     final MiniSolrCloudCluster x = new MiniSolrCloudCluster(1, createTempDir(), JettyConfig.builder().build());
     try {