Merge branch 'master' into jira/solr-12730
diff --git a/lucene/CHANGES.txt b/lucene/CHANGES.txt
index 9b0382b..59d81d0 100644
--- a/lucene/CHANGES.txt
+++ b/lucene/CHANGES.txt
@@ -215,6 +215,17 @@
 * LUCENE-8524: Add the Hangul Letter Araea (interpunct) as a separator in Nori's tokenizer.
   This change also removes empty terms and trim surface form in Nori's Korean dictionary. (Trey Jones, Jim Ferenczi)
 
+* LUCENE-8550: Fix filtering of coplanar points when creating linked list on
+  polygon tessellator. (Ignacio Vera)
+
+* LUCENE-8549: Polygon tessellator throws an error if some parts of the shape
+  could not be processed. (Ignacio Vera)
+
+* LUCENE-8540: Better handling of min/max values for Geo3d encoding. (Ignacio Vera)
+
+* LUCENE-8534: Fix incorrect computation for triangles intersecting polygon edges in
+  shape tessellation. (Ignacio Vera)
+
 New Features
 
 * LUCENE-8496: Selective indexing - modify BKDReader/BKDWriter to allow users
@@ -230,6 +241,11 @@
   https://github.com/snowballstem/snowball/blob/master/algorithms/arabic.sbl 
   (Ryadh Dahimene via Jim Ferenczi)
 
+* LUCENE-8554: Add new LatLonShapeLineQuery that queries indexed LatLonShape fields
+  by arbitrary lines. (Nick Knize)
+
+* LUCENE-8555: Add dateline crossing support to LatLonShapeBoundingBoxQuery. (Nick Knize)
+
 Improvements:
 
 * LUCENE-8521: Change LatLonShape encoding to 7 dimensions instead of 6; where the
diff --git a/lucene/core/src/java/org/apache/lucene/geo/EdgeTree.java b/lucene/core/src/java/org/apache/lucene/geo/EdgeTree.java
new file mode 100644
index 0000000..d954f88
--- /dev/null
+++ b/lucene/core/src/java/org/apache/lucene/geo/EdgeTree.java
@@ -0,0 +1,426 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.geo;
+
+import java.util.Arrays;
+import java.util.Comparator;
+
+import org.apache.lucene.index.PointValues.Relation;
+import org.apache.lucene.util.ArrayUtil;
+
+import static org.apache.lucene.geo.GeoUtils.lineCrossesLine;
+import static org.apache.lucene.geo.GeoUtils.orient;
+
+/**
+ * 2D line/polygon geometry implementation represented as a balanced interval tree of edges.
+ * <p>
+ * Construction takes {@code O(n log n)} time for sorting and tree construction.
+ * {@link #relate relate()} is {@code O(n)}, but is much faster than
+ * brute force for most practical lines and polygons.
+ * @lucene.internal
+ */
+public abstract class EdgeTree {
+  /** minimum latitude of this geometry's bounding box area */
+  public final double minLat;
+  /** maximum latitude of this geometry's bounding box area */
+  public final double maxLat;
+  /** minimum longitude of this geometry's bounding box area */
+  public final double minLon;
+  /** maximum longitude of this geometry's bounding box area */
+  public final double maxLon;
+
+  // each component is a node in an augmented 2d kd-tree: we alternate splitting between latitude/longitude,
+  // and pull up max values for both dimensions to each parent node (regardless of split).
+
+  /** maximum latitude of this component or any of its children */
+  protected double maxY;
+  /** maximum longitude of this component or any of its children */
+  protected double maxX;
+  /** which dimension was this node split on */
+  // TODO: its implicit based on level, but boolean keeps code simple
+  protected boolean splitX;
+
+  // child components, or null
+  protected EdgeTree left;
+  protected EdgeTree right;
+
+  /** root node of edge tree */
+  protected final Edge tree;
+
+  protected EdgeTree(final double minLat, final double maxLat, final double minLon, final double maxLon, double[] lats, double[] lons) {
+    this.minLat = minLat;
+    this.maxLat = maxLat;
+    this.minLon = minLon;
+    this.maxLon = maxLon;
+    this.maxY = maxLat;
+    this.maxX = maxLon;
+
+    // create interval tree of edges
+    this.tree = createTree(lats, lons);
+  }
+
+  /** Returns relation to the provided triangle */
+  public Relation relateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+    // compute bounding box of triangle
+    double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
+    double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
+    double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
+    double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
+    if (minLat <= maxY && minLon <= maxX) {
+      Relation relation = internalComponentRelateTriangle(ax, ay, bx, by, cx, cy);
+      if (relation != Relation.CELL_OUTSIDE_QUERY) {
+        return relation;
+      }
+      if (left != null) {
+        relation = left.relateTriangle(ax, ay, bx, by, cx, cy);
+        if (relation != Relation.CELL_OUTSIDE_QUERY) {
+          return relation;
+        }
+      }
+      if (right != null && ((splitX == false && maxLat >= this.minLat) || (splitX && maxLon >= this.minLon))) {
+        relation = right.relateTriangle(ax, ay, bx, by, cx, cy);
+        if (relation != Relation.CELL_OUTSIDE_QUERY) {
+          return relation;
+        }
+      }
+    }
+    return Relation.CELL_OUTSIDE_QUERY;
+  }
+
+  /** Returns relation to the provided rectangle */
+  public Relation relate(double minLat, double maxLat, double minLon, double maxLon) {
+    if (minLat <= maxY && minLon <= maxX) {
+      Relation relation = internalComponentRelate(minLat, maxLat, minLon, maxLon);
+      if (relation != Relation.CELL_OUTSIDE_QUERY) {
+        return relation;
+      }
+      if (left != null) {
+        relation = left.relate(minLat, maxLat, minLon, maxLon);
+        if (relation != Relation.CELL_OUTSIDE_QUERY) {
+          return relation;
+        }
+      }
+      if (right != null && ((splitX == false && maxLat >= this.minLat) || (splitX && maxLon >= this.minLon))) {
+        relation = right.relate(minLat, maxLat, minLon, maxLon);
+        if (relation != Relation.CELL_OUTSIDE_QUERY) {
+          return relation;
+        }
+      }
+    }
+    return Relation.CELL_OUTSIDE_QUERY;
+  }
+
+  protected Relation componentRelate(double minLat, double maxLat, double minLon, double maxLon) {
+    return null;
+  }
+  protected Relation componentRelateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+    return null;
+  }
+
+  private Relation internalComponentRelateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+    // compute bounding box of triangle
+    double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
+    double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
+    double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
+    double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
+    if (maxLon < this.minLon || minLon > this.maxLon || maxLat < this.minLat || minLat > this.maxLat) {
+      return Relation.CELL_OUTSIDE_QUERY;
+    }
+
+    Relation shapeRelation = componentRelateTriangle(ax, ay, bx, by, cx, cy);
+    if (shapeRelation != null) {
+      return shapeRelation;
+    }
+
+    // we cross
+    if (tree.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+      return Relation.CELL_CROSSES_QUERY;
+    }
+    return Relation.CELL_OUTSIDE_QUERY;
+  }
+
+
+  /** Returns relation to the provided rectangle for this component */
+  protected Relation internalComponentRelate(double minLat, double maxLat, double minLon, double maxLon) {
+    // if the bounding boxes are disjoint then the shape does not cross
+    if (maxLon < this.minLon || minLon > this.maxLon || maxLat < this.minLat || minLat > this.maxLat) {
+      return Relation.CELL_OUTSIDE_QUERY;
+    }
+    // if the rectangle fully encloses us, we cross.
+    if (minLat <= this.minLat && maxLat >= this.maxLat && minLon <= this.minLon && maxLon >= this.maxLon) {
+      return Relation.CELL_CROSSES_QUERY;
+    }
+
+    Relation shapeRelation = componentRelate(minLat, maxLat, minLon, maxLon);
+    if (shapeRelation != null) {
+      return shapeRelation;
+    }
+
+    // we cross
+    if (tree.crosses(minLat, maxLat, minLon, maxLon)) {
+      return Relation.CELL_CROSSES_QUERY;
+    }
+
+    return Relation.CELL_OUTSIDE_QUERY;
+  }
+
+  /** Creates tree from sorted components (with range low and high inclusive) */
+  protected static EdgeTree createTree(EdgeTree components[], int low, int high, boolean splitX) {
+    if (low > high) {
+      return null;
+    }
+    final int mid = (low + high) >>> 1;
+    if (low < high) {
+      Comparator<EdgeTree> comparator;
+      if (splitX) {
+        comparator = (left, right) -> {
+          int ret = Double.compare(left.minLon, right.minLon);
+          if (ret == 0) {
+            ret = Double.compare(left.maxX, right.maxX);
+          }
+          return ret;
+        };
+      } else {
+        comparator = (left, right) -> {
+          int ret = Double.compare(left.minLat, right.minLat);
+          if (ret == 0) {
+            ret = Double.compare(left.maxY, right.maxY);
+          }
+          return ret;
+        };
+      }
+      ArrayUtil.select(components, low, high + 1, mid, comparator);
+    }
+    // add midpoint
+    EdgeTree newNode = components[mid];
+    newNode.splitX = splitX;
+    // add children
+    newNode.left = createTree(components, low, mid - 1, !splitX);
+    newNode.right = createTree(components, mid + 1, high, !splitX);
+    // pull up max values to this node
+    if (newNode.left != null) {
+      newNode.maxX = Math.max(newNode.maxX, newNode.left.maxX);
+      newNode.maxY = Math.max(newNode.maxY, newNode.left.maxY);
+    }
+    if (newNode.right != null) {
+      newNode.maxX = Math.max(newNode.maxX, newNode.right.maxX);
+      newNode.maxY = Math.max(newNode.maxY, newNode.right.maxY);
+    }
+    return newNode;
+  }
+
+  /**
+   * Internal tree node: represents geometry edge from lat1,lon1 to lat2,lon2.
+   * The sort value is {@code low}, which is the minimum latitude of the edge.
+   * {@code max} stores the maximum latitude of this edge or any children.
+   */
+  static class Edge {
+    // lat-lon pair (in original order) of the two vertices
+    final double lat1, lat2;
+    final double lon1, lon2;
+    /** min of this edge */
+    final double low;
+    /** max latitude of this edge or any children */
+    double max;
+
+    /** left child edge, or null */
+    Edge left;
+    /** right child edge, or null */
+    Edge right;
+
+    Edge(double lat1, double lon1, double lat2, double lon2, double low, double max) {
+      this.lat1 = lat1;
+      this.lon1 = lon1;
+      this.lat2 = lat2;
+      this.lon2 = lon2;
+      this.low = low;
+      this.max = max;
+    }
+
+    /** Returns true if the triangle crosses any edge in this edge subtree */
+    boolean crossesTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
+      // compute bounding box of triangle
+      double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
+      double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
+      double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
+      double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
+
+      if (minLat <= max) {
+        double dy = lat1;
+        double ey = lat2;
+        double dx = lon1;
+        double ex = lon2;
+
+        // optimization: see if the rectangle is outside of the "bounding box" of the polyline at all
+        // if not, don't waste our time trying more complicated stuff
+        boolean outside = (dy < minLat && ey < minLat) ||
+            (dy > maxLat && ey > maxLat) ||
+            (dx < minLon && ex < minLon) ||
+            (dx > maxLon && ex > maxLon);
+
+        if (outside == false) {
+          // does triangle's first edge intersect polyline?
+          // ax, ay -> bx, by
+          if (lineCrossesLine(ax, ay, bx, by, dx, dy, ex, ey)) {
+            return true;
+          }
+
+          // does triangle's second edge intersect polyline?
+          // bx, by -> cx, cy
+          if (lineCrossesLine(bx, by, cx, cy, dx, dy, ex, ey)) {
+            return true;
+          }
+
+          // does triangle's third edge intersect polyline?
+          // cx, cy -> ax, ay
+          if (lineCrossesLine(cx, cy, ax, ay, dx, dy, ex, ey)) {
+            return true;
+          }
+        }
+
+        if (left != null) {
+          if (left.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+            return true;
+          }
+        }
+
+        if (right != null && maxLat >= low) {
+          if (right.crossesTriangle(ax, ay, bx, by, cx, cy)) {
+            return true;
+          }
+        }
+      }
+      return false;
+    }
+
+    /** Returns true if the box crosses any edge in this edge subtree */
+    boolean crosses(double minLat, double maxLat, double minLon, double maxLon) {
+      // we just have to cross one edge to answer the question, so we descend the tree and return when we do.
+      if (minLat <= max) {
+        // we compute line intersections of every polygon edge with every box line.
+        // if we find one, return true.
+        // for each box line (AB):
+        //   for each poly line (CD):
+        //     intersects = orient(C,D,A) * orient(C,D,B) <= 0 && orient(A,B,C) * orient(A,B,D) <= 0
+        double cy = lat1;
+        double dy = lat2;
+        double cx = lon1;
+        double dx = lon2;
+
+        // optimization: see if the rectangle is outside of the "bounding box" of the polyline at all
+        // if not, don't waste our time trying more complicated stuff
+        boolean outside = (cy < minLat && dy < minLat) ||
+            (cy > maxLat && dy > maxLat) ||
+            (cx < minLon && dx < minLon) ||
+            (cx > maxLon && dx > maxLon);
+        // optimization: see if either end of the line segment is contained by the rectangle
+        if (Rectangle.containsPoint(cy, cx, minLat, maxLat, minLon, maxLon)
+            || Rectangle.containsPoint(dy, dx, minLat, maxLat, minLon, maxLon)) {
+          return true;
+        }
+
+        if (outside == false) {
+          // does box's top edge intersect polyline?
+          // ax = minLon, bx = maxLon, ay = maxLat, by = maxLat
+          if (orient(cx, cy, dx, dy, minLon, maxLat) * orient(cx, cy, dx, dy, maxLon, maxLat) <= 0 &&
+              orient(minLon, maxLat, maxLon, maxLat, cx, cy) * orient(minLon, maxLat, maxLon, maxLat, dx, dy) <= 0) {
+            return true;
+          }
+
+          // does box's right edge intersect polyline?
+          // ax = maxLon, bx = maxLon, ay = maxLat, by = minLat
+          if (orient(cx, cy, dx, dy, maxLon, maxLat) * orient(cx, cy, dx, dy, maxLon, minLat) <= 0 &&
+              orient(maxLon, maxLat, maxLon, minLat, cx, cy) * orient(maxLon, maxLat, maxLon, minLat, dx, dy) <= 0) {
+            return true;
+          }
+
+          // does box's bottom edge intersect polyline?
+          // ax = maxLon, bx = minLon, ay = minLat, by = minLat
+          if (orient(cx, cy, dx, dy, maxLon, minLat) * orient(cx, cy, dx, dy, minLon, minLat) <= 0 &&
+              orient(maxLon, minLat, minLon, minLat, cx, cy) * orient(maxLon, minLat, minLon, minLat, dx, dy) <= 0) {
+            return true;
+          }
+
+          // does box's left edge intersect polyline?
+          // ax = minLon, bx = minLon, ay = minLat, by = maxLat
+          if (orient(cx, cy, dx, dy, minLon, minLat) * orient(cx, cy, dx, dy, minLon, maxLat) <= 0 &&
+              orient(minLon, minLat, minLon, maxLat, cx, cy) * orient(minLon, minLat, minLon, maxLat, dx, dy) <= 0) {
+            return true;
+          }
+        }
+
+        if (left != null) {
+          if (left.crosses(minLat, maxLat, minLon, maxLon)) {
+            return true;
+          }
+        }
+
+        if (right != null && maxLat >= low) {
+          if (right.crosses(minLat, maxLat, minLon, maxLon)) {
+            return true;
+          }
+        }
+      }
+      return false;
+    }
+  }
+
+  /**
+   * Creates an edge interval tree from a set of geometry vertices.
+   * @return root node of the tree.
+   */
+  private static Edge createTree(double[] lats, double[] lons) {
+    Edge edges[] = new Edge[lats.length - 1];
+    for (int i = 1; i < lats.length; i++) {
+      double lat1 = lats[i-1];
+      double lon1 = lons[i-1];
+      double lat2 = lats[i];
+      double lon2 = lons[i];
+      edges[i - 1] = new Edge(lat1, lon1, lat2, lon2, Math.min(lat1, lat2), Math.max(lat1, lat2));
+    }
+    // sort the edges then build a balanced tree from them
+    Arrays.sort(edges, (left, right) -> {
+      int ret = Double.compare(left.low, right.low);
+      if (ret == 0) {
+        ret = Double.compare(left.max, right.max);
+      }
+      return ret;
+    });
+    return createTree(edges, 0, edges.length - 1);
+  }
+
+  /** Creates tree from sorted edges (with range low and high inclusive) */
+  private static Edge createTree(Edge edges[], int low, int high) {
+    if (low > high) {
+      return null;
+    }
+    // add midpoint
+    int mid = (low + high) >>> 1;
+    Edge newNode = edges[mid];
+    // add children
+    newNode.left = createTree(edges, low, mid - 1);
+    newNode.right = createTree(edges, mid + 1, high);
+    // pull up max values to this node
+    if (newNode.left != null) {
+      newNode.max = Math.max(newNode.max, newNode.left.max);
+    }
+    if (newNode.right != null) {
+      newNode.max = Math.max(newNode.max, newNode.right.max);
+    }
+    return newNode;
+  }
+}
diff --git a/lucene/core/src/java/org/apache/lucene/geo/GeoEncodingUtils.java b/lucene/core/src/java/org/apache/lucene/geo/GeoEncodingUtils.java
index 663cb2e..00b7252 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/GeoEncodingUtils.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/GeoEncodingUtils.java
@@ -43,6 +43,10 @@
   private static final double LON_SCALE = (0x1L<<BITS)/360.0D;
   private static final double LON_DECODE = 1/LON_SCALE;
 
+  public static final int MIN_LON_ENCODED = encodeLongitude(MIN_LON_INCL);
+  public static final int MAX_LON_ENCODED = encodeLongitude(MAX_LON_INCL);
+
+
   // No instance:
   private GeoEncodingUtils() {
   }
diff --git a/lucene/core/src/java/org/apache/lucene/geo/GeoUtils.java b/lucene/core/src/java/org/apache/lucene/geo/GeoUtils.java
index 468de93..0c73032 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/GeoUtils.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/GeoUtils.java
@@ -194,6 +194,20 @@
     }
   }
 
+  /** uses orient method to compute whether two line segments cross */
+  public static boolean lineCrossesLine(double a1x, double a1y, double b1x, double b1y, double a2x, double a2y, double b2x, double b2y) {
+    // shortcut: either "line" is actually a point
+    if ((a1x == b1x && a1y == b1y) || (a2x == b2x && a2y == b2y)) {
+      return false;
+    }
+
+    if (orient(a2x, a2y, b2x, b2y, a1x, a1y) * orient(a2x, a2y, b2x, b2y, b1x, b1y) <= 0 &&
+        orient(a1x, a1y, b1x, b1y, a2x, a2y) * orient(a1x, a1y, b1x, b1y, b2x, b2y) <= 0) {
+      return true;
+    }
+    return false;
+  }
+
   /**
    * used to define the orientation of 3 points
    * -1 = Clockwise
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java b/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
index 64a3784..fee23d0 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Polygon2D.java
@@ -16,72 +16,29 @@
  */
 package org.apache.lucene.geo;
 
-import java.util.Arrays;
-import java.util.Comparator;
-
 import org.apache.lucene.index.PointValues.Relation;
-import org.apache.lucene.util.ArrayUtil;
-
-import static org.apache.lucene.geo.GeoUtils.orient;
 
 /**
  * 2D polygon implementation represented as a balanced interval tree of edges.
  * <p>
- * Construction takes {@code O(n log n)} time for sorting and tree construction.
- * {@link #contains contains()} and {@link #relate relate()} are {@code O(n)}, but for most 
- * practical polygons are much faster than brute force.
- * <p>
  * Loosely based on the algorithm described in <a href="http://www-ma2.upc.es/geoc/Schirra-pointPolygon.pdf">
  * http://www-ma2.upc.es/geoc/Schirra-pointPolygon.pdf</a>.
  * @lucene.internal
  */
 // Both Polygon.contains() and Polygon.crossesSlowly() loop all edges, and first check that the edge is within a range.
-// we just organize the edges to do the same computations on the same subset of edges more efficiently. 
-public final class Polygon2D {
-  /** minimum latitude of this polygon's bounding box area */
-  public final double minLat;
-  /** maximum latitude of this polygon's bounding box area */
-  public final double maxLat;
-  /** minimum longitude of this polygon's bounding box area */
-  public final double minLon;
-  /** maximum longitude of this polygon's bounding box area */
-  public final double maxLon;
-  
+// we just organize the edges to do the same computations on the same subset of edges more efficiently.
+public final class Polygon2D extends EdgeTree {
   // each component/hole is a node in an augmented 2d kd-tree: we alternate splitting between latitude/longitude,
   // and pull up max values for both dimensions to each parent node (regardless of split).
-
-  /** maximum latitude of this component or any of its children */
-  private double maxY;
-  /** maximum longitude of this component or any of its children */
-  private double maxX;
-  /** which dimension was this node split on */
-  // TODO: its implicit based on level, but boolean keeps code simple
-  private boolean splitX;
-
-  // child components, or null
-  private Polygon2D left;
-  private Polygon2D right;
-  
   /** tree of holes, or null */
   private final Polygon2D holes;
-  
-  /** root node of edge tree */
-  private final Edge tree;
 
   private Polygon2D(Polygon polygon, Polygon2D holes) {
+    super(polygon.minLat, polygon.maxLat, polygon.minLon, polygon.maxLon, polygon.getPolyLats(), polygon.getPolyLons());
     this.holes = holes;
-    this.minLat = polygon.minLat;
-    this.maxLat = polygon.maxLat;
-    this.minLon = polygon.minLon;
-    this.maxLon = polygon.maxLon;
-    this.maxY = maxLat;
-    this.maxX = maxLon;
-    
-    // create interval tree of edges
-    this.tree = createTree(polygon.getPolyLats(), polygon.getPolyLons());
   }
 
-  /** 
+  /**
    * Returns true if the point is contained within this polygon.
    * <p>
    * See <a href="https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html">
@@ -93,96 +50,36 @@
         return true;
       }
       if (left != null) {
-        if (left.contains(latitude, longitude)) {
+        if (((Polygon2D)left).contains(latitude, longitude)) {
           return true;
         }
       }
       if (right != null && ((splitX == false && latitude >= minLat) || (splitX && longitude >= minLon))) {
-        if (right.contains(latitude, longitude)) {
+        if (((Polygon2D)right).contains(latitude, longitude)) {
           return true;
         }
       }
     }
     return false;
   }
-  
+
   /** Returns true if the point is contained within this polygon component. */
   private boolean componentContains(double latitude, double longitude) {
     // check bounding box
     if (latitude < minLat || latitude > maxLat || longitude < minLon || longitude > maxLon) {
       return false;
     }
-    
-    if (tree.contains(latitude, longitude)) {
+    if (contains(tree, latitude, longitude)) {
       if (holes != null && holes.contains(latitude, longitude)) {
         return false;
       }
       return true;
     }
-    
     return false;
   }
 
-  /** Returns relation to the provided triangle */
-  public Relation relateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
-    // compute bounding box of triangle
-    double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
-    double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
-    double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
-    double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
-    if (minLat <= maxY && minLon <= maxX) {
-      Relation relation = componentRelateTriangle(ax, ay, bx, by, cx, cy);
-      if (relation != Relation.CELL_OUTSIDE_QUERY) {
-        return relation;
-      }
-      if (left != null) {
-        relation = left.relateTriangle(ax, ay, bx, by, cx, cy);
-        if (relation != Relation.CELL_OUTSIDE_QUERY) {
-          return relation;
-        }
-      }
-      if (right != null && ((splitX == false && maxLat >= this.minLat) || (splitX && maxLon >= this.minLon))) {
-        relation = right.relateTriangle(ax, ay, bx, by, cx, cy);
-        if (relation != Relation.CELL_OUTSIDE_QUERY) {
-          return relation;
-        }
-      }
-    }
-    return Relation.CELL_OUTSIDE_QUERY;
-  }
-
-  /** Returns relation to the provided rectangle */
-  public Relation relate(double minLat, double maxLat, double minLon, double maxLon) {
-    if (minLat <= maxY && minLon <= maxX) {
-      Relation relation = componentRelate(minLat, maxLat, minLon, maxLon);
-      if (relation != Relation.CELL_OUTSIDE_QUERY) {
-        return relation;
-      }
-      if (left != null) {
-        relation = left.relate(minLat, maxLat, minLon, maxLon);
-        if (relation != Relation.CELL_OUTSIDE_QUERY) {
-          return relation;
-        }
-      }
-      if (right != null && ((splitX == false && maxLat >= this.minLat) || (splitX && maxLon >= this.minLon))) {
-        relation = right.relate(minLat, maxLat, minLon, maxLon);
-        if (relation != Relation.CELL_OUTSIDE_QUERY) {
-          return relation;
-        }
-      }
-    }
-    return Relation.CELL_OUTSIDE_QUERY;
-  }
-
-  private Relation componentRelateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
-    // compute bounding box of triangle
-    double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
-    double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
-    double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
-    double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
-    if (maxLon < this.minLon || minLon > this.maxLon || maxLat < this.minLat || minLat > this.maxLat) {
-      return Relation.CELL_OUTSIDE_QUERY;
-    }
+  @Override
+  protected Relation componentRelateTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
     // check any holes
     if (holes != null) {
       Relation holeRelation = holes.relateTriangle(ax, ay, bx, by, cx, cy);
@@ -202,24 +99,12 @@
     } else if (numCorners > 0) {
       return Relation.CELL_CROSSES_QUERY;
     }
-
-    // we cross
-    if (tree.crossesTriangle(ax, ay, bx, by, cx, cy)) {
-      return Relation.CELL_CROSSES_QUERY;
-    }
-    return Relation.CELL_OUTSIDE_QUERY;
+    return null;
   }
 
   /** Returns relation to the provided rectangle for this component */
-  private Relation componentRelate(double minLat, double maxLat, double minLon, double maxLon) {
-    // if the bounding boxes are disjoint then the shape does not cross
-    if (maxLon < this.minLon || minLon > this.maxLon || maxLat < this.minLat || minLat > this.maxLat) {
-      return Relation.CELL_OUTSIDE_QUERY;
-    }
-    // if the rectangle fully encloses us, we cross.
-    if (minLat <= this.minLat && maxLat >= this.maxLat && minLon <= this.minLon && maxLon >= this.maxLon) {
-      return Relation.CELL_CROSSES_QUERY;
-    }
+  @Override
+  protected Relation componentRelate(double minLat, double maxLat, double minLon, double maxLon) {
     // check any holes
     if (holes != null) {
       Relation holeRelation = holes.relate(minLat, maxLat, minLon, maxLon);
@@ -239,13 +124,7 @@
     } else if (numCorners > 0) {
       return Relation.CELL_CROSSES_QUERY;
     }
-    
-    // we cross
-    if (tree.crosses(minLat, maxLat, minLon, maxLon)) {
-      return Relation.CELL_CROSSES_QUERY;
-    }
-    
-    return Relation.CELL_OUTSIDE_QUERY;
+    return null;
   }
 
   private int numberOfTriangleCorners(double ax, double ay, double bx, double by, double cx, double cy) {
@@ -288,52 +167,7 @@
     }
     return containsCount;
   }
-  
-  /** Creates tree from sorted components (with range low and high inclusive) */
-  private static Polygon2D createTree(Polygon2D components[], int low, int high, boolean splitX) {
-    if (low > high) {
-      return null;
-    }
-    final int mid = (low + high) >>> 1;
-    if (low < high) {
-      Comparator<Polygon2D> comparator;
-      if (splitX) {
-        comparator = (left, right) -> {
-          int ret = Double.compare(left.minLon, right.minLon);
-          if (ret == 0) {
-            ret = Double.compare(left.maxX, right.maxX);
-          }
-          return ret;
-        };
-      } else {
-        comparator = (left, right) -> {
-          int ret = Double.compare(left.minLat, right.minLat);
-          if (ret == 0) {
-            ret = Double.compare(left.maxY, right.maxY);
-          }
-          return ret;
-        };
-      }
-      ArrayUtil.select(components, low, high + 1, mid, comparator);
-    }
-    // add midpoint
-    Polygon2D newNode = components[mid];
-    newNode.splitX = splitX;
-    // add children
-    newNode.left = createTree(components, low, mid - 1, !splitX);
-    newNode.right = createTree(components, mid + 1, high, !splitX);
-    // pull up max values to this node
-    if (newNode.left != null) {
-      newNode.maxX = Math.max(newNode.maxX, newNode.left.maxX);
-      newNode.maxY = Math.max(newNode.maxY, newNode.left.maxY);
-    }
-    if (newNode.right != null) {
-      newNode.maxX = Math.max(newNode.maxX, newNode.right.maxX);
-      newNode.maxY = Math.max(newNode.maxY, newNode.right.maxY);
-    }
-    return newNode;
-  }
-  
+
   /** Builds a Polygon2D from multipolygon */
   public static Polygon2D create(Polygon... polygons) {
     Polygon2D components[] = new Polygon2D[polygons.length];
@@ -346,253 +180,55 @@
       }
       components[i] = new Polygon2D(gon, holes);
     }
-    return createTree(components, 0, components.length - 1, false);
+    return (Polygon2D)createTree(components, 0, components.length - 1, false);
   }
 
-  /** 
-   * Internal tree node: represents polygon edge from lat1,lon1 to lat2,lon2.
-   * The sort value is {@code low}, which is the minimum latitude of the edge.
-   * {@code max} stores the maximum latitude of this edge or any children.
+  /**
+   * Returns true if the point crosses this edge subtree an odd number of times
+   * <p>
+   * See <a href="https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html">
+   * https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html</a> for more information.
    */
-  static final class Edge {
-    // lat-lon pair (in original order) of the two vertices
-    final double lat1, lat2;
-    final double lon1, lon2;
-    /** min of this edge */
-    final double low;
-    /** max latitude of this edge or any children */
-    double max;
-    
-    /** left child edge, or null */
-    Edge left;
-    /** right child edge, or null */
-    Edge right;
-
-    Edge(double lat1, double lon1, double lat2, double lon2, double low, double max) {
-      this.lat1 = lat1;
-      this.lon1 = lon1;
-      this.lat2 = lat2;
-      this.lon2 = lon2;
-      this.low = low;
-      this.max = max;
-    }
-    
-    /** 
-     * Returns true if the point crosses this edge subtree an odd number of times
-     * <p>
-     * See <a href="https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html">
-     * https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html</a> for more information.
-     */
-    // ported to java from https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html
-    // original code under the BSD license (https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#License%20to%20Use)
-    //
-    // Copyright (c) 1970-2003, Wm. Randolph Franklin
-    //
-    // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated 
-    // documentation files (the "Software"), to deal in the Software without restriction, including without limitation 
-    // the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and 
-    // to permit persons to whom the Software is furnished to do so, subject to the following conditions:
-    //
-    // 1. Redistributions of source code must retain the above copyright 
-    //    notice, this list of conditions and the following disclaimers.
-    // 2. Redistributions in binary form must reproduce the above copyright 
-    //    notice in the documentation and/or other materials provided with 
-    //    the distribution.
-    // 3. The name of W. Randolph Franklin may not be used to endorse or 
-    //    promote products derived from this Software without specific 
-    //    prior written permission. 
-    //
-    // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 
-    // TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 
-    // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF 
-    // CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS 
-    // IN THE SOFTWARE. 
-    boolean contains(double latitude, double longitude) {
-      // crossings algorithm is an odd-even algorithm, so we descend the tree xor'ing results along our path
-      boolean res = false;
-      if (latitude <= max) {
-        if (lat1 > latitude != lat2 > latitude) {
-          if (longitude < (lon1 - lon2) * (latitude - lat2) / (lat1 - lat2) + lon2) {
-            res = true;
-          }
-        }
-        if (left != null) {
-          res ^= left.contains(latitude, longitude);
-        }
-        if (right != null && latitude >= low) {
-          res ^= right.contains(latitude, longitude);
+  // ported to java from https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html
+  // original code under the BSD license (https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#License%20to%20Use)
+  //
+  // Copyright (c) 1970-2003, Wm. Randolph Franklin
+  //
+  // Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
+  // documentation files (the "Software"), to deal in the Software without restriction, including without limitation
+  // the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and
+  // to permit persons to whom the Software is furnished to do so, subject to the following conditions:
+  //
+  // 1. Redistributions of source code must retain the above copyright
+  //    notice, this list of conditions and the following disclaimers.
+  // 2. Redistributions in binary form must reproduce the above copyright
+  //    notice in the documentation and/or other materials provided with
+  //    the distribution.
+  // 3. The name of W. Randolph Franklin may not be used to endorse or
+  //    promote products derived from this Software without specific
+  //    prior written permission.
+  //
+  // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
+  // TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+  // THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF
+  // CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
+  // IN THE SOFTWARE.
+  private static boolean contains(Edge tree, double latitude, double longitude) {
+    // crossings algorithm is an odd-even algorithm, so we descend the tree xor'ing results along our path
+    boolean res = false;
+    if (latitude <= tree.max) {
+      if (tree.lat1 > latitude != tree.lat2 > latitude) {
+        if (longitude < (tree.lon1 - tree.lon2) * (latitude - tree.lat2) / (tree.lat1 - tree.lat2) + tree.lon2) {
+          res = true;
         }
       }
-      return res;
-    }
-
-    /** Returns true if the triangle crosses any edge in this edge subtree */
-    boolean crossesTriangle(double ax, double ay, double bx, double by, double cx, double cy) {
-      // compute bounding box of triangle
-      double minLat = StrictMath.min(StrictMath.min(ay, by), cy);
-      double minLon = StrictMath.min(StrictMath.min(ax, bx), cx);
-      double maxLat = StrictMath.max(StrictMath.max(ay, by), cy);
-      double maxLon = StrictMath.max(StrictMath.max(ax, bx), cx);
-
-      if (minLat <= max) {
-        double dy = lat1;
-        double ey = lat2;
-        double dx = lon1;
-        double ex = lon2;
-
-        // optimization: see if the rectangle is outside of the "bounding box" of the polyline at all
-        // if not, don't waste our time trying more complicated stuff
-        boolean outside = (dy < minLat && ey < minLat) ||
-            (dy > maxLat && ey > maxLat) ||
-            (dx < minLon && ex < minLon) ||
-            (dx > maxLon && ex > maxLon);
-
-        if (outside == false) {
-          // does triangle's first edge intersect polyline?
-          // ax, ay -> bx, by
-          if (orient(dx, dy, ex, ey, ax, ay) * orient(dx, dy, ex, ey, bx, by) <= 0 &&
-              orient(ax, ay, bx, by, dx, dy) * orient(ax, ay, bx, by, ex, ey) <= 0) {
-            return true;
-          }
-
-          // does triangle's second edge intersect polyline?
-          // bx, by -> cx, cy
-          if (orient(dx, dy, ex, ey, bx, by) * orient(dx, dy, ex, ey, cx, cy) <= 0 &&
-              orient(bx, by, cx, cy, dx, dy) * orient(bx, by, cx, cy, ex, ey) <= 0) {
-            return true;
-          }
-
-          // does triangle's third edge intersect polyline?
-          // cx, cy -> ax, ay
-          if (orient(dx, dy, ex, ey, cx, cy) * orient(dx, dy, ex, ey, ax, ay) <= 0 &&
-              orient(cx, cy, ax, ay, dx, dy) * orient(cx, cy, ax, ay, ex, ey) <= 0) {
-            return true;
-          }
-        }
-
-        if (left != null) {
-          if (left.crossesTriangle(ax, ay, bx, by, cx, cy)) {
-            return true;
-          }
-        }
-
-        if (right != null && maxLat >= low) {
-          if (right.crossesTriangle(ax, ay, bx, by, cx, cy)) {
-            return true;
-          }
-        }
+      if (tree.left != null) {
+        res ^= contains(tree.left, latitude, longitude);
       }
-      return false;
-    }
-
-    /** Returns true if the box crosses any edge in this edge subtree */
-    boolean crosses(double minLat, double maxLat, double minLon, double maxLon) {
-      // we just have to cross one edge to answer the question, so we descend the tree and return when we do.
-      if (minLat <= max) {
-        // we compute line intersections of every polygon edge with every box line.
-        // if we find one, return true.
-        // for each box line (AB):
-        //   for each poly line (CD):
-        //     intersects = orient(C,D,A) * orient(C,D,B) <= 0 && orient(A,B,C) * orient(A,B,D) <= 0
-        double cy = lat1;
-        double dy = lat2;
-        double cx = lon1;
-        double dx = lon2;
-        
-        // optimization: see if the rectangle is outside of the "bounding box" of the polyline at all
-        // if not, don't waste our time trying more complicated stuff
-        boolean outside = (cy < minLat && dy < minLat) ||
-                          (cy > maxLat && dy > maxLat) ||
-                          (cx < minLon && dx < minLon) ||
-                          (cx > maxLon && dx > maxLon);
-        if (outside == false) {
-          // does box's top edge intersect polyline?
-          // ax = minLon, bx = maxLon, ay = maxLat, by = maxLat
-          if (orient(cx, cy, dx, dy, minLon, maxLat) * orient(cx, cy, dx, dy, maxLon, maxLat) <= 0 &&
-              orient(minLon, maxLat, maxLon, maxLat, cx, cy) * orient(minLon, maxLat, maxLon, maxLat, dx, dy) <= 0) {
-            return true;
-          }
-
-          // does box's right edge intersect polyline?
-          // ax = maxLon, bx = maxLon, ay = maxLat, by = minLat
-          if (orient(cx, cy, dx, dy, maxLon, maxLat) * orient(cx, cy, dx, dy, maxLon, minLat) <= 0 &&
-              orient(maxLon, maxLat, maxLon, minLat, cx, cy) * orient(maxLon, maxLat, maxLon, minLat, dx, dy) <= 0) {
-            return true;
-          }
-
-          // does box's bottom edge intersect polyline?
-          // ax = maxLon, bx = minLon, ay = minLat, by = minLat
-          if (orient(cx, cy, dx, dy, maxLon, minLat) * orient(cx, cy, dx, dy, minLon, minLat) <= 0 &&
-              orient(maxLon, minLat, minLon, minLat, cx, cy) * orient(maxLon, minLat, minLon, minLat, dx, dy) <= 0) {
-            return true;
-          }
-
-          // does box's left edge intersect polyline?
-          // ax = minLon, bx = minLon, ay = minLat, by = maxLat
-          if (orient(cx, cy, dx, dy, minLon, minLat) * orient(cx, cy, dx, dy, minLon, maxLat) <= 0 &&
-              orient(minLon, minLat, minLon, maxLat, cx, cy) * orient(minLon, minLat, minLon, maxLat, dx, dy) <= 0) {
-            return true;
-          }
-        }
-        
-        if (left != null) {
-          if (left.crosses(minLat, maxLat, minLon, maxLon)) {
-            return true;
-          }
-        }
-        
-        if (right != null && maxLat >= low) {
-          if (right.crosses(minLat, maxLat, minLon, maxLon)) {
-            return true;
-          }
-        }
+      if (tree.right != null && latitude >= tree.low) {
+        res ^= contains(tree.right, latitude, longitude);
       }
-      return false;
     }
-  }
-
-  /** 
-   * Creates an edge interval tree from a set of polygon vertices.
-   * @return root node of the tree.
-   */
-  private static Edge createTree(double polyLats[], double polyLons[]) {
-    Edge edges[] = new Edge[polyLats.length - 1];
-    for (int i = 1; i < polyLats.length; i++) {
-      double lat1 = polyLats[i-1];
-      double lon1 = polyLons[i-1];
-      double lat2 = polyLats[i];
-      double lon2 = polyLons[i];
-      edges[i - 1] = new Edge(lat1, lon1, lat2, lon2, Math.min(lat1, lat2), Math.max(lat1, lat2));
-    }
-    // sort the edges then build a balanced tree from them
-    Arrays.sort(edges, (left, right) -> {
-      int ret = Double.compare(left.low, right.low);
-      if (ret == 0) {
-        ret = Double.compare(left.max, right.max);
-      }
-      return ret;
-    });
-    return createTree(edges, 0, edges.length - 1);
-  }
-
-  /** Creates tree from sorted edges (with range low and high inclusive) */
-  private static Edge createTree(Edge edges[], int low, int high) {
-    if (low > high) {
-      return null;
-    }
-    // add midpoint
-    int mid = (low + high) >>> 1;
-    Edge newNode = edges[mid];
-    // add children
-    newNode.left = createTree(edges, low, mid - 1);
-    newNode.right = createTree(edges, mid + 1, high);
-    // pull up max values to this node
-    if (newNode.left != null) {
-      newNode.max = Math.max(newNode.max, newNode.left.max);
-    }
-    if (newNode.right != null) {
-      newNode.max = Math.max(newNode.max, newNode.right.max);
-    }
-    return newNode;
+    return res;
   }
 }
diff --git a/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java b/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java
index a8200c6..45d437d 100644
--- a/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java
+++ b/lucene/core/src/java/org/apache/lucene/geo/Rectangle.java
@@ -87,6 +87,13 @@
     return maxLon < minLon;
   }
 
+  /** returns true if rectangle (defined by minLat, maxLat, minLon, maxLon) contains the lat lon point */
+  public static boolean containsPoint(final double lat, final double lon,
+                                      final double minLat, final double maxLat,
+                                      final double minLon, final double maxLon) {
+    return lat >= minLat && lat <= maxLat && lon >= minLon && lon <= maxLon;
+  }
+
   /** Compute Bounding Box for a circle using WGS-84 parameters */
   public static Rectangle fromPointDistance(final double centerLat, final double centerLon, final double radiusMeters) {
     checkLatitude(centerLat);
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
index 1d17b10..7c074cf 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShape.java
@@ -84,7 +84,6 @@
 
     // create "flat" triangles
     double aLat, bLat, aLon, bLon, temp;
-    double size;
     for (int i = 0, j = 1; j < numPoints; ++i, ++j) {
       aLat = line.getLat(i);
       aLon = line.getLon(i);
@@ -107,25 +106,32 @@
           bLon = temp;
         }
       }
-      size = StrictMath.sqrt(StrictMath.pow(aLat - bLat, 2d) + StrictMath.pow(aLon - bLon, 2d));
-      fields.add(new LatLonTriangle(fieldName, aLat, aLon, bLat, bLon, aLat, aLon, size));
+      fields.add(new LatLonTriangle(fieldName, aLat, aLon, bLat, bLon, aLat, aLon));
     }
     return fields.toArray(new Field[fields.size()]);
   }
 
   /** create indexable fields for point geometry */
   public static Field[] createIndexableFields(String fieldName, double lat, double lon) {
-    return new Field[] {new LatLonTriangle(fieldName, lat, lon, lat, lon, lat, lon, 0d)};
+    return new Field[] {new LatLonTriangle(fieldName, lat, lon, lat, lon, lat, lon)};
   }
 
   /** create a query to find all polygons that intersect a defined bounding box
-   *  note: does not currently support dateline crossing boxes
-   * todo split dateline crossing boxes into two queries like {@link LatLonPoint#newBoxQuery}
    **/
   public static Query newBoxQuery(String field, QueryRelation queryRelation, double minLatitude, double maxLatitude, double minLongitude, double maxLongitude) {
     return new LatLonShapeBoundingBoxQuery(field, queryRelation, minLatitude, maxLatitude, minLongitude, maxLongitude);
   }
 
+  /** create a query to find all polygons that intersect a provided linestring (or array of linestrings)
+   *  note: does not support dateline crossing
+   **/
+  public static Query newLineQuery(String field, QueryRelation queryRelation, Line... lines) {
+    return new LatLonShapeLineQuery(field, queryRelation, lines);
+  }
+
+  /** create a query to find all polygons that intersect a provided polygon (or array of polygons)
+   *  note: does not support dateline crossing
+   **/
   public static Query newPolygonQuery(String field, QueryRelation queryRelation, Polygon... polygons) {
     return new LatLonShapePolygonQuery(field, queryRelation, polygons);
   }
@@ -135,7 +141,7 @@
    */
   private static class LatLonTriangle extends Field {
 
-    LatLonTriangle(String name, double aLat, double aLon, double bLat, double bLon, double cLat, double cLon, double size) {
+    LatLonTriangle(String name, double aLat, double aLon, double bLat, double bLon, double cLat, double cLon) {
       super(name, TYPE);
       setTriangleValue(encodeLongitude(aLon), encodeLatitude(aLat), encodeLongitude(bLon), encodeLatitude(bLat), encodeLongitude(cLon), encodeLatitude(cLat));
     }
@@ -178,6 +184,9 @@
     }
   }
 
+  /** encodes bounding box value of triangle. Note the encoding uses 64 bits, but the bounding box only needs
+   * 32 bits, so we pad w/ zeros to take advantage of prefix compression.
+   */
   public static void encodeTriangleBoxVal(int encodedVal, byte[] bytes, int offset) {
     long val = (long)(encodedVal ^ 0x80000000);
     val &= 0x00000000FFFFFFFFL;
@@ -185,6 +194,7 @@
     NumericUtils.longToSortableBytes(val, bytes, offset);
   }
 
+  /** counterpart to {@link #encodeTriangleBoxVal}; decodes encoded triangle bounding box values */
   public static int decodeTriangleBoxVal(byte[] encoded, int offset) {
     long val = NumericUtils.sortableBytesToLong(encoded, offset);
     int result = (int)(val & 0x00000000FFFFFFFF);
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
index cb8f9a1..b4f7f4b 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeBoundingBoxQuery.java
@@ -18,13 +18,15 @@
 
 import java.util.Arrays;
 
-import org.apache.lucene.geo.Polygon;
+import org.apache.lucene.geo.Rectangle;
 import org.apache.lucene.geo.Tessellator;
 import org.apache.lucene.index.PointValues.Relation;
 import org.apache.lucene.util.FutureArrays;
 import org.apache.lucene.util.NumericUtils;
 
 import static org.apache.lucene.document.LatLonShape.BYTES;
+import static org.apache.lucene.geo.GeoEncodingUtils.MAX_LON_ENCODED;
+import static org.apache.lucene.geo.GeoEncodingUtils.MIN_LON_ENCODED;
 import static org.apache.lucene.geo.GeoEncodingUtils.decodeLatitude;
 import static org.apache.lucene.geo.GeoEncodingUtils.decodeLongitude;
 import static org.apache.lucene.geo.GeoEncodingUtils.encodeLatitude;
@@ -37,12 +39,13 @@
  * Finds all previously indexed shapes that intersect the specified bounding box.
  *
  * <p>The field must be indexed using
- * {@link org.apache.lucene.document.LatLonShape#createIndexableFields(String, Polygon)} added per document.
+ * {@link org.apache.lucene.document.LatLonShape#createIndexableFields} added per document.
  *
  *  @lucene.experimental
  **/
 final class LatLonShapeBoundingBoxQuery extends LatLonShapeQuery {
   final byte[] bbox;
+  final byte[] west;
   final int minX;
   final int maxX;
   final int minY;
@@ -50,23 +53,59 @@
 
   public LatLonShapeBoundingBoxQuery(String field, LatLonShape.QueryRelation queryRelation, double minLat, double maxLat, double minLon, double maxLon) {
     super(field, queryRelation);
-    if (minLon > maxLon) {
-      throw new IllegalArgumentException("dateline crossing bounding box queries are not supported for [" + field + "]");
-    }
+
     this.bbox = new byte[4 * LatLonShape.BYTES];
-    this.minX = encodeLongitudeCeil(minLon);
-    this.maxX = encodeLongitude(maxLon);
+    int minXenc = encodeLongitudeCeil(minLon);
+    int maxXenc = encodeLongitude(maxLon);
     this.minY = encodeLatitudeCeil(minLat);
     this.maxY = encodeLatitude(maxLat);
-    LatLonShape.encodeTriangleBoxVal(this.minY, bbox, 0);
-    LatLonShape.encodeTriangleBoxVal(this.minX, bbox, BYTES);
-    LatLonShape.encodeTriangleBoxVal(this.maxY, bbox, 2 * BYTES);
-    LatLonShape.encodeTriangleBoxVal(this.maxX, bbox, 3 * BYTES);
+
+    if (minLon > maxLon == true) {
+      // crossing dateline is split into east/west boxes
+      this.west = new byte[4 * LatLonShape.BYTES];
+      this.minX = minXenc;
+      this.maxX = maxXenc;
+      encode(MIN_LON_ENCODED, this.maxX, this.minY, this.maxY, this.west);
+      encode(this.minX, MAX_LON_ENCODED, this.minY, this.maxY, this.bbox);
+    } else {
+      // encodeLongitudeCeil may cause minX to be > maxX iff
+      // the delta between the longtude < the encoding resolution
+      if (minXenc > maxXenc) {
+        minXenc = maxXenc;
+      }
+      this.west = null;
+      this.minX = minXenc;
+      this.maxX = maxXenc;
+      encode(this.minX, this.maxX, this.minY, this.maxY, bbox);
+    }
+  }
+
+  /** encodes a bounding box into the provided byte array */
+  private static void encode(final int minX, final int maxX, final int minY, final int maxY, byte[] b) {
+    if (b == null) {
+      b = new byte[4 * LatLonShape.BYTES];
+    }
+    LatLonShape.encodeTriangleBoxVal(minY, b, 0);
+    LatLonShape.encodeTriangleBoxVal(minX, b, BYTES);
+    LatLonShape.encodeTriangleBoxVal(maxY, b, 2 * BYTES);
+    LatLonShape.encodeTriangleBoxVal(maxX, b, 3 * BYTES);
   }
 
   @Override
   protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
                                             int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+    Relation eastRelation = compareBBoxToRangeBBox(this.bbox, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    if (this.crossesDateline() && eastRelation == Relation.CELL_OUTSIDE_QUERY) {
+      return compareBBoxToRangeBBox(this.west, minXOffset, minYOffset, minTriangle, maxXOffset, maxYOffset, maxTriangle);
+    }
+
+    return eastRelation;
+  }
+
+  /** static utility method to compare a bbox with a range of triangles (just the bbox of the triangle collection) */
+  protected static Relation compareBBoxToRangeBBox(final byte[] bbox,
+                                                   int minXOffset, int minYOffset, byte[] minTriangle,
+                                                   int maxXOffset, int maxYOffset, byte[] maxTriangle) {
     // check bounding box (DISJOINT)
     if (FutureArrays.compareUnsigned(minTriangle, minXOffset, minXOffset + BYTES, bbox, 3 * BYTES, 4 * BYTES) > 0 ||
         FutureArrays.compareUnsigned(maxTriangle, maxXOffset, maxXOffset + BYTES, bbox, BYTES, 2 * BYTES) < 0 ||
@@ -87,6 +126,7 @@
   /** returns true if the query matches the encoded triangle */
   @Override
   protected boolean queryMatches(byte[] t) {
+    // decode indexed triangle
     long a = NumericUtils.sortableBytesToLong(t, 4 * LatLonShape.BYTES);
     long b = NumericUtils.sortableBytesToLong(t, 5 * LatLonShape.BYTES);
     long c = NumericUtils.sortableBytesToLong(t, 6 * LatLonShape.BYTES);
@@ -99,32 +139,60 @@
     int cY = (int)(c & 0x00000000FFFFFFFFL);
 
     if (queryRelation == LatLonShape.QueryRelation.WITHIN) {
-      return queryContains(aX, aY) && queryContains(bX, bY) && queryContains(cX, cY);
+      return queryContainsTriangle(aX, aY, bX, bY, cX, cY);
     }
     return queryMatches(aX, aY, bX, bY, cX, cY);
   }
 
-  private boolean queryContains(int x, int y) {
+  private boolean queryContainsTriangle(int ax, int ay, int bx, int by, int cx, int cy) {
+    if (this.crossesDateline() == true) {
+      return bboxContainsTriangle(ax, ay, bx, by, cx, cy, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
+          || bboxContainsTriangle(ax, ay, bx, by, cx, cy, this.minX, MAX_LON_ENCODED, this.minY, this.maxY);
+    }
+    return bboxContainsTriangle(ax, ay, bx, by, cx, cy, minX, maxX, minY, maxY);
+  }
+
+  /** static utility method to check if a bounding box contains a point */
+  private static boolean bboxContainsPoint(int x, int y, int minX, int maxX, int minY, int maxY) {
     return (x < minX || x > maxX || y < minY || y > maxY) == false;
   }
 
-  private boolean queryContains(int ax, int ay, int bx, int by, int cx, int cy) {
-    return queryContains(ax, ay) || queryContains(bx, by) || queryContains(cx, cy);
+  /** static utility method to check if a bounding box contains a triangle */
+  private static boolean bboxContainsTriangle(int ax, int ay, int bx, int by, int cx, int cy,
+                                              int minX, int maxX, int minY, int maxY) {
+    return bboxContainsPoint(ax, ay, minX, maxX, minY, maxY)
+        && bboxContainsPoint(bx, by, minX, maxX, minY, maxY)
+        && bboxContainsPoint(cx, cy, minX, maxX, minY, maxY);
+  }
+
+  /** instance method to check if query box contains point */
+  private boolean queryContainsPoint(int x, int y) {
+    if (this.crossesDateline() == true) {
+      return bboxContainsPoint(x, y, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
+          || bboxContainsPoint(x, y, this.minX, MAX_LON_ENCODED, this.minY, this.maxY);
+    }
+    return bboxContainsPoint(x, y, this.minX, this.maxX, this.minY, this.maxY);
   }
 
   protected boolean queryMatches(int aX, int aY, int bX, int bY, int cX, int cY) {
     // 1. query contains any triangle points
-    if (queryContains(aX, aY, bX, bY, cX, cY)) {
+    if (queryContainsPoint(aX, aY) || queryContainsPoint(bX, bY) || queryContainsPoint(cX, cY)) {
       return true;
     }
 
+    // compute bounding box of triangle
     int tMinX = StrictMath.min(StrictMath.min(aX, bX), cX);
     int tMaxX = StrictMath.max(StrictMath.max(aX, bX), cX);
     int tMinY = StrictMath.min(StrictMath.min(aY, bY), cY);
     int tMaxY = StrictMath.max(StrictMath.max(aY, bY), cY);
 
     // 2. check bounding boxes are disjoint
-    if (tMaxX < minX || tMinX > maxX || tMinY > maxY || tMaxY < minY) {
+    if (this.crossesDateline() == true) {
+      if (boxesAreDisjoint(tMinX, tMaxX, tMinY, tMaxY, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
+          && boxesAreDisjoint(tMinX, tMaxX, tMinY, tMaxY, this.minX, MAX_LON_ENCODED, this.minY, this.maxY)) {
+        return false;
+      }
+    } else if (tMaxX < minX || tMinX > maxX || tMinY > maxY || tMaxY < minY) {
       return false;
     }
 
@@ -139,7 +207,6 @@
       return true;
     }
 
-
     // 4. last ditch effort: check crossings
     if (queryIntersects(aX, aY, bX, bY, cX, cY)) {
       return true;
@@ -148,7 +215,30 @@
   }
 
   /** returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query */
-  private boolean edgeIntersectsQuery(double ax, double ay, double bx, double by) {
+  private static boolean edgeIntersectsBox(int ax, int ay, int bx, int by,
+                                           int minX, int maxX, int minY, int maxY) {
+    // shortcut: if edge is a point (occurs w/ Line shapes); simply check bbox w/ point
+    if (ax == bx && ay == by) {
+      return Rectangle.containsPoint(ay, ax, minY, maxY, minX, maxX);
+    }
+
+    // shortcut: check if either of the end points fall inside the box
+    if (bboxContainsPoint(ax, ay, minX, maxX, minY, maxY)
+        || bboxContainsPoint(bx, by, minX, maxX, minY, maxY)) {
+      return true;
+    }
+
+    // shortcut: check bboxes of edges are disjoint
+    if (boxesAreDisjoint(Math.min(ax, bx), Math.max(ax, bx), Math.min(ay, by), Math.max(ay, by),
+        minX, maxX, minY, maxY)) {
+      return false;
+    }
+
+    // shortcut: edge is a point
+    if (ax == bx && ay == by) {
+      return false;
+    }
+
     // top
     if (orient(ax, ay, bx, by, minX, maxY) * orient(ax, ay, bx, by, maxX, maxY) <= 0 &&
         orient(minX, maxY, maxX, maxY, ax, ay) * orient(minX, maxY, maxX, maxY, bx, by) <= 0) {
@@ -175,6 +265,15 @@
     return false;
   }
 
+  /** returns true if the edge (defined by (ax, ay) (bx, by)) intersects the query */
+  private boolean edgeIntersectsQuery(int ax, int ay, int bx, int by) {
+    if (this.crossesDateline() == true) {
+      return edgeIntersectsBox(ax, ay, bx, by, MIN_LON_ENCODED, this.maxX, this.minY, this.maxY)
+          || edgeIntersectsBox(ax, ay, bx, by, this.minX, MAX_LON_ENCODED, this.minY, this.maxY);
+    }
+    return edgeIntersectsBox(ax, ay, bx, by, this.minX, this.maxX, this.minY, this.maxY);
+  }
+
   /** returns true if the query intersects the provided triangle (in encoded space) */
   private boolean queryIntersects(int ax, int ay, int bx, int by, int cx, int cy) {
     // check each edge of the triangle against the query
@@ -186,6 +285,16 @@
     return false;
   }
 
+  /** utility method to check if two boxes are disjoint */
+  public static boolean boxesAreDisjoint(final int aMinX, final int aMaxX, final int aMinY, final int aMaxY,
+                                          final int bMinX, final int bMaxX, final int bMinY, final int bMaxY) {
+    return (aMaxX < bMinX || aMinX > bMaxX || aMaxY < bMinY || aMinY > bMaxY);
+  }
+
+  public boolean crossesDateline() {
+    return minX > maxX;
+  }
+
   @Override
   public boolean equals(Object o) {
     return sameClassAs(o) && equalsTo(getClass().cast(o));
@@ -193,13 +302,16 @@
 
   @Override
   protected boolean equalsTo(Object o) {
-    return super.equalsTo(o) && Arrays.equals(bbox, ((LatLonShapeBoundingBoxQuery)o).bbox);
+    return super.equalsTo(o)
+        && Arrays.equals(bbox, ((LatLonShapeBoundingBoxQuery)o).bbox)
+        && Arrays.equals(west, ((LatLonShapeBoundingBoxQuery)o).west);
   }
 
   @Override
   public int hashCode() {
     int hash = super.hashCode();
     hash = 31 * hash + Arrays.hashCode(bbox);
+    hash = 31 * hash + Arrays.hashCode(west);
     return hash;
   }
 
@@ -221,6 +333,9 @@
     sb.append(decodeLongitude(minX));
     sb.append(" TO ");
     sb.append(decodeLongitude(maxX));
+    if (maxX < minX) {
+      sb.append(" [crosses dateline!]");
+    }
     sb.append(")");
     return sb.toString();
   }
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java
new file mode 100644
index 0000000..e49b4ec
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeLineQuery.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.document;
+
+import java.util.Arrays;
+
+import org.apache.lucene.document.LatLonShape.QueryRelation;
+import org.apache.lucene.geo.GeoEncodingUtils;
+import org.apache.lucene.geo.Line;
+import org.apache.lucene.geo.Line2D;
+import org.apache.lucene.index.PointValues.Relation;
+import org.apache.lucene.util.NumericUtils;
+
+/**
+ * Finds all previously indexed shapes that intersect the specified arbitrary {@code Line}.
+ * <p>
+ * Note:
+ * <ul>
+ *    <li>{@code QueryRelation.WITHIN} queries are not yet supported</li>
+ *    <li>Dateline crossing is not yet supported</li>
+ * </ul>
+ * <p>
+ * todo:
+ * <ul>
+ *   <li>Add distance support for buffered queries</li>
+ * </ul>
+ * <p>The field must be indexed using
+ * {@link org.apache.lucene.document.LatLonShape#createIndexableFields} added per document.
+ *
+ *  @lucene.experimental
+ **/
+final class LatLonShapeLineQuery extends LatLonShapeQuery {
+  final Line[] lines;
+  final private Line2D line2D;
+
+  public LatLonShapeLineQuery(String field, QueryRelation queryRelation, Line... lines) {
+    super(field, queryRelation);
+    /** line queries do not support within relations, only intersects and disjoint */
+    if (queryRelation == QueryRelation.WITHIN) {
+      throw new IllegalArgumentException("LatLonShapeLineQuery does not support " + QueryRelation.WITHIN + " queries");
+    }
+
+    if (lines == null) {
+      throw new IllegalArgumentException("lines must not be null");
+    }
+    if (lines.length == 0) {
+      throw new IllegalArgumentException("lines must not be empty");
+    }
+    for (int i = 0; i < lines.length; ++i) {
+      if (lines[i] == null) {
+        throw new IllegalArgumentException("line[" + i + "] must not be null");
+      } else if (lines[i].minLon > lines[i].maxLon) {
+        throw new IllegalArgumentException("LatLonShapeLineQuery does not currently support querying across dateline.");
+      }
+    }
+    this.lines = lines.clone();
+    this.line2D = Line2D.create(lines);
+  }
+
+  @Override
+  protected Relation relateRangeBBoxToQuery(int minXOffset, int minYOffset, byte[] minTriangle,
+                                                        int maxXOffset, int maxYOffset, byte[] maxTriangle) {
+    double minLat = GeoEncodingUtils.decodeLatitude(LatLonShape.decodeTriangleBoxVal(minTriangle, minYOffset));
+    double minLon = GeoEncodingUtils.decodeLongitude(LatLonShape.decodeTriangleBoxVal(minTriangle, minXOffset));
+    double maxLat = GeoEncodingUtils.decodeLatitude(LatLonShape.decodeTriangleBoxVal(maxTriangle, maxYOffset));
+    double maxLon = GeoEncodingUtils.decodeLongitude(LatLonShape.decodeTriangleBoxVal(maxTriangle, maxXOffset));
+
+    // check internal node against query
+    return line2D.relate(minLat, maxLat, minLon, maxLon);
+  }
+
+  @Override
+  protected boolean queryMatches(byte[] t) {
+    long a = NumericUtils.sortableBytesToLong(t, 4 * LatLonShape.BYTES);
+    long b = NumericUtils.sortableBytesToLong(t, 5 * LatLonShape.BYTES);
+    long c = NumericUtils.sortableBytesToLong(t, 6 * LatLonShape.BYTES);
+
+    int aX = (int)((a >>> 32) & 0x00000000FFFFFFFFL);
+    int bX = (int)((b >>> 32) & 0x00000000FFFFFFFFL);
+    int cX = (int)((c >>> 32) & 0x00000000FFFFFFFFL);
+    int aY = (int)(a & 0x00000000FFFFFFFFL);
+    int bY = (int)(b & 0x00000000FFFFFFFFL);
+    int cY = (int)(c & 0x00000000FFFFFFFFL);
+
+    double alat = GeoEncodingUtils.decodeLatitude(aY);
+    double alon = GeoEncodingUtils.decodeLongitude(aX);
+    double blat = GeoEncodingUtils.decodeLatitude(bY);
+    double blon = GeoEncodingUtils.decodeLongitude(bX);
+    double clat = GeoEncodingUtils.decodeLatitude(cY);
+    double clon = GeoEncodingUtils.decodeLongitude(cX);
+
+    if (queryRelation == LatLonShape.QueryRelation.WITHIN) {
+      return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) == Relation.CELL_INSIDE_QUERY;
+    }
+    // INTERSECTS
+    return line2D.relateTriangle(alon, alat, blon, blat, clon, clat) != Relation.CELL_OUTSIDE_QUERY;
+  }
+
+  @Override
+  public String toString(String field) {
+    final StringBuilder sb = new StringBuilder();
+    sb.append(getClass().getSimpleName());
+    sb.append(':');
+    if (this.field.equals(field) == false) {
+      sb.append(" field=");
+      sb.append(this.field);
+      sb.append(':');
+    }
+    sb.append("Line(" + lines[0].toGeoJSON() + ")");
+    return sb.toString();
+  }
+
+  @Override
+  protected boolean equalsTo(Object o) {
+    return super.equalsTo(o) && Arrays.equals(lines, ((LatLonShapeLineQuery)o).lines);
+  }
+
+  @Override
+  public int hashCode() {
+    int hash = super.hashCode();
+    hash = 31 * hash + Arrays.hashCode(lines);
+    return hash;
+  }
+}
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
index a587112..2b342a8 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapePolygonQuery.java
@@ -29,7 +29,7 @@
  * Finds all previously indexed shapes that intersect the specified arbitrary.
  *
  * <p>The field must be indexed using
- * {@link org.apache.lucene.document.LatLonShape#createIndexableFields(String, Polygon)} added per document.
+ * {@link org.apache.lucene.document.LatLonShape#createIndexableFields} added per document.
  *
  *  @lucene.experimental
  **/
diff --git a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeQuery.java b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeQuery.java
index be6b758..454b2b8 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeQuery.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/document/LatLonShapeQuery.java
@@ -271,8 +271,8 @@
     return Relation.CELL_CROSSES_QUERY;
   }
 
-  /** utility class for implementing constant score logic specifig to INTERSECT, WITHIN, and DISJOINT */
-  protected static abstract class RelationScorerSupplier extends ScorerSupplier {
+  /** utility class for implementing constant score logic specific to INTERSECT, WITHIN, and DISJOINT */
+  private static abstract class RelationScorerSupplier extends ScorerSupplier {
     PointValues values;
     IntersectVisitor visitor;
     long cost = -1;
diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java
index c7e626d..489e5cf 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Line.java
@@ -98,6 +98,16 @@
     return lons[vertex];
   }
 
+  /** Returns a copy of the internal latitude array */
+  public double[] getLats() {
+    return lats.clone();
+  }
+
+  /** Returns a copy of the internal longitude array */
+  public double[] getLons() {
+    return lons.clone();
+  }
+
   @Override
   public boolean equals(Object o) {
     if (this == o) return true;
diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java
new file mode 100644
index 0000000..0f9441f
--- /dev/null
+++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Line2D.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.lucene.geo;
+
+/**
+ * 2D line implementation represented as a balanced interval tree of edges.
+ * <p>
+ * {@code Line2D} construction takes {@code O(n log n)} time for sorting and tree construction.
+ * {@link #relate relate()} is {@code O(n)}, but for most practical lines it is much faster than brute force.
+ * @lucene.internal
+ */
+public final class Line2D extends EdgeTree {
+
+  private Line2D(Line line) {
+    super(line.minLat, line.maxLat, line.minLon, line.maxLon, line.getLats(), line.getLons());
+  }
+
+  /** create a Line2D edge tree from provided array of Linestrings */
+  public static Line2D create(Line... lines) {
+    Line2D components[] = new Line2D[lines.length];
+    for (int i = 0; i < components.length; ++i) {
+      components[i] = new Line2D(lines[i]);
+    }
+    return (Line2D)createTree(components, 0, components.length - 1, false);
+  }
+}
\ No newline at end of file
diff --git a/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java b/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java
index c68a9df..345a73b 100644
--- a/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java
+++ b/lucene/sandbox/src/java/org/apache/lucene/geo/Tessellator.java
@@ -124,15 +124,11 @@
     // Link points into the circular doubly-linked list in the specified winding order
     if (windingOrder == polygon.getWindingOrder()) {
       for (int i = 0; i < polygon.numPoints(); ++i) {
-        if (lastNode == null || filter(polygon, i, lastNode) == false) {
-          lastNode = insertNode(polygon, startIndex++, i, lastNode);
-        }
+        lastNode = insertNode(polygon, startIndex++, i, lastNode);
       }
     } else {
       for (int i = polygon.numPoints() - 1; i >= 0; --i) {
-        if (lastNode == null || filter(polygon, i, lastNode) == false) {
-          lastNode = insertNode(polygon, startIndex++, i, lastNode);
-        }
+        lastNode = insertNode(polygon, startIndex++, i, lastNode);
       }
     }
     // if first and last node are the same then remove the end node and set lastNode to the start
@@ -142,7 +138,7 @@
     }
 
     // Return the last node in the Doubly-Linked List
-    return lastNode;
+    return filterPoints(lastNode, null);
   }
 
   /** Links every hole into the outer loop, producing a single-ring polygon without holes. **/
@@ -321,7 +317,10 @@
               continue earcut;
             case SPLIT:
               // as a last resort, try splitting the remaining polygon into two
-              splitEarcut(currEar, tessellation, mortonOptimized);
+              if (splitEarcut(currEar, tessellation, mortonOptimized) == false) {
+                //we could not process all points. Tessellation failed
+                tessellation.clear();
+              }
               break;
           }
           break;
@@ -422,6 +421,7 @@
 
       // a self-intersection where edge (v[i-1],v[i]) intersects (v[i+1],v[i+2])
       if (isVertexEquals(a, b) == false
+          && isIntersectingPolygon(a, a.getX(), a.getY(), b.getX(), b.getY()) == false
           && linesIntersect(a.getX(), a.getY(), node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), b.getX(), b.getY())
           && isLocallyInside(a, b) && isLocallyInside(b, a)) {
         // Return the triangulated vertices to the tessellation
@@ -438,8 +438,8 @@
     return node;
   }
 
-  /** Attempt to split a polygon and independently triangulate each side **/
-  private static final void splitEarcut(final Node start, final List<Triangle> tessellation, final boolean mortonIndexed) {
+  /** Attempt to split a polygon and independently triangulate each side. Return true if the polygon was split **/
+  private static final boolean splitEarcut(final Node start, final List<Triangle> tessellation, final boolean mortonIndexed) {
     // Search for a valid diagonal that divides the polygon into two.
     Node searchNode = start;
     Node nextNode;
@@ -454,15 +454,20 @@
           searchNode = filterPoints(searchNode, searchNode.next);
           splitNode  = filterPoints(splitNode, splitNode.next);
           // Attempt to earcut both of the resulting polygons
+          if (mortonIndexed) {
+            sortByMortonWithReset(searchNode);
+            sortByMortonWithReset(splitNode);
+          }
           earcutLinkedList(searchNode, tessellation, State.INIT, mortonIndexed);
           earcutLinkedList(splitNode,  tessellation, State.INIT, mortonIndexed);
           // Finish the iterative search
-          return;
+          return true;
         }
         diagonal = diagonal.next;
       }
       searchNode = searchNode.next;
     } while (searchNode != start);
+    return false;
   }
 
   /** Links two polygon vertices using a bridge. **/
@@ -538,7 +543,9 @@
       if(node.getX() != x0 && node.getY() != y0 && nextNode.getX() != x0
           && nextNode.getY() != y0 && node.getX() != x1 && node.getY() != y1
           && nextNode.getX() != x1 && nextNode.getY() != y1) {
-        return linesIntersect(node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), x0, y0, x1, y1);
+        if (linesIntersect(node.getX(), node.getY(), nextNode.getX(), nextNode.getY(), x0, y0, x1, y1)) {
+          return true;
+        }
       }
       node = nextNode;
     } while (node != start);
@@ -553,6 +560,17 @@
         && (area(bX0, bY0, bX1, bY1, aX0, aY0) > 0) != (area(bX0, bY0, bX1, bY1, aX1, aY1) > 0);
   }
 
+  /** Interlinks polygon nodes in Z-Order; resets the z-order pointers before re-sorting. **/
+  private static final void sortByMortonWithReset(Node start) {
+    Node next = start;
+    do {
+      next.previousZ = next.previous;
+      next.nextZ = next.next;
+      next = next.next;
+    } while (next != start);
+    sortByMorton(start);
+  }
+
   /** Interlinks polygon nodes in Z-Order. **/
   private static final void sortByMorton(Node start) {
     start.previousZ.nextZ = null;
@@ -619,19 +637,6 @@
     } while (numMerges > 1);
   }
 
-  /** utility method to filter a single duplicate or colinear triangle */
-  private static boolean filter(final Polygon polygon, final int i, final Node node) {
-    final double x = polygon.getPolyLon(i);
-    final double y = polygon.getPolyLat(i);
-    final boolean equal = (x == node.getX() && y == node.getY());
-    if (equal == true) {
-      return true;
-    } else if (node.previous == node || node.previous.previous == node) {
-      return false;
-    }
-    return area(node.previous.previous.getX(), node.previous.previous.getY(), node.previous.getX(), node.previous.getY(), x, y) == 0d;
-  }
-
   /** Eliminate colinear/duplicate points from the doubly linked list */
   private static final Node filterPoints(final Node start, Node end) {
     if (start == null) {
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
index 9c9462f..942979b 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/BaseLatLonShapeTestCase.java
@@ -25,6 +25,7 @@
 import org.apache.lucene.document.LatLonShape.QueryRelation;
 import org.apache.lucene.geo.GeoTestUtil;
 import org.apache.lucene.geo.Line;
+import org.apache.lucene.geo.Line2D;
 import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.geo.Polygon2D;
 import org.apache.lucene.geo.Rectangle;
@@ -59,9 +60,13 @@
 import static org.apache.lucene.geo.GeoTestUtil.nextLatitude;
 import static org.apache.lucene.geo.GeoTestUtil.nextLongitude;
 
+/** base test class for {@link TestLatLonLineShapeQueries}, {@link TestLatLonPointShapeQueries},
+ * and {@link TestLatLonPolygonShapeQueries} */
 public abstract class BaseLatLonShapeTestCase extends LuceneTestCase {
 
+  /** name of the LatLonShape indexed field */
   protected static final String FIELD_NAME = "shape";
+  private static final QueryRelation[] POINT_LINE_RELATIONS = {QueryRelation.INTERSECTS, QueryRelation.DISJOINT};
 
   protected abstract ShapeType getShapeType();
 
@@ -69,22 +74,27 @@
     return getShapeType().nextShape();
   }
 
+  /** quantizes a latitude value to be consistent with index encoding */
   protected double quantizeLat(double rawLat) {
     return decodeLatitude(encodeLatitude(rawLat));
   }
 
+  /** quantizes a provided latitude value rounded up to the nearest encoded integer */
   protected double quantizeLatCeil(double rawLat) {
     return decodeLatitude(encodeLatitudeCeil(rawLat));
   }
 
+  /** quantizes a longitude value to be consistent with index encoding */
   protected double quantizeLon(double rawLon) {
     return decodeLongitude(encodeLongitude(rawLon));
   }
 
+  /** quantizes a provided longitude value rounded up to the nearest encoded integer */
   protected double quantizeLonCeil(double rawLon) {
     return decodeLongitude(encodeLongitudeCeil(rawLon));
   }
 
+  /** quantizes a provided polygon to be consistent with the index encoding */
   protected Polygon quantizePolygon(Polygon polygon) {
     double[] lats = new double[polygon.numPoints()];
     double[] lons = new double[polygon.numPoints()];
@@ -95,6 +105,7 @@
     return new Polygon(lats, lons);
   }
 
+  /** quantizes a provided linestring to be consistent with the index encoding */
   protected Line quantizeLine(Line line) {
     double[] lats = new double[line.numPoints()];
     double[] lons = new double[line.numPoints()];
@@ -105,8 +116,30 @@
     return new Line(lats, lons);
   }
 
+  /** use {@link GeoTestUtil#nextPolygon()} to create a random line; TODO: move to GeoTestUtil */
+  public Line nextLine() {
+    Polygon poly = GeoTestUtil.nextPolygon();
+    double[] lats = new double[poly.numPoints() - 1];
+    double[] lons = new double[lats.length];
+    System.arraycopy(poly.getPolyLats(), 0, lats, 0, lats.length);
+    System.arraycopy(poly.getPolyLons(), 0, lons, 0, lons.length);
+
+    return new Line(lats, lons);
+  }
+
+  /**
+   * return a semi-random line used for queries
+   *
+   * note: shapes parameter may be used to ensure some queries intersect indexed shapes
+   **/
+  protected Line randomQueryLine(Object... shapes) {
+    return nextLine();
+  }
+
+  /** creates the array of LatLonShape.Triangle values that are used to index the shape */
   protected abstract Field[] createIndexableFields(String field, Object shape);
 
+  /** adds a shape to a provided document */
   private void addShapeToDoc(String field, Document doc, Object shape) {
     Field[] fields = createIndexableFields(field, shape);
     for (Field f : fields) {
@@ -114,10 +147,17 @@
     }
   }
 
+  /** factory method to create a new bounding box query */
   protected Query newRectQuery(String field, QueryRelation queryRelation, double minLat, double maxLat, double minLon, double maxLon) {
     return LatLonShape.newBoxQuery(field, queryRelation, minLat, maxLat, minLon, maxLon);
   }
 
+  /** factory method to create a new line query */
+  protected Query newLineQuery(String field, QueryRelation queryRelation, Line... lines) {
+    return LatLonShape.newLineQuery(field, queryRelation, lines);
+  }
+
+  /** factory method to create a new polygon query */
   protected Query newPolygonQuery(String field, QueryRelation queryRelation, Polygon... polygons) {
     return LatLonShape.newPolygonQuery(field, queryRelation, polygons);
   }
@@ -196,7 +236,9 @@
 
     // test random bbox queries
     verifyRandomBBoxQueries(reader, shapes);
-    // test random polygon queires
+    // test random line queries
+    verifyRandomLineQueries(reader, shapes);
+    // test random polygon queries
     verifyRandomPolygonQueries(reader, shapes);
 
     IOUtils.close(w, reader, dir);
@@ -212,8 +254,8 @@
         addShapeToDoc(FIELD_NAME, doc, shapes[id]);
       }
       w.addDocument(doc);
-      if (id > 0 && randomInt(100) == 42) {
-        int idToDelete = randomInt(id);
+      if (id > 0 && random().nextInt(100) == 42) {
+        int idToDelete = random().nextInt(id);
         w.deleteDocuments(new Term("id", ""+idToDelete));
         deleted.add(idToDelete);
         if (VERBOSE) {
@@ -227,6 +269,7 @@
     }
   }
 
+  /** test random generated bounding boxes */
   protected void verifyRandomBBoxQueries(IndexReader reader, Object... shapes) throws Exception {
     IndexSearcher s = newSearcher(reader);
 
@@ -241,21 +284,106 @@
       }
 
       // BBox
-      Rectangle rect;
-      // quantizing the bbox may end up w/ bounding boxes crossing dateline...
-      // todo add support for bounding boxes crossing dateline
-      while (true) {
-        rect = GeoTestUtil.nextBoxNotCrossingDateline();
-        if (decodeLongitude(encodeLongitudeCeil(rect.minLon)) <= decodeLongitude(encodeLongitude(rect.maxLon)) &&
-            decodeLatitude(encodeLatitudeCeil(rect.minLat)) <= decodeLatitude(encodeLatitude(rect.maxLat))) {
-          break;
-        }
-      }
+      Rectangle rect = GeoTestUtil.nextBox();
       QueryRelation queryRelation = RandomPicks.randomFrom(random(), QueryRelation.values());
       Query query = newRectQuery(FIELD_NAME, queryRelation, rect.minLat, rect.maxLat, rect.minLon, rect.maxLon);
 
       if (VERBOSE) {
-        System.out.println("  query=" + query);
+        System.out.println("  query=" + query + ", relation=" + queryRelation);
+      }
+
+      final FixedBitSet hits = new FixedBitSet(maxDoc);
+      s.search(query, new SimpleCollector() {
+
+        private int docBase;
+
+        @Override
+        public ScoreMode scoreMode() {
+          return ScoreMode.COMPLETE_NO_SCORES;
+        }
+
+        @Override
+        protected void doSetNextReader(LeafReaderContext context) throws IOException {
+          docBase = context.docBase;
+        }
+
+        @Override
+        public void collect(int doc) throws IOException {
+          hits.set(docBase+doc);
+        }
+      });
+
+      boolean fail = false;
+      NumericDocValues docIDToID = MultiDocValues.getNumericValues(reader, "id");
+      for (int docID = 0; docID < maxDoc; ++docID) {
+        assertEquals(docID, docIDToID.nextDoc());
+        int id = (int) docIDToID.longValue();
+        boolean expected;
+        double qMinLon = quantizeLonCeil(rect.minLon);
+        double qMaxLon = quantizeLon(rect.maxLon);
+        if (liveDocs != null && liveDocs.get(docID) == false) {
+          // document is deleted
+          expected = false;
+        } else if (shapes[id] == null) {
+          expected = false;
+        } else {
+          // check quantized poly against quantized query
+          if (qMinLon > qMaxLon && rect.crossesDateline() == false) {
+            // if the quantization creates a false dateline crossing (because of encodeCeil):
+            // then do not use encodeCeil
+            qMinLon = quantizeLon(rect.minLon);
+          }
+          expected = getValidator(queryRelation).testBBoxQuery(quantizeLatCeil(rect.minLat), quantizeLat(rect.maxLat), qMinLon, qMaxLon, shapes[id]);
+        }
+
+        if (hits.get(docID) != expected) {
+          StringBuilder b = new StringBuilder();
+
+          if (expected) {
+            b.append("FAIL: id=" + id + " should match but did not\n");
+          } else {
+            b.append("FAIL: id=" + id + " should not match but did\n");
+          }
+          b.append("  relation=" + queryRelation + "\n");
+          b.append("  query=" + query + " docID=" + docID + "\n");
+          b.append("  shape=" + shapes[id] + "\n");
+          b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
+          b.append("  rect=Rectangle(lat=" + quantizeLatCeil(rect.minLat) + " TO " + quantizeLat(rect.maxLat) + " lon=" + qMinLon + " TO " + quantizeLon(rect.maxLon) + ")\n");          if (true) {
+            fail("wrong hit (first of possibly more):\n\n" + b);
+          } else {
+            System.out.println(b.toString());
+            fail = true;
+          }
+        }
+      }
+      if (fail) {
+        fail("some hits were wrong");
+      }
+    }
+  }
+
+  /** test random generated lines */
+  protected void verifyRandomLineQueries(IndexReader reader, Object... shapes) throws Exception {
+    IndexSearcher s = newSearcher(reader);
+
+    final int iters = atLeast(75);
+
+    Bits liveDocs = MultiBits.getLiveDocs(s.getIndexReader());
+    int maxDoc = s.getIndexReader().maxDoc();
+
+    for (int iter = 0; iter < iters; ++iter) {
+      if (VERBOSE) {
+        System.out.println("\nTEST: iter=" + (iter + 1) + " of " + iters + " s=" + s);
+      }
+
+      // line
+      Line queryLine = randomQueryLine(shapes);
+      Line2D queryLine2D = Line2D.create(queryLine);
+      QueryRelation queryRelation = RandomPicks.randomFrom(random(), POINT_LINE_RELATIONS);
+      Query query = newLineQuery(FIELD_NAME, queryRelation, queryLine);
+
+      if (VERBOSE) {
+        System.out.println("  query=" + query + ", relation=" + queryRelation);
       }
 
       final FixedBitSet hits = new FixedBitSet(maxDoc);
@@ -291,9 +419,7 @@
         } else if (shapes[id] == null) {
           expected = false;
         } else {
-          // check quantized poly against quantized query
-          expected = getValidator(queryRelation).testBBoxQuery(quantizeLatCeil(rect.minLat), quantizeLat(rect.maxLat),
-              quantizeLonCeil(rect.minLon), quantizeLon(rect.maxLon), shapes[id]);
+          expected = getValidator(queryRelation).testLineQuery(queryLine2D, shapes[id]);
         }
 
         if (hits.get(docID) != expected) {
@@ -308,7 +434,7 @@
           b.append("  query=" + query + " docID=" + docID + "\n");
           b.append("  shape=" + shapes[id] + "\n");
           b.append("  deleted?=" + (liveDocs != null && liveDocs.get(docID) == false));
-          b.append("  rect=Rectangle(" + quantizeLatCeil(rect.minLat) + " TO " + quantizeLat(rect.maxLat) + " lon=" + quantizeLonCeil(rect.minLon) + " TO " + quantizeLon(rect.maxLon) + ")\n");
+          b.append("  queryPolygon=" + queryLine.toGeoJSON());
           if (true) {
             fail("wrong hit (first of possibly more):\n\n" + b);
           } else {
@@ -323,6 +449,7 @@
     }
   }
 
+  /** test random generated polygons */
   protected void verifyRandomPolygonQueries(IndexReader reader, Object... shapes) throws Exception {
     IndexSearcher s = newSearcher(reader);
 
@@ -343,7 +470,7 @@
       Query query = newPolygonQuery(FIELD_NAME, queryRelation, queryPolygon);
 
       if (VERBOSE) {
-        System.out.println("  query=" + query);
+        System.out.println("  query=" + query + ", relation=" + queryRelation);
       }
 
       final FixedBitSet hits = new FixedBitSet(maxDoc);
@@ -481,9 +608,11 @@
     }
   }
 
+  /** validator class used to test query results against "ground truth" */
   protected abstract class Validator {
     protected QueryRelation queryRelation = QueryRelation.INTERSECTS;
     public abstract boolean testBBoxQuery(double minLat, double maxLat, double minLon, double maxLon, Object shape);
+    public abstract boolean testLineQuery(Line2D line2d, Object shape);
     public abstract boolean testPolygonQuery(Polygon2D poly2d, Object shape);
 
     public void setRelation(QueryRelation relation) {
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java
index 9a91232..3919e17 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonLineShapeQueries.java
@@ -16,12 +16,19 @@
  */
 package org.apache.lucene.document;
 
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import org.apache.lucene.document.LatLonShape.QueryRelation;
+import org.apache.lucene.geo.EdgeTree;
+import org.apache.lucene.geo.GeoTestUtil;
+import org.apache.lucene.geo.GeoUtils;
 import org.apache.lucene.geo.Line;
-import org.apache.lucene.geo.Polygon;
+import org.apache.lucene.geo.Line2D;
 import org.apache.lucene.geo.Polygon2D;
 import org.apache.lucene.index.PointValues.Relation;
 
+import static org.apache.lucene.geo.GeoUtils.MAX_LON_INCL;
+import static org.apache.lucene.geo.GeoUtils.MIN_LON_INCL;
+
 /** random bounding box and polygon query tests for random generated {@link Line} types */
 public class TestLatLonLineShapeQueries extends BaseLatLonShapeTestCase {
 
@@ -33,6 +40,32 @@
   }
 
   @Override
+  protected Line randomQueryLine(Object... shapes) {
+    if (random().nextInt(100) == 42) {
+      // we want to ensure some cross, so randomly generate lines that share vertices with the indexed point set
+      int maxBound = (int)Math.floor(shapes.length * 0.1d);
+      if (maxBound < 2) {
+        maxBound = shapes.length;
+      }
+      double[] lats = new double[RandomNumbers.randomIntBetween(random(), 2, maxBound)];
+      double[] lons = new double[lats.length];
+      for (int i = 0, j = 0; j < lats.length && i < shapes.length; ++i, ++j) {
+        Line l = (Line) (shapes[i]);
+        if (random().nextBoolean() && l != null) {
+          int v = random().nextInt(l.numPoints() - 1);
+          lats[j] = l.getLat(v);
+          lons[j] = l.getLon(v);
+        } else {
+          lats[j] = GeoTestUtil.nextLatitude();
+          lons[j] = GeoTestUtil.nextLongitude();
+        }
+      }
+      return new Line(lats, lons);
+    }
+    return nextLine();
+  }
+
+  @Override
   protected Field[] createIndexableFields(String field, Object line) {
     return LatLonShape.createIndexableFields(field, (Line)line);
   }
@@ -49,14 +82,42 @@
       Line l = (Line)shape;
       if (queryRelation == QueryRelation.WITHIN) {
         // within: bounding box of shape should be within query box
-        return minLat <= quantizeLat(l.minLat) && maxLat >= quantizeLat(l.maxLat)
-            && minLon <= quantizeLon(l.minLon) && maxLon >= quantizeLon(l.maxLon);
+        double lMinLat = quantizeLat(l.minLat);
+        double lMinLon = quantizeLon(l.minLon);
+        double lMaxLat = quantizeLat(l.maxLat);
+        double lMaxLon = quantizeLon(l.maxLon);
+
+        if (minLon > maxLon) {
+          // crosses dateline:
+          return minLat <= lMinLat && maxLat >= lMaxLat
+              && ((GeoUtils.MIN_LON_INCL <= lMinLon && maxLon >= lMaxLon)
+              || (minLon <= lMinLon && GeoUtils.MAX_LON_INCL >= lMaxLon));
+        }
+        return minLat <= lMinLat && maxLat >= lMaxLat
+            && minLon <= lMinLon && maxLon >= lMaxLon;
       }
 
-      // to keep it simple we convert the bbox into a polygon and use poly2d
-      Polygon2D p = Polygon2D.create(new Polygon[] {new Polygon(new double[] {minLat, minLat, maxLat, maxLat, minLat},
-          new double[] {minLon, maxLon, maxLon, minLon, minLon})});
-      return testLine(p, l);
+      Line2D line = Line2D.create(quantizeLine(l));
+      Relation r;
+      if (minLon > maxLon) {
+        // crosses dateline:
+        r = line.relate(minLat, maxLat, MIN_LON_INCL, maxLon);
+        if (r == Relation.CELL_OUTSIDE_QUERY) {
+          r = line.relate(minLat, maxLat, minLon, MAX_LON_INCL);
+        }
+      } else {
+        r = line.relate(minLat, maxLat, minLon, maxLon);
+      }
+
+      if (queryRelation == QueryRelation.DISJOINT) {
+        return r == Relation.CELL_OUTSIDE_QUERY;
+      }
+      return r != Relation.CELL_OUTSIDE_QUERY;
+    }
+
+    @Override
+    public boolean testLineQuery(Line2D line2d, Object shape) {
+      return testLine(line2d, (Line) shape);
     }
 
     @Override
@@ -64,7 +125,7 @@
       return testLine(poly2d, (Line) shape);
     }
 
-    private boolean testLine(Polygon2D queryPoly, Line line) {
+    private boolean testLine(EdgeTree queryPoly, Line line) {
       double ax, ay, bx, by, temp;
       Relation r;
       for (int i = 0, j = 1; j < line.numPoints(); ++i, ++j) {
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
index df924fe..96b026c 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPointShapeQueries.java
@@ -16,7 +16,12 @@
  */
 package org.apache.lucene.document;
 
+import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
 import org.apache.lucene.document.LatLonShape.QueryRelation;
+import org.apache.lucene.geo.EdgeTree;
+import org.apache.lucene.geo.GeoTestUtil;
+import org.apache.lucene.geo.Line;
+import org.apache.lucene.geo.Line2D;
 import org.apache.lucene.geo.Polygon2D;
 import org.apache.lucene.index.PointValues.Relation;
 
@@ -36,6 +41,31 @@
   }
 
   @Override
+  protected Line randomQueryLine(Object... shapes) {
+    if (random().nextInt(100) == 42) {
+      // we want to ensure some cross, so randomly generate lines that share vertices with the indexed point set
+      int maxBound = (int)Math.floor(shapes.length * 0.1d);
+      if (maxBound < 2) {
+        maxBound = shapes.length;
+      }
+      double[] lats = new double[RandomNumbers.randomIntBetween(random(), 2, maxBound)];
+      double[] lons = new double[lats.length];
+      for (int i = 0, j = 0; j < lats.length && i < shapes.length; ++i, ++j) {
+        Point p = (Point) (shapes[i]);
+        if (random().nextBoolean() && p != null) {
+          lats[j] = p.lat;
+          lons[j] = p.lon;
+        } else {
+          lats[j] = GeoTestUtil.nextLatitude();
+          lons[j] = GeoTestUtil.nextLongitude();
+        }
+      }
+      return new Line(lats, lons);
+    }
+    return nextLine();
+  }
+
+  @Override
   protected Field[] createIndexableFields(String field, Object point) {
     Point p = (Point)point;
     return LatLonShape.createIndexableFields(field, p.lat, p.lon);
@@ -51,9 +81,13 @@
     @Override
     public boolean testBBoxQuery(double minLat, double maxLat, double minLon, double maxLon, Object shape) {
       Point p = (Point)shape;
-      double lat = decodeLatitude(encodeLatitude(p.lat));
-      double lon = decodeLongitude(encodeLongitude(p.lon));
-      boolean isDisjoint = lat < minLat || lat > maxLat || lon < minLon || lon > maxLon;
+      double lat = quantizeLat(p.lat);
+      double lon = quantizeLon(p.lon);
+      boolean isDisjoint = lat < minLat || lat > maxLat;
+
+      isDisjoint = isDisjoint || ((minLon > maxLon)
+          ? lon < minLon && lon > maxLon
+          : lon < minLon || lon > maxLon);
       if (queryRelation == QueryRelation.DISJOINT) {
         return isDisjoint;
       }
@@ -61,12 +95,20 @@
     }
 
     @Override
+    public boolean testLineQuery(Line2D line2d, Object shape) {
+      return testPoint(line2d, (Point) shape);
+    }
+
+    @Override
     public boolean testPolygonQuery(Polygon2D poly2d, Object shape) {
-      Point p = (Point) shape;
+      return testPoint(poly2d, (Point) shape);
+    }
+
+    private boolean testPoint(EdgeTree tree, Point p) {
       double lat = decodeLatitude(encodeLatitude(p.lat));
       double lon = decodeLongitude(encodeLongitude(p.lon));
       // for consistency w/ the query we test the point as a triangle
-      Relation r = poly2d.relateTriangle(lon, lat, lon, lat, lon, lat);
+      Relation r = tree.relateTriangle(lon, lat, lon, lat, lon, lat);
       if (queryRelation == QueryRelation.WITHIN) {
         return r == Relation.CELL_INSIDE_QUERY;
       } else if (queryRelation == QueryRelation.DISJOINT) {
@@ -75,8 +117,4 @@
       return r != Relation.CELL_OUTSIDE_QUERY;
     }
   }
-
-  @Override
-  public void testRandomTiny() throws Exception {
-  }
 }
diff --git a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
index 03837a0..24cba64 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/document/TestLatLonPolygonShapeQueries.java
@@ -19,11 +19,16 @@
 import java.util.List;
 
 import org.apache.lucene.document.LatLonShape.QueryRelation;
+import org.apache.lucene.geo.EdgeTree;
+import org.apache.lucene.geo.Line2D;
 import org.apache.lucene.geo.Polygon;
 import org.apache.lucene.geo.Polygon2D;
 import org.apache.lucene.geo.Tessellator;
 import org.apache.lucene.index.PointValues.Relation;
 
+import static org.apache.lucene.geo.GeoUtils.MAX_LON_INCL;
+import static org.apache.lucene.geo.GeoUtils.MIN_LON_INCL;
+
 /** random bounding box and polygon query tests for random indexed {@link Polygon} types */
 public class TestLatLonPolygonShapeQueries extends BaseLatLonShapeTestCase {
 
@@ -66,12 +71,32 @@
       Polygon p = (Polygon)shape;
       if (queryRelation == QueryRelation.WITHIN) {
         // within: bounding box of shape should be within query box
-        return minLat <= quantizeLat(p.minLat) && maxLat >= quantizeLat(p.maxLat)
-            && minLon <= quantizeLon(p.minLon) && maxLon >= quantizeLon(p.maxLon);
+        double pMinLat = quantizeLat(p.minLat);
+        double pMinLon = quantizeLon(p.minLon);
+        double pMaxLat = quantizeLat(p.maxLat);
+        double pMaxLon = quantizeLon(p.maxLon);
+
+        if (minLon > maxLon) {
+          // crosses dateline:
+          return minLat <= pMinLat && maxLat >= pMaxLat
+              && ((MIN_LON_INCL <= pMinLon && maxLon >= pMaxLon)
+              ||  (minLon <= pMinLon && MAX_LON_INCL >= pMaxLon));
+        }
+        return minLat <= pMinLat && maxLat >= pMaxLat
+            && minLon <= pMinLon && maxLon >= pMaxLon;
       }
 
       Polygon2D poly = Polygon2D.create(quantizePolygon(p));
-      Relation r = poly.relate(minLat, maxLat, minLon, maxLon);
+      Relation r;
+      if (minLon > maxLon) {
+        // crosses dateline:
+        r = poly.relate(minLat, maxLat, MIN_LON_INCL, maxLon);
+        if (r == Relation.CELL_OUTSIDE_QUERY) {
+          r = poly.relate(minLat, maxLat, minLon, MAX_LON_INCL);
+        }
+      } else {
+        r = poly.relate(minLat, maxLat, minLon, maxLon);
+      }
       if (queryRelation == QueryRelation.DISJOINT) {
         return r == Relation.CELL_OUTSIDE_QUERY;
       }
@@ -79,11 +104,20 @@
     }
 
     @Override
+    public boolean testLineQuery(Line2D query, Object shape) {
+      return testPolygon(query, (Polygon) shape);
+    }
+
+    @Override
     public boolean testPolygonQuery(Polygon2D query, Object shape) {
-      List<Tessellator.Triangle> tessellation = Tessellator.tessellate((Polygon) shape);
+      return testPolygon(query, (Polygon) shape);
+    }
+
+    private boolean testPolygon(EdgeTree tree, Polygon shape) {
+      List<Tessellator.Triangle> tessellation = Tessellator.tessellate(shape);
       for (Tessellator.Triangle t : tessellation) {
         // we quantize the triangle for consistency with the index
-        Relation r = query.relateTriangle(quantizeLon(t.getLon(0)), quantizeLat(t.getLat(0)),
+        Relation r = tree.relateTriangle(quantizeLon(t.getLon(0)), quantizeLat(t.getLat(0)),
             quantizeLon(t.getLon(1)), quantizeLat(t.getLat(1)),
             quantizeLon(t.getLon(2)), quantizeLat(t.getLat(2)));
         if (queryRelation == QueryRelation.DISJOINT) {
diff --git a/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java b/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java
index 055e025..82ba5b4 100644
--- a/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java
+++ b/lucene/sandbox/src/test/org/apache/lucene/geo/TestTessellator.java
@@ -64,4 +64,36 @@
     List<Tessellator.Triangle> result = Tessellator.tessellate(polygons[0]);
     assertEquals(result.size(), 84);
   }
+
+  public void testLUCENE8534() throws ParseException {
+    String geoJson = "{\"type\":\"Polygon\",\"coordinates\":[[[168.412605,-32.061828],[168.41260500337557,-32.06164814731918],[168.263154,-32.061754],[168.263074,-31.795333],[168.2631866330167,-31.79533292075007],[168.26293615809584,-31.55183198959802],[168.26271862830876,-31.55183199836296]," +
+        "[168.26260885857246,-31.79551898342183],[168.262799,-31.795519],[168.262922,-32.061969],[168.113391,-32.061955],[168.1136947020627,-31.797506925167987],[168.1134623401242,-31.7975067304478],[168.112867,-32.061933],[167.96342,-32.061572],[167.964447,-31.795078],[167.96462554945853,-31.79507843013861]," +
+        "[167.96521264500555,-31.551376165945904],[167.965145,-31.551376],[167.9663078329189,-31.287013079577566],[167.966251,-31.287013],[167.9664724470441,-31.186852765132446],[167.966135,-31.286996],[167.96583002270634,-31.28699509215832],[167.96514242732414,-31.530648904745615],[167.96518,-31.530649]," +
+        "[167.964244373485,-31.795342905910022],[167.964267,-31.795343],[167.963051,-32.06191],[167.813527,-32.061286],[167.81515841152935,-31.796764131690956],[167.815107,-31.796764],[167.8163675951437,-31.55101526478777],[167.81635023954297,-31.551015225373174],[167.814827,-31.796834]," +
+        "[167.81479823247224,-31.796833898826222],[167.813495,-32.061159],[167.664068,-32.060513],[167.66581,-31.794011],[167.6658519100183,-31.794011179736117],[167.6677495759609,-31.550438401064135],[167.667432,-31.550437],[167.66930180157829,-31.286073839134556],[167.669105,-31.286073],[167.670807,-31.019532]," +
+        "[167.818843,-31.020159],[167.8175723936035,-31.284543327213736],[167.81766095836642,-31.284543526532044],[167.818971,-31.020062],[167.967033,-31.020499],[167.96703262843647,-31.020609267886275],[168.114968,-31.020815],[168.1149445990616,-31.05814524188174],[168.114978,-31.020912],[168.26306,-31.021035]," +
+        "[168.2631849793437,-31.203987591682104],[168.263163,-31.021002],[168.411259,-31.020914],[168.41125954741193,-31.02123593258559],[168.5589863328454,-31.020786105561243],[168.558986,-31.020705],[168.707027,-31.020199],[168.70828992266655,-31.242361611483734],[168.707298,-31.020426],[168.855538,-31.019789]," +
+        "[168.85713808565947,-31.284233200286536],[168.857209,-31.284233],[168.8583969293829,-31.54547348363567],[168.86057,-31.796021],[168.86004803213373,-31.796023826818654],[168.862202,-32.060514],[168.712722,-32.061376],[168.71099229524427,-31.796760977737968],[168.7108263042178,-31.79676167516991],[168.712468,-32.061301]," +
+        "[168.56291,-32.061787],[168.561684,-31.795261],[168.56198761104602,-31.795260018704994],[168.560821,-31.530975],[168.56092374559077,-31.530974570518158],[168.56001677082173,-31.287057906497665],[168.5597021283975,-31.287058866102726],[168.5607530382453,-31.530880020491022],[168.560769,-31.53088]," +
+        "[168.56079128925168,-31.539754620482725],[168.560842,-31.55152],[168.56082083893278,-31.551520031401303],[168.56143311036655,-31.7953001584517],[168.561622,-31.7953],[168.562045,-32.0617],[168.412605,-32.061828]]," +
+        "[[168.41212499436773,-31.68171617103951],[168.41200593405762,-31.551740860609502],[168.411912,-31.551741],[168.41154546767467,-31.416898111348704],[168.41158059852074,-31.53102923335134],[168.411729,-31.531029],[168.41212499436773,-31.68171617103951]]," +
+        "[[168.7083938476212,-31.28652950649234],[168.70945084576658,-31.485690997091577],[168.70886199577689,-31.28667838236468],[168.708488,-31.28668],[168.7084873259438,-31.28652918474386],[168.7083938476212,-31.28652950649234]]," +
+        "[[168.71121460687698,-31.795031659971823],[168.71136127361123,-31.79503081865431],[168.71038567290682,-31.657182838382653],[168.71121460687698,-31.795031659971823]]," +
+        "[[167.81624041598312,-31.53023516975434],[167.81634270442586,-31.530235525706665],[167.81676369867318,-31.434841665952604],[167.81624041598312,-31.53023516975434]]]}";
+    Polygon[] polygons =Polygon.fromGeoJSON(geoJson);
+    List<Tessellator.Triangle> result = Tessellator.tessellate(polygons[0]);
+    assertEquals(113, result.size());
+  }
+
+  public void testInvalidPolygon()  throws Exception {
+    String wkt = "POLYGON((0 0, 1 1, 0 1, 1 0, 0 0))";
+    Polygon polygon = (Polygon)SimpleWKTShapeParser.parse(wkt);
+    expectThrows( IllegalArgumentException.class, () -> {Tessellator.tessellate(polygon); });
+  }
+
+  public void testLUCENE8550()  throws Exception {
+    String wkt = "POLYGON((24.04725 59.942,24.04825 59.94125,24.04875 59.94125,24.04875 59.94175,24.048 59.9425,24.0475 59.94275,24.0465 59.94225,24.046 59.94225,24.04575 59.9425,24.04525 59.94225,24.04725 59.942))";
+    Polygon polygon = (Polygon)SimpleWKTShapeParser.parse(wkt);
+    assertTrue(Tessellator.tessellate(polygon).size() == 8);
+  }
 }
\ No newline at end of file
diff --git a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java
index 88537f4..4e1ecb5 100644
--- a/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java
+++ b/lucene/spatial3d/src/java/org/apache/lucene/spatial3d/Geo3DUtil.java
@@ -80,6 +80,10 @@
   /** Returns smallest double that would encode to int x. */
   // NOTE: keep this package private!!
   static double decodeValueFloor(int x) {
+    assert x <= MAX_ENCODED_VALUE && x >= MIN_ENCODED_VALUE;
+    if (x == MIN_ENCODED_VALUE) {
+      return -MAX_VALUE;
+    }
     return x * DECODE;
   }
   
@@ -105,7 +109,10 @@
   /** Returns largest double that would encode to int x. */
   // NOTE: keep this package private!!
   static double decodeValueCeil(int x) {
-    assert x < Integer.MAX_VALUE;
+    assert x <= MAX_ENCODED_VALUE && x >= MIN_ENCODED_VALUE;
+    if (x == MAX_ENCODED_VALUE) {
+      return MAX_VALUE;
+    }
     return Math.nextDown((x+1) * DECODE);
   }
   
diff --git a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
index df67a8d..66fa3cd 100644
--- a/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
+++ b/lucene/spatial3d/src/test/org/apache/lucene/spatial3d/TestGeo3DPoint.java
@@ -1204,6 +1204,40 @@
     }
   }
 
+  public void testMinValueQuantization(){
+    int encoded = Geo3DUtil.MIN_ENCODED_VALUE;
+    double minValue= -PlanetModel.WGS84.getMaximumMagnitude();
+    //Normal encoding
+    double decoded = Geo3DUtil.decodeValue(encoded);
+    assertEquals(minValue, decoded, 0d);
+    assertEquals(encoded, Geo3DUtil.encodeValue(decoded));
+    //Encoding floor
+    double decodedFloor = Geo3DUtil.decodeValueFloor(encoded);
+    assertEquals(minValue, decodedFloor, 0d);
+    assertEquals(encoded, Geo3DUtil.encodeValue(decodedFloor));
+    //Encoding ceiling
+    double decodedCeiling = Geo3DUtil.decodeValueCeil(encoded);
+    assertTrue(decodedCeiling > minValue);
+    assertEquals(encoded, Geo3DUtil.encodeValue(decodedCeiling));
+  }
+
+  public void testMaxValueQuantization(){
+    int encoded = Geo3DUtil.MAX_ENCODED_VALUE;
+    double maxValue= PlanetModel.WGS84.getMaximumMagnitude();
+    //Normal encoding
+    double decoded = Geo3DUtil.decodeValue(encoded);
+    assertEquals(maxValue, decoded, 0d);
+    assertEquals(encoded, Geo3DUtil.encodeValue(decoded));
+    //Encoding floor
+    double decodedFloor = Geo3DUtil.decodeValueFloor(encoded);
+    assertTrue(decodedFloor <  maxValue);
+    assertEquals(encoded, Geo3DUtil.encodeValue(decodedFloor));
+    //Encoding ceiling
+    double decodedCeiling = Geo3DUtil.decodeValueCeil(encoded);
+    assertEquals(maxValue, decodedCeiling, 0d);
+    assertEquals(encoded, Geo3DUtil.encodeValue(decodedCeiling));
+  }
+
   // poached from TestGeoEncodingUtils.testLatitudeQuantization:
 
   /**
@@ -1215,10 +1249,10 @@
     Random random = random();
     for (int i = 0; i < 10000; i++) {
       int encoded = random.nextInt();
-      if (encoded < Geo3DUtil.MIN_ENCODED_VALUE) {
+      if (encoded <= Geo3DUtil.MIN_ENCODED_VALUE) {
         continue;
       }
-      if (encoded > Geo3DUtil.MAX_ENCODED_VALUE) {
+      if (encoded >= Geo3DUtil.MAX_ENCODED_VALUE) {
         continue;
       }
       double min = encoded * Geo3DUtil.DECODE;
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 2c79aaa..a5616bf 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -16,88 +16,6 @@
 servlet container in the directory named "example".
 See the Solr tutorial at https://lucene.apache.org/solr/guide/solr-tutorial.html
 
-==================  8.0.0 ==================
-
-Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
-
-Versions of Major Components
----------------------
-Apache Tika 1.19.1
-Carrot2 3.16.0
-Velocity 1.7 and Velocity Tools 2.0
-Apache ZooKeeper 3.4.11
-Jetty 9.4.11.v20180605
-
-Upgrade Notes
-----------------------
-
-* LUCENE-7996: The 'func' query parser now returns scores that are equal to 0
-  when a negative value is produced. This change is due to the fact that
-  Lucene now requires scores to be positive. (Adrien Grand)
-
-* SOLR-11882: SolrMetric registries retained references to SolrCores when closed. A
-  change of SolrMetricMAnager.registerGauge and SolrMetricProducer.initializeMetrics
-  method signatures was required to fix it. Third party components that use this API
-  need to be updated. (Eros Taborelli, Erick Erickson, ab)
-
-* LUCENE-8267: Memory codecs have been removed from the codebase (MemoryPostings,
-  MemoryDocValues). If you used postingsFormat="Memory" or docValuesFormat="Memory"
-  then either remove it to use the default or experiment with one of the others. (Dawid Weiss)
-
-* SOLR-12586: The date format patterns used by ParseDateFieldUpdateProcessorFactory (present in "schemaless mode")
-  are now interpreted by Java 8's java.time.DateTimeFormatter instead of Joda Time.  The pattern language is very
-  similar but not the same.  Typically, simply update the pattern by changing an uppercase 'Z' to lowercase 'z' and
-  that's it.  For the current recommended set of patterns in schemaless mode, see "Schemaless Mode" in the ref guide,
-  or simply examine the default configSet.  Also note that the set of patterns (formats) here have
-  expanded from before to subsume those patterns previously handled by the "extract" contrib (Solr Cell / Tika).
-  (David Smiley, Bar Rotstein)
-
-* SOLR-12593: The "extraction" contrib (Solr Cell) no longer does any date parsing, and thus no longer has the
-  "date.formats" configuration.  To ensure date strings are properly parsed, use ParseDateFieldUpdateProcessorFactory
-  (an URP) commonly registered with the name "parse-date" in "schemaless mode".  (David Smiley, Bar Rotstein)
-
-* SOLR-12754: The UnifiedHighlighter hl.weightMatches now defaults to true.  If there are unforseen highlight problems,
-  this may be the culprit.
-
-New Features
-----------------------
-
-* SOLR-12591: Expand the set of recognized date format patterns of schemaless mode to subsume those handled by the
-  "extract" contrib (Solr Cell / Tika).  This is primarily a change in configuration of the default configSet for more
-  patterns, but also included enabling "lenient" parsing in ParseDateFieldUpdateProcessorFactory.  The default
-  locale was changed from ROOT to en_US since well-known patterns assume this locale.
-  (David Smiley, Bar Rotstein)
-
-* SOLR-12879: MinHash query parser that builds queries providing a measure of Jaccard similarity (Andy Hind via Tommaso Teofili)
-
-* SOLR-12593: The default configSet now includes an "ignored_*" dynamic field.  (David Smiley)
-
-Optimizations
-----------------------
-
-* SOLR-12725: ParseDateFieldUpdateProcessorFactory should reuse ParsePosition. (ab)
-
-Other Changes
-----------------------
-
-* SOLR-12614: Make "Nodes" view the default in AdminUI "Cloud" tab (janhoy)
-
-* SOLR-12586: Upgrade ParseDateFieldUpdateProcessorFactory (present in "schemaless mode") to use Java 8's
-  java.time.DateTimeFormatter instead of Joda time (see upgrade notes).  "Lenient" is enabled.  Removed Joda Time dependency.
-  (David Smiley, Bar Rotstein)
-
-* SOLR-5163: edismax now throws an exception when qf refers to a nonexistent field (Charles Sanders, David Smiley)
-
-* SOLR-12805: Store previous term (generation) of replica when start recovery process (Cao Manh Dat)
-
-* SOLR-12652: Remove SolrMetricManager.overridableRegistryName method (Peter Somogyi via David Smiley)
-
-* LUCENE-8513: SlowCompositeReaderWrapper now uses MultiTerms directly instead of MultiFields (David Smiley)
-
-* SOLR-11812: Remove backward compatibility of old LIR implementation in 8.0 (Cao Manh Dat)
-
-* SOLR-12620: Remove the Admin UI Cloud -> Graph (Radial) view (janhoy)
-
 ==================  7.6.0 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
@@ -129,6 +47,8 @@
   be set to -1 during collection creation to fall back to the old behavior of unlimited maxShardsPerNode when using
   autoscaling policy.
 
+* SOLR-12861: Added a Solr factory for ByteBuffersDirectory, which will replace deprecated RAMDirectory in Solr 9.0.
+
 New Features
 ----------------------
 
@@ -170,6 +90,8 @@
 
 * SOLR-12862: Add log10 Stream Evaluator and allow the pow Stream Evaluator to accept a vector of exponents (Joel Bernstein)
 
+* SOLR-12938: Cluster Status returns results for aliases, instead of throwing exceptions (Gus Heck)
+
 Other Changes
 ----------------------
 
@@ -190,6 +112,8 @@
 
 * SOLR-12793: Move TestCloudJSONFacetJoinDomain amd TestCloudJSONFacetSKG to the facet test package (Varun Thacker)
 
+* SOLR-12861: Add Solr factory for ByteBuffersDirectory.
+
 Bug Fixes
 ----------------------
 
@@ -224,6 +148,11 @@
 
 * SOLR-7557: Fix parsing of child documents using queryAndStreamResponse (Marvin Bredal Lillehaug/Stian Østerhaug via janhoy)
 
+* SOLR-12875: fix ArrayIndexOutOfBoundsException when unique(field) or uniqueBlock(_root_) is 
+  used with DVHASH method in json.facet. (Tim Underwood via Mikhail Khludnev)
+
+* SOLR-12023: Autoscaling policy engine shuffles replicas needlessly (noble)
+
 Improvements
 ----------------------
 
@@ -512,12 +441,13 @@
 * SOLR-11654: Time Routed Alias will now route documents to the ideal shard of a collection, thus avoiding a hop.
   Usually documents were already routed well but not always.  (Gus Heck, David Smiley)
 
+* SOLR-12305: When a replica is applying updates, some kind of updates can skip buffering for faster recovery.
+  (Cao Manh Dat)
+
 * SOLR-11598: The export handler does not limit users to 4 sort fields and is now unlimited. However the speed at
   which we can export is directly proportional to the number of sort fields specified. This change also allows streaming
   expressions to group by on more than 4 fields. (Aroop Ganguly, Amrit Sarkar, Varun Thacker)
 
-* SOLR-12305: When a replica is applying updates, some kind of updates can skip buffering for faster recovery.
-  (Cao Manh Dat)
 
 * SOLR-12509: Improve SplitShardCmd performance and reliability. A new method of splitting has been
   introduced (splitMethod=link) which uses hard-linking of index files when possible, resulting in
@@ -567,8 +497,6 @@
 
 * SOLR-12617: Remove Commons BeanUtils as a dependency (Varun Thacker)
 
-* SOLR-11008: Use a lighter config for MetricsHandlerTest and ensure the core is up before the test starts (Varun Thacker)
-
 * SOLR-11766: Move Streaming Expressions section in Ref Guide to be a top-level section. (Cassandra Targett)
 
 * SOLR-12656: ShardSplitTest should extend AbstractFullDistribZkTestBase instead of BasicDistributedZkTest. (shalin)
@@ -678,6 +606,11 @@
 * SOLR-9480: A new 'relatedness()' aggregate function for JSON Faceting to enable building Semantic
   Knowledge Graphs. (Trey Grainger, hossman)
 
+* SOLR-11453: Configuring slowQueryThresholdMillis logs slow requests to a separate file - solr_slow_requests.log.
+  (Shawn Heisey, Remko Popma, Varun Thacker)
+
+* SOLR-12401: Add getValue() and setValue() Stream Evaluators (Joel Bernstein, janhoy)
+
 * SOLR-12378: Support missing versionField on indexed docs in DocBasedVersionConstraintsURP.
   (Oliver Bates, Michael Braun via Mark Miller)
 
@@ -694,11 +627,6 @@
 * SOLR-12328: JSON Facet API: Domain change with graph query.
   (Daniel Meehl, Kevin Watters, yonik)
 
-* SOLR-11453: Configuring slowQueryThresholdMillis logs slow requests to a separate file - solr_slow_requests.log.
-  (Shawn Heisey, Remko Popma, Varun Thacker)
-
-* SOLR-12401: Add getValue() and setValue() Stream Evaluators (Joel Bernstein, janhoy)
-
 * SOLR-11779, SOLR-12438: Basic long-term collection of aggregated metrics. Historical data is
   maintained as multi-resolution time series using round-robin databases in the '.system'
   collection. New /admin/metrics/history API allows retrieval of this data in numeric
@@ -759,6 +687,11 @@
 
 * SOLR-11929: UpdateLog metrics are not initialized on core reload.  (ab, Steve Rowe)
 
+* SOLR-11882: SolrMetric registries retained references to SolrCores when closed. A
+  change of SolrMetricManager.registerGauge and SolrMetricProducer.initializeMetrics
+  method signatures was required to fix it. Third party components may continue to use the old API
+  but should be updated to avoid this bug (Eros Taborelli, Erick Erickson, ab)
+
 * SOLR-12199: TestReplicationHandler.doTestRepeater(): TEST_PORT interpolation failure:
   Server refused connection at: http://127.0.0.1:TEST_PORT/solr  (Mikhail Khludnev, Dawid Weiss, Steve Rowe)
 
@@ -843,14 +776,17 @@
 * SOLR-3567: Spellcheck custom parameters not being passed through due to wrong prefix creation.
   (Josh Lucas via shalin)
 
-* SOLR-12358: Autoscaling suggestions fail randomly with sorting (noble)
-
 * SOLR-12294: update processors loaded from runtime jars fail to load if they are specified
-  in an update processor chain (noble)
+   in an update processor chain (noble)
+
+* SOLR-12358: Autoscaling suggestions fail randomly with sorting (noble)
 
 * SOLR-12314: Use http timeout's defined in solr.xml for creating ConcurrentUpdateSolrClient during
   indexing requests between leader and replica ( Mark Miller, Varun Thacker)
 
+* SOLR-12290: Do not close any servlet streams and improve our servlet stream closing prevention code for users
+  and devs. (Mark Miller)
+
 * SOLR-12374: SnapShooter.getIndexCommit can forget to decref the searcher; though it's not clear in practice when.
   (David Smiley)
 
@@ -898,7 +834,7 @@
 * SOLR-11880: Avoid creating new exceptions for every request made to MDCAwareThreadPoolExecutor by distributed
   search and update operations. (Varun Thacker, shalin)
 
-* SOLR-12375: Optimize Lucene ScoreMode use:
+* SOLR-12375: Optimize Lucene needsScore / ScoreMode use:
   A non-cached filter query could be told incorrectly that scores were needed.
   The /export (ExportQParserPlugin) would declare incorrectly that scores are needed.
   Expanded docs (expand component) could be told incorrectly that scores are needed.  (David Smiley)
@@ -991,8 +927,6 @@
 * SOLR-12435: Fix bin/solr help and ref guide text to describe ZK_HOST in solr.in.sh/solr.in.cmd
   as an alternative to -z cmdline param. (Steve Rowe)
 
-* SOLR-12428: Solr LTR jar now included in _default configset's solrconfig.xml (Ishan Chattopadhyaya)
-
 ==================  7.3.1 ==================
 
 Consult the LUCENE_CHANGES.txt file for additional, low level, changes in this release.
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
index e41297e..27daec2 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
@@ -91,7 +91,7 @@
     super(TriggerEventType.INDEXSIZE, name);
     TriggerUtils.validProperties(validProperties,
         ABOVE_BYTES_PROP, ABOVE_DOCS_PROP, BELOW_BYTES_PROP, BELOW_DOCS_PROP,
-        COLLECTIONS_PROP, MAX_OPS_PROP, SPLIT_FUZZ_PROP);
+        COLLECTIONS_PROP, MAX_OPS_PROP, SPLIT_METHOD_PROP, SPLIT_FUZZ_PROP);
   }
 
   @Override
@@ -171,7 +171,7 @@
     } catch (Exception e) {
       throw new TriggerValidationException(getName(), MAX_OPS_PROP, "invalid value: '" + maxOpsStr + "': " + e.getMessage());
     }
-    String methodStr = (String)properties.getOrDefault(CommonAdminParams.SPLIT_METHOD, SolrIndexSplitter.SplitMethod.LINK.toLower());
+    String methodStr = (String)properties.getOrDefault(CommonAdminParams.SPLIT_METHOD, SolrIndexSplitter.SplitMethod.REWRITE.toLower());
     splitMethod = SolrIndexSplitter.SplitMethod.get(methodStr);
     if (splitMethod == null) {
       throw new TriggerValidationException(getName(), SPLIT_METHOD_PROP, "Unknown value '" + CommonAdminParams.SPLIT_METHOD +
diff --git a/solr/core/src/java/org/apache/solr/core/ByteBuffersDirectoryFactory.java b/solr/core/src/java/org/apache/solr/core/ByteBuffersDirectoryFactory.java
new file mode 100644
index 0000000..ba27650
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/ByteBuffersDirectoryFactory.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+import java.io.IOException;
+
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.lucene.store.SingleInstanceLockFactory;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.SolrException.ErrorCode;
+
+/**
+ * Factory to instantiate {@link org.apache.lucene.store.ByteBuffersDirectory}
+ */
+public class ByteBuffersDirectoryFactory extends EphemeralDirectoryFactory {
+
+  @Override
+  protected LockFactory createLockFactory(String rawLockType) throws IOException {
+    if (!(rawLockType == null || DirectoryFactory.LOCK_TYPE_SINGLE.equalsIgnoreCase(rawLockType.trim()))) {
+      throw new SolrException(ErrorCode.FORBIDDEN,
+          "ByteBuffersDirectory can only be used with the '"+DirectoryFactory.LOCK_TYPE_SINGLE+"' lock factory type.");
+    }
+    return new SingleInstanceLockFactory();
+  }
+
+  @Override
+  protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) throws IOException {
+    return new ByteBuffersDirectory(lockFactory);
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
index 9ebac77..4daae31 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterStatus.java
@@ -26,6 +26,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.Aliases;
@@ -92,6 +93,17 @@
       collectionsMap = Collections.singletonMap(collection, clusterState.getCollectionOrNull(collection));
     }
 
+    boolean isAlias = aliasVsCollections.containsKey(collection);
+    boolean didNotFindCollection = collectionsMap.get(collection) == null;
+
+    if (didNotFindCollection && isAlias) {
+      // In this case this.collection is an alias name, not a collection
+      // get all collections and filter out collections not in the alias
+      collectionsMap = clusterState.getCollectionsMap().entrySet().stream()
+          .filter((entry) -> aliasVsCollections.get(collection).contains(entry.getKey()))
+          .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
+    }
+
     NamedList<Object> collectionProps = new SimpleOrderedMap<>();
 
     for (Map.Entry<String, DocCollection> entry : collectionsMap.entrySet()) {
diff --git a/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java b/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
index 01f8875..e01c958 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/FacetComponent.java
@@ -650,7 +650,7 @@
       (fieldToOverRequest, FacetParams.FACET_SORT, defaultSort);
 
     int shardLimit = requestedLimit + offset;
-    int shardMinCount = requestedMinCount;
+    int shardMinCount = Math.min(requestedMinCount, 1);
 
     // per-shard mincount & overrequest
     if ( FacetParams.FACET_SORT_INDEX.equals(sort) && 
@@ -670,7 +670,6 @@
       if ( 0 < requestedLimit ) {
         shardLimit = doOverRequestMath(shardLimit, overRequestRatio, overRequestCount);
       }
-      shardMinCount = Math.min(requestedMinCount, 1);
     } 
     sreq.params.set(paramStart + FacetParams.FACET_LIMIT, shardLimit);
     sreq.params.set(paramStart + FacetParams.FACET_PIVOT_MINCOUNT, shardMinCount);
diff --git a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java
index 5c2b07f..d9aeaea 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/PivotFacetFieldValueCollection.java
@@ -118,14 +118,36 @@
    */
   public List<PivotFacetValue> getNextLevelValuesToRefine() {
     final int numRefinableValues = getExplicitValuesListSize();
-    if (facetFieldOffset < numRefinableValues) {
-      final int offsetPlusCount = (facetFieldLimit >= 0) 
-        ? Math.min(facetFieldLimit + facetFieldOffset, numRefinableValues) 
-        : numRefinableValues;
-      return getExplicitValuesList().subList(facetFieldOffset,  offsetPlusCount);
-    } else {
+    if (numRefinableValues < facetFieldOffset) {
       return Collections.<PivotFacetValue>emptyList();
     }
+    
+    final int offsetPlusCount = (facetFieldLimit >= 0) 
+      ? Math.min(facetFieldLimit + facetFieldOffset, numRefinableValues) 
+      : numRefinableValues;
+    
+    if (1 < facetFieldMinimumCount && facetFieldSort.equals(FacetParams.FACET_SORT_INDEX)) {
+      // we have to skip any values that (still) don't meet the mincount
+      //
+      // TODO: in theory we could avoid this extra check by trimming sooner (SOLR-6331)
+      // but since that's a destructive op that blows away the `valuesMap` which we (might?) still need
+      // (and pre-emptively skips the offsets) we're avoiding re-working that optimization
+      // for now until/unless someone gives it more careful thought...
+      final List<PivotFacetValue> results = new ArrayList<>(numRefinableValues);
+      for (PivotFacetValue pivotValue : explicitValues) {
+        if (pivotValue.getCount() >= facetFieldMinimumCount) {
+          results.add(pivotValue);
+          if (numRefinableValues <= results.size()) {
+            break;
+          }
+        }
+      }
+      return results;
+    }
+    
+    // in the non "sort==count OR mincount==1" situation, we can just return the first N values
+    // because any viable candidate is already in the top N
+    return getExplicitValuesList().subList(facetFieldOffset,  offsetPlusCount);
   }
   
   /**
diff --git a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java
index 4c0b244..c246c21 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/FacetFieldProcessorByHashDV.java
@@ -430,12 +430,20 @@
     // Our countAcc is virtual, so this is not needed:
     // countAcc.incrementCount(slot, 1);
 
-    super.collectFirstPhase(segDoc, slot, slotNum -> {
-        Comparable value = calc.bitsToValue(val);
-        return new SlotContext(sf.getType().getFieldQuery(null, sf, calc.formatValue(value)));
-      });
+    super.collectFirstPhase(segDoc, slot, slotContext);
   }
 
+  /**
+   * SlotContext to use during all {@link SlotAcc} collection.
+   *
+   * This avoids a memory allocation for each invocation of collectValFirstPhase.
+   */
+  private IntFunction<SlotContext> slotContext = (slotNum) -> {
+    long val = table.vals[slotNum];
+    Comparable value = calc.bitsToValue(val);
+    return new SlotContext(sf.getType().getFieldQuery(null, sf, calc.formatValue(value)));
+  };
+
   private void doRehash(LongCounts table) {
     if (collectAcc == null && allBucketsAcc == null) return;
 
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java
index c2bfec7..42ddbb5 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueBlockAgg.java
@@ -53,7 +53,13 @@
       Arrays.fill(counts, 0);
       Arrays.fill(lastSeenValuesPerSlot, Integer.MIN_VALUE);
     }
-    
+
+    @Override
+    public void resize(Resizer resizer) {
+      lastSeenValuesPerSlot = resizer.resize(lastSeenValuesPerSlot, Integer.MIN_VALUE);
+      super.resize(resizer);
+    }
+
     @Override
     public Object getValue(int slot) throws IOException {
       return counts[slot];
diff --git a/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java b/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java
index 17575fb..5e85fd1 100644
--- a/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java
+++ b/solr/core/src/java/org/apache/solr/search/facet/UniqueSlotAcc.java
@@ -148,5 +148,8 @@
   @Override
   public void resize(Resizer resizer) {
     arr = resizer.resize(arr, null);
+    if (counts != null) {
+      counts = resizer.resize(counts, 0);
+    }
   }
 }
\ No newline at end of file
diff --git a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
index f2be2a1..ba20c9a 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/CloneFieldUpdateProcessorFactory.java
@@ -188,6 +188,7 @@
   /** @see #dest */
   private Pattern pattern = null;
 
+  @SuppressWarnings("WeakerAccess")
   protected final FieldNameSelector getSourceSelector() {
     if (null != srcSelector) return srcSelector;
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
index 46f77c0..59be08a 100644
--- a/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/DistribCursorPagingTest.java
@@ -253,12 +253,14 @@
                     "fl", "id",
                     "facet", "true",
                     "facet.field", "str",
+                    "facet.mincount", "1",
                     "json.nl", "map",
                     "sort", intsort + " asc, id asc");
     rsp = query(p(params, CURSOR_MARK_PARAM, cursorMark));
     assertNumFound(8, rsp);
     assertStartsAt(0, rsp);
     assertDocList(rsp, 7, 0, 3);
+    assertEquals(3, rsp.getFacetField("str").getValues().size());
     assertEquals("a", rsp.getFacetField("str").getValues().get(0).getName());
     assertEquals(4, rsp.getFacetField("str").getValues().get(0).getCount());
     cursorMark = assertHashNextCursorMark(rsp);
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
index adbaf2b..97ae487 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCloudPivotFacet.java
@@ -81,6 +81,11 @@
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  // because floating point addition can depend on the order of operations, we ignore
+  // any stats that can be lossy -- the purpose of testing stats here is just to sanity check
+  // that the basic hooks between pivot faceting and stats.field work, and these let us do that
+  private static final String USE_STATS = "count=true missing=true min=true max=true";
+  
   // param used by test purely for tracing & validation
   private static String TRACE_MIN = "_test_min";
   // param used by test purely for tracing & validation
@@ -110,8 +115,6 @@
   //commented 2-Aug-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // 28-June-2018
   public void test() throws Exception {
 
-    sanityCheckAssertNumerics();
-
     waitForThingsToLevelOut(30000); // TODO: why would we have to wait?
     // 
     handle.clear();
@@ -141,7 +144,6 @@
     final String[] fieldNames = fieldNameSet.toArray(new String[fieldNameSet.size()]);
     Arrays.sort(fieldNames); // need determinism when picking random fields
 
-
     for (int i = 0; i < 5; i++) {
 
       String q = "*:*";
@@ -161,11 +163,11 @@
         // if we are doing stats, then always generated the same # of STATS_FIELD
         // params, using multiple tags from a fixed set, but with diff fieldName values.
         // later, each pivot will randomly pick a tag.
-        baseP.add(StatsParams.STATS_FIELD, "{!key=sk1 tag=st1,st2}" +
+        baseP.add(StatsParams.STATS_FIELD, "{!key=sk1 tag=st1,st2 "+USE_STATS+"}" +
                   pickRandomStatsFields(fieldNames));
-        baseP.add(StatsParams.STATS_FIELD, "{!key=sk2 tag=st2,st3}" +
+        baseP.add(StatsParams.STATS_FIELD, "{!key=sk2 tag=st2,st3 "+USE_STATS+"}" +
                   pickRandomStatsFields(fieldNames));
-        baseP.add(StatsParams.STATS_FIELD, "{!key=sk3 tag=st3,st4}" +
+        baseP.add(StatsParams.STATS_FIELD, "{!key=sk3 tag=st3,st4 "+USE_STATS+"}" +
                   pickRandomStatsFields(fieldNames));
         // NOTE: there's a chance that some of those stats field names
         // will be the same, but if so, all the better to test that edge case
@@ -387,21 +389,18 @@
         // regular stats, compare everything...
 
         assert actualStats != null;
-        String msg = " of " + statsKey + " => " + message;
+        try {
+          String msg = " of " + statsKey;
+          
+          // no wiggle room, these should always be exactly equals, regardless of field type
+          assertEquals("Count" + msg, pivotStats.getCount(), actualStats.getCount());
+          assertEquals("Missing" + msg, pivotStats.getMissing(), actualStats.getMissing());
+          assertEquals("Min" + msg, pivotStats.getMin(), actualStats.getMin());
+          assertEquals("Max" + msg, pivotStats.getMax(), actualStats.getMax());
 
-        // no wiggle room, these should always be exactly equals, regardless of field type
-        assertEquals("Count" + msg, pivotStats.getCount(), actualStats.getCount());
-        assertEquals("Missing" + msg, pivotStats.getMissing(), actualStats.getMissing());
-        assertEquals("Min" + msg, pivotStats.getMin(), actualStats.getMin());
-        assertEquals("Max" + msg, pivotStats.getMax(), actualStats.getMax());
-
-        // precision loss can affect these in some field types depending on shards used
-        // and the order that values are accumulated
-        assertNumerics("Sum" + msg, pivotStats.getSum(), actualStats.getSum());
-        assertNumerics("Mean" + msg, pivotStats.getMean(), actualStats.getMean());
-        assertNumerics("Stddev" + msg, pivotStats.getStddev(), actualStats.getStddev());
-        assertNumerics("SumOfSquares" + msg, 
-                      pivotStats.getSumOfSquares(), actualStats.getSumOfSquares());
+        } catch (AssertionError e) {
+          throw new AssertionError("Stats: Pivot[" + pivotStats + "] <==> Actual[" + actualStats + "]  => " + message, e);
+        }
       }
     }
 
@@ -691,147 +690,6 @@
   }
 
   /**
-   * Given two objects returned as stat values asserts that they are they are either both <code>null</code> 
-   * or all of the following are true:
-   * <ul>
-   *  <li>They have the exact same class</li>
-   *  <li>They are both Numbers or they are both Dates -- in the later case, their millisecond's 
-   *      since epoch are used for all subsequent comparisons
-   *  </li>
-   *  <li>Either:
-   *   <ul>
-   *    <li>They are Integer or Long objects with the exact same <code>longValue()</code></li>
-   *    <li>They are Float or Double objects and their <code>doubleValue()</code>s
-   *        are equally-ish with a "small" epsilon (relative to the scale of the expected value)
-   *    </li>
-   *   </ul>
-   *  </li>
-   * <ul>
-   *
-   * @see Date#getTime
-   * @see Number#doubleValue
-   * @see Number#longValue
-   * @see #assertEquals(String,double,double,double)
-   */
-  private void assertNumerics(String msg, Object expected, Object actual) {
-    if (null == expected || null == actual) {
-      assertEquals(msg, expected, actual);
-      return;
-    }
-    
-    assertEquals(msg + " ... values do not have the same type: " + expected + " vs " + actual,
-                 expected.getClass(), actual.getClass());
-
-    if (expected instanceof Date) {
-      expected = ((Date)expected).getTime();
-      actual = ((Date)actual).getTime();
-      msg = msg + " (w/dates converted to ms)";
-    }
-    
-    assertTrue(msg + " ... expected is not a Number: " + 
-               expected + "=>" + expected.getClass(),
-               expected instanceof Number);
-        
-    if (expected instanceof Long || expected instanceof Integer) {
-      assertEquals(msg, ((Number)expected).longValue(), ((Number)actual).longValue());
-      
-    } else if (expected instanceof Float || expected instanceof Double) {
-      // compute an epsilon relative to the size of the expected value
-      double expect = ((Number)expected).doubleValue();
-      double epsilon = Math.abs(expect * 0.1E-7D);
-
-      assertEquals(msg, expect, ((Number)actual).doubleValue(), epsilon);
-      
-    } else {
-      fail(msg + " ... where did this come from: " + expected.getClass());
-    }
-  }
-
-  /**
-   * test the test
-   */
-  private void sanityCheckAssertNumerics() {
-    
-    assertNumerics("Null?", null, null);
-    assertNumerics("large a",
-        2.3005390038169265E9,
-        2.300539003816927E9);
-    assertNumerics("large b",
-        1.2722582464444444E9,
-        1.2722582464444442E9);
-    assertNumerics("small",
-        2.3005390038169265E-9,
-        2.300539003816927E-9);
-    
-    assertNumerics("large a negative",
-        -2.3005390038169265E9,
-        -2.300539003816927E9);
-    assertNumerics("large b negative",
-        -1.2722582464444444E9,
-        -1.2722582464444442E9);
-    assertNumerics("small negative",
-        -2.3005390038169265E-9,
-        -2.300539003816927E-9);
-    
-    assertNumerics("high long", Long.MAX_VALUE, Long.MAX_VALUE);
-    assertNumerics("high int", Integer.MAX_VALUE, Integer.MAX_VALUE);
-    assertNumerics("low long", Long.MIN_VALUE, Long.MIN_VALUE);
-    assertNumerics("low int", Integer.MIN_VALUE, Integer.MIN_VALUE);
-
-    // NOTE: can't use 'fail' in these try blocks, because we are catching AssertionError
-    // (ie: the code we are expecting to 'fail' is an actual test assertion generator)
-    
-    for (Object num : new Object[] { new Date(42), 42, 42L, 42.0F }) {
-      try {
-        assertNumerics("non-null", null, num);
-        throw new RuntimeException("did not get assertion failure when expected was null");
-      } catch (AssertionError e) {}
-      
-      try {
-        assertNumerics("non-null", num, null);
-        throw new RuntimeException("did not get assertion failure when actual was null");
-      } catch (AssertionError e) {}
-    }
-  
-    try {
-      assertNumerics("non-number", "foo", 42);
-      throw new RuntimeException("did not get assertion failure when expected was non-number");
-    } catch (AssertionError e) {}
-
-    try {
-      assertNumerics("non-number", 42, "foo");
-      throw new RuntimeException("did not get assertion failure when actual was non-number");
-    } catch (AssertionError e) {}
-  
-    try {
-      assertNumerics("diff",
-          2.3005390038169265E9,
-          2.267272520100462E9);
-      throw new RuntimeException("did not get assertion failure when args are big & too diff");
-    } catch (AssertionError e) {}
-    try {
-      assertNumerics("diff",
-          2.3005390038169265E-9,
-          2.267272520100462E-9);
-      throw new RuntimeException("did not get assertion failure when args are small & too diff");
-    } catch (AssertionError e) {}
-  
-    try {
-      assertNumerics("diff long", Long.MAX_VALUE, Long.MAX_VALUE-1);
-      throw new RuntimeException("did not get assertion failure when args are diff longs");
-    } catch (AssertionError e) {}
-    try {
-      assertNumerics("diff int", Integer.MAX_VALUE, Integer.MAX_VALUE-1);
-      throw new RuntimeException("did not get assertion failure when args are diff ints");
-    } catch (AssertionError e) {}
-    try {
-      assertNumerics("diff date", new Date(42), new Date(43));
-      throw new RuntimeException("did not get assertion failure when args are diff dates");
-    } catch (AssertionError e) {}
-
-  }
-
-  /**
    * @see #assertNumFound
    * @see #assertPivotCountsAreCorrect(SolrParams,SolrParams)
    */
diff --git a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java
index 574ae37..531e154 100644
--- a/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java
+++ b/solr/core/src/test/org/apache/solr/cloud/api/collections/TestCollectionAPI.java
@@ -418,13 +418,17 @@
 
   private void clusterStatusAliasTest() throws Exception  {
     try (CloudSolrClient client = createCloudClient(null)) {
+      // create an alias named myalias
       ModifiableSolrParams params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.CREATEALIAS.toString());
       params.set("name", "myalias");
       params.set("collections", DEFAULT_COLLECTION + "," + COLLECTION_NAME);
       SolrRequest request = new QueryRequest(params);
       request.setPath("/admin/collections");
+
       client.request(request);
+
+      // request a collection that's part of an alias
       params = new ModifiableSolrParams();
       params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
       params.set("collection", DEFAULT_COLLECTION);
@@ -433,7 +437,6 @@
 
       NamedList<Object> rsp = client.request(request);
 
-
       NamedList<Object> cluster = (NamedList<Object>) rsp.get("cluster");
       assertNotNull("Cluster state should not be null", cluster);
       Map<String, String> aliases = (Map<String, String>) cluster.get("aliases");
@@ -448,6 +451,38 @@
       assertEquals("conf1", collection.get("configName"));
       List<String> collAlias = (List<String>) collection.get("aliases");
       assertEquals("Aliases not found", Lists.newArrayList("myalias"), collAlias);
+
+      // status request on the alias itself
+      params = new ModifiableSolrParams();
+      params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
+      params.set("collection", "myalias");
+      request = new QueryRequest(params);
+      request.setPath("/admin/collections");
+
+      // SOLR-12938 - this should NOT cause an exception
+      rsp = client.request(request);
+
+      cluster = (NamedList<Object>) rsp.get("cluster");
+      assertNotNull("Cluster state should not be null", cluster);
+      collections = (NamedList<Object>) cluster.get("collections");
+      assertNotNull("Collections should not be null in cluster state", collections);
+      assertNotNull(collections.get(DEFAULT_COLLECTION));
+      assertNotNull(collections.get(COLLECTION_NAME));
+
+      // status request on something neither an alias nor a collection itself
+      params = new ModifiableSolrParams();
+      params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
+      params.set("collection", "notAnAliasOrCollection");
+      request = new QueryRequest(params);
+      request.setPath("/admin/collections");
+
+      // SOLR-12938 - this should still cause an exception
+      try {
+        client.request(request);
+        fail("requesting status for 'notAnAliasOrCollection' should cause an exception from CLUSTERSTATUS" );
+      } catch (RuntimeException e) {
+        // success
+      }
     }
   }
 
diff --git a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java
index 76b1a55..314ddbd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java
+++ b/solr/core/src/test/org/apache/solr/cloud/autoscaling/IndexSizeTriggerTest.java
@@ -46,6 +46,7 @@
 import org.apache.solr.common.SolrInputDocument;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.params.CollectionParams;
+import org.apache.solr.common.params.CommonAdminParams;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.NamedList;
@@ -208,6 +209,9 @@
         } else {
           fail("unexpected shard name " + p.second());
         }
+        Map<String, Object> params = (Map<String, Object>)op.getHints().get(Suggester.Hint.PARAMS);
+        assertNotNull("params are null: " + op, params);
+        assertEquals("splitMethod: " + op, "rewrite", params.get(CommonAdminParams.SPLIT_METHOD));
       }
       assertTrue("shard1 should be split", shard1);
       assertTrue("shard2 should be split", shard2);
@@ -854,6 +858,83 @@
     assertEquals("number of ops: " + ops, 3, ops.size());
   }
 
+  @Test
+  public void testSplitMethodConfig() throws Exception {
+    String collectionName = "testSplitMethod_collection";
+    CollectionAdminRequest.Create create = CollectionAdminRequest.createCollection(collectionName,
+        "conf", 2, 2).setMaxShardsPerNode(2);
+    create.process(solrClient);
+    CloudTestUtils.waitForState(cloudManager, "failed to create " + collectionName, collectionName,
+        CloudTestUtils.clusterShape(2, 2, false, true));
+
+    long waitForSeconds = 3 + random().nextInt(5);
+    Map<String, Object> props = createTriggerProps(waitForSeconds);
+    props.put(CommonAdminParams.SPLIT_METHOD, "link");
+    try (IndexSizeTrigger trigger = new IndexSizeTrigger("index_size_trigger6")) {
+      trigger.configure(loader, cloudManager, props);
+      trigger.init();
+      trigger.setProcessor(noFirstRunProcessor);
+      trigger.run();
+
+      for (int i = 0; i < 25; i++) {
+        SolrInputDocument doc = new SolrInputDocument("id", "id-" + i);
+        solrClient.add(collectionName, doc);
+      }
+      solrClient.commit(collectionName);
+
+      AtomicBoolean fired = new AtomicBoolean(false);
+      AtomicReference<TriggerEvent> eventRef = new AtomicReference<>();
+      trigger.setProcessor(event -> {
+        if (fired.compareAndSet(false, true)) {
+          eventRef.set(event);
+          long currentTimeNanos = timeSource.getTimeNs();
+          long eventTimeNanos = event.getEventTime();
+          long waitForNanos = TimeUnit.NANOSECONDS.convert(waitForSeconds, TimeUnit.SECONDS) - WAIT_FOR_DELTA_NANOS;
+          if (currentTimeNanos - eventTimeNanos <= waitForNanos) {
+            fail("processor was fired before the configured waitFor period: currentTimeNanos=" + currentTimeNanos + ", eventTimeNanos=" +  eventTimeNanos + ",waitForNanos=" + waitForNanos);
+          }
+        } else {
+          fail("IndexSizeTrigger was fired more than once!");
+        }
+        return true;
+      });
+      trigger.run();
+      TriggerEvent ev = eventRef.get();
+      // waitFor delay - should not produce any event yet
+      assertNull("waitFor not elapsed but produced an event", ev);
+      timeSource.sleep(TimeUnit.MILLISECONDS.convert(waitForSeconds + 1, TimeUnit.SECONDS));
+      trigger.run();
+      ev = eventRef.get();
+      assertNotNull("should have fired an event", ev);
+      List<TriggerEvent.Op> ops = (List<TriggerEvent.Op>) ev.getProperty(TriggerEvent.REQUESTED_OPS);
+      assertNotNull("should contain requestedOps", ops);
+      assertEquals("number of ops: " + ops, 2, ops.size());
+      boolean shard1 = false;
+      boolean shard2 = false;
+      for (TriggerEvent.Op op : ops) {
+        assertEquals(CollectionParams.CollectionAction.SPLITSHARD, op.getAction());
+        Set<Pair<String, String>> hints = (Set<Pair<String, String>>)op.getHints().get(Suggester.Hint.COLL_SHARD);
+        assertNotNull("hints", hints);
+        assertEquals("hints", 1, hints.size());
+        Pair<String, String> p = hints.iterator().next();
+        assertEquals(collectionName, p.first());
+        if (p.second().equals("shard1")) {
+          shard1 = true;
+        } else if (p.second().equals("shard2")) {
+          shard2 = true;
+        } else {
+          fail("unexpected shard name " + p.second());
+        }
+        Map<String, Object> params = (Map<String, Object>)op.getHints().get(Suggester.Hint.PARAMS);
+        assertNotNull("params are null: " + op, params);
+        assertEquals("splitMethod: " + op, "link", params.get(CommonAdminParams.SPLIT_METHOD));
+      }
+      assertTrue("shard1 should be split", shard1);
+      assertTrue("shard2 should be split", shard2);
+    }
+
+  }
+
   private Map<String, Object> createTriggerProps(long waitForSeconds) {
     Map<String, Object> props = new HashMap<>();
     props.put("event", "indexSize");
diff --git a/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java
new file mode 100644
index 0000000..45c7a15
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/core/ByteBuffersDirectoryFactoryTest.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.core;
+
+import java.io.IOException;
+
+import org.apache.lucene.store.Directory;
+import org.apache.lucene.store.LockFactory;
+import org.apache.lucene.store.ByteBuffersDirectory;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.core.DirectoryFactory.DirContext;
+
+/**
+ * Test-case for ByteBuffersDirectoryFactory
+ */
+public class ByteBuffersDirectoryFactoryTest extends SolrTestCaseJ4 {
+
+  public void testOpenReturnsTheSameForSamePath() throws IOException {
+    final Directory directory = new ByteBuffersDirectory();
+    ByteBuffersDirectoryFactory factory = new ByteBuffersDirectoryFactory()  {
+      @Override
+      protected Directory create(String path, LockFactory lockFactory, DirContext dirContext) {
+        return directory;
+      }
+    };
+    String path = "/fake/path";
+    Directory dir1 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
+    Directory dir2 = factory.get(path, DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
+    assertEquals("ByteBuffersDirectoryFactory should not create new instance of ByteBuffersDirectory " +
+        "every time open() is called for the same path", dir1, dir2);
+
+    factory.release(dir1);
+    factory.release(dir2);
+    factory.close();
+  }
+
+  public void testOpenSucceedForEmptyDir() throws IOException {
+    ByteBuffersDirectoryFactory factory = new ByteBuffersDirectoryFactory();
+    Directory dir = factory.get("/fake/path", DirContext.DEFAULT, DirectoryFactory.LOCK_TYPE_SINGLE);
+    assertNotNull("ByteBuffersDirectoryFactory should create ByteBuffersDirectory even if the path doesn't lead " +
+        "to index directory on the file system", dir);
+    factory.release(dir);
+    factory.close();
+  }
+
+  public void testIndexRetrieve() throws Exception {
+    System.setProperty("solr.directoryFactory", "solr.ByteBuffersDirectoryFactory");
+    initCore("solrconfig-minimal.xml","schema-minimal.xml");
+    DirectoryFactory factory = h.getCore().getDirectoryFactory();
+    assertTrue("Found: " + factory.getClass().getName(), factory instanceof ByteBuffersDirectoryFactory);
+    for (int i = 0 ; i < 5 ; ++i) {
+      assertU(adoc("id", "" + i, "a_s", "_" + i + "_"));
+    }
+    assertU(commit());
+    assertQ(req("q", "a_s:_0_"), "//result[@numFound = '1']");
+    deleteCore();
+  }
+}
diff --git a/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java
index 8714054..5c6bce7 100644
--- a/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/CachingDirectoryFactoryTest.java
@@ -49,8 +49,11 @@
   
   @Test
   public void stressTest() throws Exception {
-    final CachingDirectoryFactory df = new RAMDirectoryFactory();
-    
+    doStressTest(new RAMDirectoryFactory());
+    doStressTest(new ByteBuffersDirectoryFactory());
+  }
+  
+  private void doStressTest(final CachingDirectoryFactory df) throws Exception {
     List<Thread> threads = new ArrayList<>();
     int threadCount = 11;
     for (int i = 0; i < threadCount; i++) {
diff --git a/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java b/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java
index 869a4d2..dfe50bd 100755
--- a/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java
+++ b/solr/core/src/test/org/apache/solr/core/DirectoryFactoryTest.java
@@ -67,45 +67,54 @@
   }
 
   @Test
-  public void testGetDataHome() throws Exception {
+  public void testGetDataHomeRAMDirectory() throws Exception {
+    doTestGetDataHome(RAMDirectoryFactory.class);
+  }
+  
+  @Test
+  public void testGetDataHomeByteBuffersDirectory() throws Exception {
+    doTestGetDataHome(ByteBuffersDirectoryFactory.class);
+  }
+  
+  private void doTestGetDataHome(Class<? extends DirectoryFactory> directoryFactoryClass) throws Exception {
     NodeConfig config = loadNodeConfig("/solr/solr-solrDataHome.xml");
     CoreContainer cc = new CoreContainer(config);
     Properties cp = cc.getContainerProperties();
-    RAMDirectoryFactory rdf = new RAMDirectoryFactory();
-    rdf.initCoreContainer(cc);
-    rdf.init(new NamedList());
+    DirectoryFactory df = directoryFactoryClass.newInstance();
+    df.initCoreContainer(cc);
+    df.init(new NamedList());
 
     // No solr.data.home property set. Absolute instanceDir
-    assertDataHome("/tmp/inst1/data", "/tmp/inst1", rdf, cc);
+    assertDataHome("/tmp/inst1/data", "/tmp/inst1", df, cc);
 
     // Simulate solr.data.home set in solrconfig.xml <directoryFactory> tag
     NamedList args = new NamedList();
     args.add("solr.data.home", "/solrdata/");
-    rdf.init(args);
-    assertDataHome("/solrdata/inst_dir/data", "inst_dir", rdf, cc);
+    df.init(args);
+    assertDataHome("/solrdata/inst_dir/data", "inst_dir", df, cc);
     
     // solr.data.home set with System property, and relative path
     System.setProperty("solr.data.home", "solrdata");
     config = loadNodeConfig("/solr/solr-solrDataHome.xml");
     cc = new CoreContainer(config);
-    rdf = new RAMDirectoryFactory();
-    rdf.initCoreContainer(cc);
-    rdf.init(new NamedList());
-    assertDataHome(solrHome.resolve("solrdata/inst_dir/data").toAbsolutePath().toString(), "inst_dir", rdf, cc);
+    df = directoryFactoryClass.newInstance();
+    df.initCoreContainer(cc);
+    df.init(new NamedList());
+    assertDataHome(solrHome.resolve("solrdata/inst_dir/data").toAbsolutePath().toString(), "inst_dir", df, cc);
     // Test parsing last component of instanceDir, and using custom dataDir
-    assertDataHome(solrHome.resolve("solrdata/myinst/mydata").toAbsolutePath().toString(), "/path/to/myinst", rdf, cc, "dataDir", "mydata");
+    assertDataHome(solrHome.resolve("solrdata/myinst/mydata").toAbsolutePath().toString(), "/path/to/myinst", df, cc, "dataDir", "mydata");
     // solr.data.home set but also solrDataHome set in solr.xml, which should override the former
     System.setProperty("test.solr.data.home", "/foo");
     config = loadNodeConfig("/solr/solr-solrDataHome.xml");
     cc = new CoreContainer(config);
-    rdf = new RAMDirectoryFactory();
-    rdf.initCoreContainer(cc);
-    rdf.init(new NamedList());
-    assertDataHome("/foo/inst_dir/data", "inst_dir", rdf, cc);
+    df = directoryFactoryClass.newInstance();
+    df.initCoreContainer(cc);
+    df.init(new NamedList());
+    assertDataHome("/foo/inst_dir/data", "inst_dir", df, cc);
   }
 
-  private void assertDataHome(String expected, String instanceDir, RAMDirectoryFactory rdf, CoreContainer cc, String... properties) throws IOException {
-    String dataHome = rdf.getDataHome(new CoreDescriptor("core_name", Paths.get(instanceDir), cc.containerProperties, cc.isZooKeeperAware(), properties));
+  private void assertDataHome(String expected, String instanceDir, DirectoryFactory df, CoreContainer cc, String... properties) throws IOException {
+    String dataHome = df.getDataHome(new CoreDescriptor("core_name", Paths.get(instanceDir), cc.containerProperties, cc.isZooKeeperAware(), properties));
     assertEquals(Paths.get(expected).toAbsolutePath(), Paths.get(dataHome).toAbsolutePath());
   }
 
diff --git a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
index eb6f54d..cc31135 100644
--- a/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/component/DistributedFacetPivotLargeTest.java
@@ -17,6 +17,7 @@
 package org.apache.solr.handler.component;
 
 import java.io.IOException;
+import java.util.Arrays;
 import java.util.Date;
 import java.util.List;
 
@@ -264,6 +265,93 @@
       }
     }
 
+    // check of a single level pivot using sort=index w/mincount big enough
+    // to trigger per-shard mincount > num docs on one shard
+    // (beefed up test of same with nested pivot below)
+    for (int limit : Arrays.asList(4, 444444, -1)) {
+      SolrParams p = params("q", "*:*",
+                            "rows", "0",
+                            // skip place_s:Nplaceholder buckets
+                            "fq","-hiredate_dt:\"2012-10-01T12:30:00Z\"", 
+                            // skip company_t:compHolderN buckets from twoShard
+                            "fq","-(+company_t:compHolder* +real_b:true)",
+                            "facet","true",
+                            "facet.pivot","place_s",
+                            FacetParams.FACET_PIVOT_MINCOUNT, "50",
+                            FacetParams.FACET_LIMIT, ""+limit,
+                            "facet.sort", "index");
+      rsp = null;
+      try {
+        rsp = query( p );
+        assertPivot("place_s", "cardiff", 107, rsp.getFacetPivot().get("place_s").get(0));
+        // - zeroShard  = 50 ... above per-shard min of 50/(numShards=4)
+        // - oneShard   =  5 ... below per-shard min of 50/(numShards=4) .. should be refined
+        // - twoShard   = 52 ... above per-shard min of 50/(numShards=4)
+        // = threeShard =  0 ... should be refined and still match nothing
+      } catch (AssertionError ae) {
+        throw new AssertionError(ae.getMessage() + ": " + p.toString() + " ==> " + rsp, ae);
+      }
+    }
+    
+    // test permutations of mincount & limit with sort=index
+    // (there is a per-shard optimization on mincount when sort=index is used)
+    for (int limit : Arrays.asList(4, 444444, -1)) {
+      SolrParams p = params("q", "*:*",
+                            "rows", "0",
+                            // skip place_s:Nplaceholder buckets
+                            "fq","-hiredate_dt:\"2012-10-01T12:30:00Z\"", 
+                            // skip company_t:compHolderN buckets from twoShard
+                            "fq","-(+company_t:compHolder* +real_b:true)",
+                            "facet","true",
+                            "facet.pivot","place_s,company_t",
+                            FacetParams.FACET_PIVOT_MINCOUNT, "50",
+                            FacetParams.FACET_LIMIT, ""+limit,
+                            "facet.sort", "index");
+      rsp = null;
+      try {
+        rsp = query( p );
+        pivots = rsp.getFacetPivot().get("place_s,company_t");
+        firstPlace = pivots.get(0);
+        assertPivot("place_s", "cardiff", 107, firstPlace);
+        //
+        assertPivot("company_t", "bbc",      101, firstPlace.getPivot().get(0)); 
+        assertPivot("company_t", "honda",     50, firstPlace.getPivot().get(1)); 
+        assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(2)); 
+        assertPivot("company_t", "polecat",   52, firstPlace.getPivot().get(3)); 
+      } catch (AssertionError ae) {
+        throw new AssertionError(ae.getMessage() + ": " + p.toString() + " ==> " + rsp, ae);
+      }
+    }
+
+    { // similar to the test above, but now force a restriction on the over request and allow
+      // terms that are early in index sort -- but don't meet the mincount overall -- to be considered
+      // in the first phase. (SOLR-12954)
+      SolrParams p = params("q", "*:*",
+                            "rows", "0",
+                            // skip company_t:compHolderN buckets from twoShard
+                            "fq","-(+company_t:compHolder* +real_b:true)",
+                            "facet","true",
+                            "facet.pivot","place_s,company_t",
+                            // the (50) Nplaceholder place_s values exist in 6 each on oneShard
+                            FacetParams.FACET_PIVOT_MINCOUNT, ""+(6 * shardsArr.length),
+                            FacetParams.FACET_LIMIT, "4",
+                            "facet.sort", "index");
+      rsp = null;
+      try {
+        rsp = query( p ); 
+        pivots = rsp.getFacetPivot().get("place_s,company_t");
+        firstPlace = pivots.get(0);
+        assertPivot("place_s", "cardiff", 107, firstPlace);
+        //
+        assertPivot("company_t", "bbc",      101, firstPlace.getPivot().get(0)); 
+        assertPivot("company_t", "honda",     50, firstPlace.getPivot().get(1)); 
+        assertPivot("company_t", "microsoft", 56, firstPlace.getPivot().get(2)); 
+        assertPivot("company_t", "polecat",   52, firstPlace.getPivot().get(3)); 
+      } catch (AssertionError ae) {
+        throw new AssertionError(ae.getMessage() + ": " + p.toString() + " ==> " + rsp, ae);
+      }
+    }
+    
     // Pivot Faceting (combined wtih Field Faceting)
     for (SolrParams facetParams : 
            // with and w/o an excluded fq
diff --git a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
index cabe497..a27ad2da 100644
--- a/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
+++ b/solr/core/src/test/org/apache/solr/search/QueryEqualityTest.java
@@ -1202,14 +1202,8 @@
     assertFuncEquals("gte(foo_i,2)", "gte(foo_i,2)");
     assertFuncEquals("eq(foo_i,2)", "eq(foo_i,2)");
 
-    boolean equals = false;
-    try {
-      assertFuncEquals("eq(foo_i,2)", "lt(foo_i,2)");
-      equals = true;
-    } catch (AssertionError e) {
-      //expected
-    }
-    assertFalse(equals);
+    expectThrows(AssertionError.class, "expected error, functions are not equal",
+        () -> assertFuncEquals("eq(foo_i,2)", "lt(foo_i,2)"));
   }
 
   public void testChildField() throws Exception {
@@ -1223,32 +1217,25 @@
   }
 
   public void testPayloadScoreQuery() throws Exception {
-    // I don't see a precedent to test query inequality in here, so doing a `try`
     // There was a bug with PayloadScoreQuery's .equals() method that said two queries were equal with different includeSpanScore settings
 
-    try {
-      assertQueryEquals
-          ("payload_score"
-              , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=false}"
-              , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=true}"
-          );
-      fail("queries should not have been equal");
-    } catch(AssertionFailedError e) {
-      assertTrue("queries were not equal, as expected", true);
-    }
+    expectThrows(AssertionFailedError.class, "queries should not have been equal",
+        () -> assertQueryEquals
+            ("payload_score"
+                , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=false}"
+                , "{!payload_score f=foo_dpf v=query func=min includeSpanScore=true}"
+            )
+    );
   }
 
   public void testPayloadCheckQuery() throws Exception {
-    try {
-      assertQueryEquals
-          ("payload_check"
-              , "{!payload_check f=foo_dpf payloads=2}one"
-              , "{!payload_check f=foo_dpf payloads=2}two"
-          );
-      fail("queries should not have been equal");
-    } catch(AssertionFailedError e) {
-      assertTrue("queries were not equal, as expected", true);
-    }
+    expectThrows(AssertionFailedError.class, "queries should not have been equal",
+        () -> assertQueryEquals
+            ("payload_check"
+                , "{!payload_check f=foo_dpf payloads=2}one"
+                , "{!payload_check f=foo_dpf payloads=2}two"
+            )
+    );
   }
 
   public void testPayloadFunction() throws Exception {
@@ -1272,16 +1259,14 @@
             "must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' filter='{!lucene}foo_s:e'}",
         "{!bool must='{!lucene}foo_s:c' filter='{!lucene}foo_s:d' " +
             "must_not='{!lucene}foo_s:a' should='{!lucene}foo_s:b' filter='{!lucene}foo_s:e'}");
-    try {
-      assertQueryEquals
-          ("bool"
-              , "{!bool must='{!lucene}foo_s:a'}"
-              , "{!bool should='{!lucene}foo_s:a'}"
-          );
-      fail("queries should not have been equal");
-    } catch(AssertionFailedError e) {
-      assertTrue("queries were not equal, as expected", true);
-    }
+
+    expectThrows(AssertionFailedError.class, "queries should not have been equal",
+        () -> assertQueryEquals
+            ("bool"
+                , "{!bool must='{!lucene}foo_s:a'}"
+                , "{!bool should='{!lucene}foo_s:a'}"
+            )
+    );
   }
 
   // Override req to add df param
diff --git a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
index 9f65ba5..7a5fd8d 100644
--- a/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestCollapseQParserPlugin.java
@@ -881,13 +881,10 @@
 
   public void testGroupHeadSelector() {
     GroupHeadSelector s;
-    
-    try {
-      s = GroupHeadSelector.build(params("sort", "foo_s asc", "min", "bar_s"));
-      fail("no exception with multi criteria");
-    } catch (SolrException e) {
-      // expected
-    }
+
+    expectThrows(SolrException.class, "no exception with multi criteria",
+        () -> GroupHeadSelector.build(params("sort", "foo_s asc", "min", "bar_s"))
+    );
     
     s = GroupHeadSelector.build(params("min", "foo_s"));
     assertEquals(GroupHeadSelectorType.MIN, s.type);
diff --git a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
index ff9a2c4..0f72b32 100644
--- a/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestExtendedDismaxParser.java
@@ -656,45 +656,35 @@
   public void testCyclicAliasing() throws Exception {
     try {
       ignoreException(".*Field aliases lead to a cycle.*");
-      try {
-        h.query(req("defType","edismax", "q","blarg", "qf","who", "f.who.qf","name","f.name.qf","who"));
-        fail("Simple cyclic alising not detected");
-      } catch (SolrException e) {
-        assertTrue(e.getCause().getMessage().contains("Field aliases lead to a cycle"));
-      }
-      
-      try {
-        h.query(req("defType","edismax", "q","blarg", "qf","who", "f.who.qf","name","f.name.qf","myalias", "f.myalias.qf","who"));
-        fail("Cyclic alising not detected");
-      } catch (SolrException e) {
-        assertTrue(e.getCause().getMessage().contains("Field aliases lead to a cycle"));
-      }
-      
-      try {
-        h.query(req("defType","edismax", "q","blarg", "qf","field1", "f.field1.qf","field2 field3","f.field2.qf","field4 field5", "f.field4.qf","field5", "f.field5.qf","field6", "f.field3.qf","field6"));
-      } catch (SolrException e) {
-        assertFalse("This is not cyclic alising", e.getCause().getMessage().contains("Field aliases lead to a cycle"));
-        assertTrue(e.getCause().getMessage().contains("not a valid field name"));
-      }
-      
-      try {
-        h.query(req("defType","edismax", "q","blarg", "qf","field1", "f.field1.qf","field2 field3", "f.field2.qf","field4 field5", "f.field4.qf","field5", "f.field5.qf","field4"));
-        fail("Cyclic alising not detected");
-      } catch (SolrException e) {
-        assertTrue(e.getCause().getMessage().contains("Field aliases lead to a cycle"));
-      }
-      
-      try {
-        h.query(req("defType","edismax", "q","who:(Zapp Pig)", "qf","text", "f.who.qf","name","f.name.qf","myalias", "f.myalias.qf","who"));
-        fail("Cyclic alising not detected");
-      } catch (SolrException e) {
-        assertTrue(e.getCause().getMessage().contains("Field aliases lead to a cycle"));
-      }
+
+      SolrException e = expectThrows(SolrException.class, "Simple cyclic aliasing not detected",
+          () -> h.query(req("defType","edismax", "q","blarg", "qf","who", "f.who.qf","name","f.name.qf","who")));
+      assertCyclicDetectionErrorMessage(e);
+
+      e = expectThrows(SolrException.class, "Cyclic aliasing not detected",
+          () -> h.query(req("defType","edismax", "q","blarg", "qf","who", "f.who.qf","name","f.name.qf","myalias", "f.myalias.qf","who")));
+      assertCyclicDetectionErrorMessage(e);
+
+      e = expectThrows(SolrException.class, "Cyclic aliasing not detected", () -> h.query(req("defType","edismax", "q","blarg", "qf","field1", "f.field1.qf","field2 field3","f.field2.qf","field4 field5", "f.field4.qf","field5", "f.field5.qf","field6", "f.field3.qf","field6")));
+      assertFalse("This is not cyclic aliasing", e.getCause().getMessage().contains("Field aliases lead to a cycle"));
+      assertTrue("Should throw exception due to invalid field name", e.getCause().getMessage().contains("not a valid field name"));
+
+      e = expectThrows(SolrException.class, "Cyclic aliasing not detected",
+          () -> h.query(req("defType","edismax", "q","blarg", "qf","field1", "f.field1.qf","field2 field3", "f.field2.qf","field4 field5", "f.field4.qf","field5", "f.field5.qf","field4")));
+      assertCyclicDetectionErrorMessage(e);
+
+      e = expectThrows(SolrException.class, "Cyclic aliasing not detected",
+          () -> h.query(req("defType","edismax", "q","who:(Zapp Pig)", "qf","text", "f.who.qf","name","f.name.qf","myalias", "f.myalias.qf","who")));
+      assertCyclicDetectionErrorMessage(e);
     } finally {
       resetExceptionIgnores();
     }
   }
 
+  private void assertCyclicDetectionErrorMessage(SolrException e) {
+    assertTrue(e.getCause().getMessage().contains("Field aliases lead to a cycle"));
+  }
+
   public void testOperatorsWithLiteralColons() {
     assertU(adoc("id", "142", "a_s", "bogus:xxx", "text_s", "yak"));
     assertU(adoc("id", "143", "a_s", "bogus:xxx"));
@@ -2092,8 +2082,8 @@
   public void killInfiniteRecursionParse() throws Exception {
     assertJQ(req("defType", "edismax", "q", "*", "qq", "{!edismax v=something}", "bq", "{!edismax v=$qq}"));
   }
-  
-  /** SOLR-5163 */ 
+
+  /** SOLR-5163 */
   @Test
   public void testValidateQueryFields() throws Exception {
     // field aliasing covered by test - testAliasing
@@ -2103,13 +2093,13 @@
     params.add("q", "olive AND other");
     params.add("qf", "subject^3 title");
     params.add("debugQuery", "true");
-    
+
     // test valid field names
     try (SolrQueryRequest req = req(params)) {
       String response = h.query(req);
       response.contains("+DisjunctionMaxQuery((title:olive | (subject:oliv)^3.0)) +DisjunctionMaxQuery((title:other | (subject:other)^3.0))");
     }
-    
+
     // test invalid field name
     params.set("qf", "subject^3 nosuchfield");
     try (SolrQueryRequest req = req(params)) {
@@ -2117,7 +2107,7 @@
     } catch (Exception e) {
       Assert.assertEquals("org.apache.solr.search.SyntaxError: Query Field 'nosuchfield' is not a valid field name", e.getMessage());
     }
-    
+
   }
 
 }
diff --git a/solr/core/src/test/org/apache/solr/search/TestFoldingMultitermQuery.java b/solr/core/src/test/org/apache/solr/search/TestFoldingMultitermQuery.java
index 5ceb224..30181e6 100644
--- a/solr/core/src/test/org/apache/solr/search/TestFoldingMultitermQuery.java
+++ b/solr/core/src/test/org/apache/solr/search/TestFoldingMultitermQuery.java
@@ -304,9 +304,9 @@
   public void testMultiBad() {
     try {
       ignoreException("analyzer returned too many terms");
-      assertQ(req("q", "content_multi_bad:" + "abCD*"));
-      fail("Should throw exception when token evaluates to more than one term");
-    } catch (Exception expected) {
+      Exception expected = expectThrows(Exception.class, "Should throw exception when token evaluates to more than one term",
+          () -> assertQ(req("q", "content_multi_bad:" + "abCD*"))
+      );
       assertTrue(expected.getCause() instanceof org.apache.solr.common.SolrException);
     } finally {
       resetExceptionIgnores();
diff --git a/solr/core/src/test/org/apache/solr/search/TestLRUCache.java b/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
index 3bec6df..934c1ec 100644
--- a/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestLRUCache.java
@@ -182,11 +182,8 @@
     CacheRegenerator cr = new NoOpRegenerator();
     Object o = cache.init(params, null, cr);
 
-    try {
-      cache.put("1", "1");
-      fail("Adding a non-accountable value to a cache configured with maxRamBytes should have failed");
-    } catch (Exception e) {
-      assertEquals(e.getClass(), SolrException.class);
-    }
+    expectThrows(SolrException.class, "Adding a non-accountable value to a cache configured with maxRamBytes should have failed",
+        () -> cache.put("1", "1")
+    );
   }
 }
diff --git a/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java b/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java
index 477468a..29c9a37 100644
--- a/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java
+++ b/solr/core/src/test/org/apache/solr/search/TestQueryTypes.java
@@ -389,17 +389,17 @@
 
     try {
       ignoreException("No\\ default\\, and no switch case");
-      assertQ("no match and no default",
+      RuntimeException exp = expectThrows(RuntimeException.class, "Should have gotten an error w/o default",
+          () -> assertQ("no match and no default",
               req("q", "{!switch case.x=Dude case.z=Yonik}asdf")
-              ,"//result[@numFound='BOGUS']");
-      fail("Should have gotten an error w/o default");
-    } catch (RuntimeException exp) {
-      assertTrue("exp cause is wrong", 
-                 exp.getCause() instanceof SolrException);
+              , "//result[@numFound='BOGUS']")
+      );
+      assertTrue("exp cause is wrong",
+          exp.getCause() instanceof SolrException);
       SolrException e = (SolrException) exp.getCause();
       assertEquals("error isn't user error", 400, e.code());
       assertTrue("Error doesn't include bad switch case: " + e.getMessage(),
-                 e.getMessage().contains("asdf"));
+          e.getMessage().contains("asdf"));
     } finally {
       resetExceptionIgnores();
     }
diff --git a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
index 129516e..b3e01f2 100644
--- a/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
+++ b/solr/core/src/test/org/apache/solr/search/TestReRankQParserPlugin.java
@@ -598,12 +598,11 @@
     params.add("start", "0");
     params.add("rows", "2");
 
-    try {
-      h.query(req(params));
-      fail("A syntax error should be thrown when "+ReRankQParserPlugin.RERANK_QUERY+" parameter is not specified");
-    } catch (SolrException e) {
-      assertTrue(e.code() == SolrException.ErrorCode.BAD_REQUEST.code);
-    }
+    SolrException se = expectThrows(SolrException.class, "A syntax error should be thrown when "+ReRankQParserPlugin.RERANK_QUERY+" parameter is not specified",
+        () -> h.query(req(params))
+    );
+    assertTrue(se.code() == SolrException.ErrorCode.BAD_REQUEST.code);
+
   }
 
   @Test
diff --git a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
index 69d34f6..ec1b422 100644
--- a/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
+++ b/solr/core/src/test/org/apache/solr/search/TestRealTimeGet.java
@@ -335,76 +335,53 @@
     clearIndex();
     assertU(commit());
 
-    long version = addAndGetVersion(sdoc("id","1") , null);
+    final long version = addAndGetVersion(sdoc("id","1") , null);
     long version2;
 
-    try {
-      // try version added directly on doc
-      version2 = addAndGetVersion(sdoc("id","1", "_version_", Long.toString(version-1)), null);
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try version added directly on doc
+    SolrException se = expectThrows(SolrException.class, "version should cause an error",
+        () -> addAndGetVersion(sdoc("id","1", "_version_", Long.toString(version-1)), null));
+    assertEquals("version should cause a conflict", 409, se.code());
 
-    try {
-      // try version added as a parameter on the request
-      version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version-1)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try version added as a parameter on the request
+    se = expectThrows(SolrException.class, "version should cause an error",
+        () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version-1))));
+    assertEquals("version should cause a conflict", 409, se.code());
 
-    try {
-      // try an add specifying a negative version
-      version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(-version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try an add specifying a negative version
+    se = expectThrows(SolrException.class, "negative version should cause a conflict",
+        () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(-version))));
+    assertEquals("version should cause a conflict", 409, se.code());
 
-    try {
-      // try an add with a greater version
-      version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version+random().nextInt(1000)+1)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try an add with a greater version
+    se = expectThrows(SolrException.class, "greater version should cause a conflict",
+        () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version+random().nextInt(1000)+1))));
+    assertEquals("version should cause a conflict", 409, se.code());
 
     //
     // deletes
     //
 
-    try {
-      // try a delete with version on the request
-      version2 = deleteAndGetVersion("1", params("_version_", Long.toString(version-1)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try a delete with version on the request
+    se = expectThrows(SolrException.class, "version should cause an error",
+        () -> deleteAndGetVersion("1", params("_version_", Long.toString(version-1))));
+    assertEquals("version should cause a conflict", 409, se.code());
 
-    try {
-      // try a delete with a negative version
-      version2 = deleteAndGetVersion("1", params("_version_", Long.toString(-version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try a delete with a negative version
+    se = expectThrows(SolrException.class, "negative version should cause an error",
+        () -> deleteAndGetVersion("1", params("_version_", Long.toString(-version))));
+    assertEquals("version should cause a conflict", 409, se.code());
 
-    try {
-      // try a delete with a greater version
-      version2 = deleteAndGetVersion("1", params("_version_", Long.toString(version+random().nextInt(1000)+1)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try a delete with a greater version
+    se = expectThrows(SolrException.class, "greater version should cause an error",
+        () -> deleteAndGetVersion("1", params("_version_", Long.toString(version+random().nextInt(1000)+1))));
+    assertEquals("version should cause a conflict", 409, se.code());
 
-    try {
-      // try a delete of a document that doesn't exist, specifying a specific version
-      version2 = deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // try a delete of a document that doesn't exist, specifying a specific version
+    se = expectThrows(SolrException.class, "document does not exist should cause an error",
+        () -> deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(version))));
+    assertEquals("version should cause a conflict", 409, se.code());
+
 
     // try a delete of a document that doesn't exist, specifying that it should not
     version2 = deleteAndGetVersion("I_do_not_exist", params("_version_", Long.toString(-1)));
@@ -414,56 +391,44 @@
     version2 = addAndGetVersion(sdoc("id","1", "_version_", Long.toString(version)), null);
     assertTrue(version2 > version);
 
-    try {
-      // overwriting the previous version should now fail
-      version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // overwriting the previous version should now fail
+    se = expectThrows(SolrException.class, "overwriting previous version should fail",
+        () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version))));
+    assertEquals(409, se.code());
 
-    try {
-      // deleting the previous version should now fail
-      version2 = deleteAndGetVersion("1", params("_version_", Long.toString(version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // deleting the previous version should now fail
+    se = expectThrows(SolrException.class, "deleting the previous version should now fail",
+        () -> deleteAndGetVersion("1", params("_version_", Long.toString(version))));
+    assertEquals(409, se.code());
 
-    version = version2;
+    final long prevVersion = version2;
 
     // deleting the current version should work
-    version2 = deleteAndGetVersion("1", params("_version_", Long.toString(version)));
+    version2 = deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion)));
 
-    try {
-      // overwriting the previous existing doc should now fail (since it was deleted)
-      version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // overwriting the previous existing doc should now fail (since it was deleted)
+    se = expectThrows(SolrException.class, "overwriting the previous existing doc should now fail (since it was deleted)",
+        () -> addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(prevVersion))));
+    assertEquals(409, se.code());
 
-    try {
-      // deleting the previous existing doc should now fail (since it was deleted)
-      version2 = deleteAndGetVersion("1", params("_version_", Long.toString(version)));
-      fail();
-    } catch (SolrException se) {
-      assertEquals(409, se.code());
-    }
+    // deleting the previous existing doc should now fail (since it was deleted)
+    se = expectThrows(SolrException.class, "deleting the previous existing doc should now fail (since it was deleted)",
+        () -> deleteAndGetVersion("1", params("_version_", Long.toString(prevVersion))));
+    assertEquals(409, se.code());
 
     // overwriting a negative version should work
-    version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(-(version-1))));
+    version2 = addAndGetVersion(sdoc("id","1"), params("_version_", Long.toString(-(prevVersion-1))));
     assertTrue(version2 > version);
-    version = version2;
+    long lastVersion = version2;
 
     // sanity test that we see the right version via rtg
     assertJQ(req("qt","/get","id","1")
-        ,"=={'doc':{'id':'1','_version_':" + version + "}}"
+        ,"=={'doc':{'id':'1','_version_':" + lastVersion + "}}"
     );
   }
 
 
-    /***
+  /***
     @Test
     public void testGetRealtime() throws Exception {
       SolrQueryRequest sr1 = req("q","foo");
@@ -626,12 +591,9 @@
                   if (correct) {
                     version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(info.version)));
                   } else {
-                    try {
-                      version = deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(badVersion)));
-                      fail();
-                    } catch (SolrException se) {
-                      assertEquals(409, se.code());
-                    }
+                    SolrException se = expectThrows(SolrException.class, "should not get random version",
+                        () -> deleteAndGetVersion(Integer.toString(id), params("_version_", Long.toString(badVersion))));
+                    assertEquals(409, se.code());
                   }
                 } else {
                   version = deleteAndGetVersion(Integer.toString(id), null);
@@ -674,12 +636,9 @@
                   if (correct) {
                     version = addAndGetVersion(sd, params("_version_", Long.toString(info.version)));
                   } else {
-                    try {
-                      version = addAndGetVersion(sd, params("_version_", Long.toString(badVersion)));
-                      fail();
-                    } catch (SolrException se) {
-                      assertEquals(409, se.code());
-                    }
+                    SolrException se = expectThrows(SolrException.class, "should not get bad version",
+                        () -> addAndGetVersion(sd, params("_version_", Long.toString(badVersion))));
+                    assertEquals(409, se.code());
                   }
                 } else {
                   version = addAndGetVersion(sd, null);
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
index 7a8c06a..7722664 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolr4Spatial.java
@@ -83,12 +83,10 @@
         "fq", "{!field f=" + fieldName + "}Intersectssss"), 400);
 
     ignoreException("NonexistentShape");
-    try {
-      assertU(adoc("id", "-1", fieldName, "NonexistentShape"));
-      fail();
-    } catch (SolrException e) {
-      assertEquals(400, e.code());
-    }
+    SolrException e = expectThrows(SolrException.class, "should throw exception on non existent shape",
+        () -> assertU(adoc("id", "-1", fieldName, "NonexistentShape"))
+    );
+    assertEquals(400, e.code());
     unIgnoreException("NonexistentShape");
   }
 
diff --git a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
index 37347b3..4920520 100644
--- a/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
+++ b/solr/core/src/test/org/apache/solr/search/TestSolrQueryParser.java
@@ -353,13 +353,10 @@
     String q = sb.toString();
 
     // This will still fail when used as the main query, but will pass in a filter query since TermsQuery can be used.
-    try {
+    {
       ignoreException("Too many clauses");
-      assertJQ(req("q",q)
-          ,"/response/numFound==6");
-      fail();
-    } catch (Exception e) {
-      // expect "too many clauses" exception... see SOLR-10921
+      SolrException e = expectThrows(SolrException.class, "expected too many clauses exception",
+          () -> assertJQ(req("q", q), "/response/numFound==6"));
       assertTrue(e.getMessage().contains("many clauses"));
     }
 
@@ -1114,13 +1111,9 @@
     for (String suffix:fieldSuffix) {
       qParser = QParser.getParser("foo_" + suffix + ":(1 2 3 4 5 6 7 8 9 10 20 19 18 17 16 15 14 13 12 NOT_A_NUMBER)", req);
       qParser.setIsFilter(true); // this may change in the future
-      try {
-        qParser.getQuery();
-        fail("Expecting exception");
-      } catch (SolrException e) {
-        assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
-        assertTrue("Unexpected exception: " + e.getMessage(), e.getMessage().contains("Invalid Number: NOT_A_NUMBER"));
-      }
+      SolrException e = expectThrows(SolrException.class, "Expecting exception", qParser::getQuery);
+      assertEquals(SolrException.ErrorCode.BAD_REQUEST.code, e.code());
+      assertTrue("Unexpected exception: " + e.getMessage(), e.getMessage().contains("Invalid Number: NOT_A_NUMBER"));
     }
     
     
diff --git a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
index 0035b16..01d29b7 100644
--- a/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
+++ b/solr/core/src/test/org/apache/solr/search/facet/TestJsonFacets.java
@@ -2410,7 +2410,72 @@
             ", books2:{ buckets:[ {val:q,count:1} ] }" +
             "}"
     );
+  }
 
+  /**
+   * An explicit test for unique*(_root_) across all methods
+   */
+  public void testUniquesForMethod() throws Exception {
+    final Client client = Client.localClient();
+
+    final SolrParams p = params("rows","0");
+
+    client.deleteByQuery("*:*", null);
+
+    SolrInputDocument parent;
+    parent = sdoc("id", "1", "type_s","book", "book_s","A", "v_t","q");
+    client.add(parent, null);
+
+    parent = sdoc("id", "2", "type_s","book", "book_s","B", "v_t","q w");
+    parent.addChildDocument( sdoc("id","2.1", "type_s","page", "page_s","a", "v_t","x y z")  );
+    parent.addChildDocument( sdoc("id","2.2", "type_s","page", "page_s","b", "v_t","x y  ") );
+    parent.addChildDocument( sdoc("id","2.3", "type_s","page", "page_s","c", "v_t","  y z" )  );
+    client.add(parent, null);
+
+    parent = sdoc("id", "3", "type_s","book", "book_s","C", "v_t","q w e");
+    parent.addChildDocument( sdoc("id","3.1", "type_s","page", "page_s","d", "v_t","x    ")  );
+    parent.addChildDocument( sdoc("id","3.2", "type_s","page", "page_s","e", "v_t","  y  ")  );
+    parent.addChildDocument( sdoc("id","3.3", "type_s","page", "page_s","f", "v_t","    z")  );
+    client.add(parent, null);
+
+    parent = sdoc("id", "4", "type_s","book", "book_s","D", "v_t","e");
+    client.add(parent, null);
+
+    client.commit();
+
+    client.testJQ(params(p, "q", "type_s:page"
+        , "json.facet", "{" +
+            "  types: {" +
+            "    type:terms," +
+            "    field:type_s," +
+            "    limit:-1," +
+            "    facet: {" +
+            "           in_books: \"unique(_root_)\" }"+
+            "  }," +
+            "  pages: {" +
+            "    type:terms," +
+            "    field:page_s," +
+            "    limit:-1," +
+            "    facet: {" +
+            "           in_books: \"uniqueBlock(_root_)\" }"+
+            "  }" +
+            "}" )
+
+        , "response=={numFound:6,start:0,docs:[]}"
+        , "facets=={ count:6," +
+            "types:{" +
+            "    buckets:[ {val:page, count:6, in_books:2} ]}" +
+            "pages:{" +
+            "    buckets:[ " +
+            "     {val:a, count:1, in_books:1}," +
+            "     {val:b, count:1, in_books:1}," +
+            "     {val:c, count:1, in_books:1}," +
+            "     {val:d, count:1, in_books:1}," +
+            "     {val:e, count:1, in_books:1}," +
+            "     {val:f, count:1, in_books:1}" +
+            "    ]}" +
+            "}"
+    );
   }
 
 
diff --git a/solr/solr-ref-guide/src/collections-api.adoc b/solr/solr-ref-guide/src/collections-api.adoc
index ec8517d..b5dda3e 100644
--- a/solr/solr-ref-guide/src/collections-api.adoc
+++ b/solr/solr-ref-guide/src/collections-api.adoc
@@ -1663,7 +1663,7 @@
 === CLUSTERSTATUS Parameters
 
 `collection`::
-The collection name for which information is requested. If omitted, information on all collections in the cluster will be returned.
+The collection or alias name for which information is requested. If omitted, information on all collections in the cluster will be returned. If an alias is supplied, information on the collections in the alias will be returned.
 
 `shard`::
 The shard(s) for which information is requested. Multiple shard names can be specified as a comma-separated list.
diff --git a/solr/solr-ref-guide/src/scalar-math.adoc b/solr/solr-ref-guide/src/scalar-math.adoc
index b602279..f5fa745 100644
--- a/solr/solr-ref-guide/src/scalar-math.adoc
+++ b/solr/solr-ref-guide/src/scalar-math.adoc
@@ -130,8 +130,8 @@
 
 The following scalar math functions are available in the math expressions library:
 
-`abs`, `add`, `div`, `mult`, `sub`, `log`,
+`abs`, `add`, `div`, `mult`, `sub`, `log`, `log10`,
 `pow`, `mod`, `ceil`, `floor`, `sin`, `asin`,
 `sinh`, `cos`, `acos`, `cosh`, `tan`, `atan`,
-`tanh`, `round`, `precision`, `sqrt`, `cbrt`
+`tanh`, `round`, `precision`, `recip`, `sqrt`, `cbrt`
 
diff --git a/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc b/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc
index 97b9dd7..d091537 100644
--- a/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc
+++ b/solr/solr-ref-guide/src/solrcloud-autoscaling-triggers.adoc
@@ -304,6 +304,13 @@
 but it also limits the maximum load on the cluster that the large number of requested
 operations may cause. The default value is 10.
 
+`splitMethod`::
+One of the supported methods for index splitting to use. Default value is `rewrite`, which is
+slow and puts a high CPU load on the shard leader but results in optimized sub-shard indexes.
+The `link` method is much faster and puts very little load on the shard leader but results in
+indexes that are initially as large as the parent shard's index, which slows down replication and
+may lead to excessive initial disk space consumption on replicas.
+
 Events generated by this trigger contain additional details about the shards
 that exceeded thresholds and the types of violations (upper / lower bounds, bytes / docs metrics).
 
diff --git a/solr/solr-ref-guide/src/statistics.adoc b/solr/solr-ref-guide/src/statistics.adoc
index 324a8a6..48b81ed 100644
--- a/solr/solr-ref-guide/src/statistics.adoc
+++ b/solr/solr-ref-guide/src/statistics.adoc
@@ -321,6 +321,41 @@
  }
 ----
 
+The `percentile` function also operates on an array of percentile values.
+The example below computes the 20th, 40th, 60th and 80th percentiles for a random sample
+of the *response_d* field:
+
+[source,text]
+----
+let(a=random(collection2, q="*:*", rows="15000", fl="response_d"),
+    b=col(a, response_d),
+    c=percentile(b, array(20,40,60,80)))
+----
+
+When this expression is sent to the `/stream` handler it responds with:
+
+[source,json]
+----
+{
+  "result-set": {
+    "docs": [
+      {
+        "c": [
+          818.0835543394625,
+          843.5590348165282,
+          866.1789509894824,
+          892.5033386599067
+        ]
+      },
+      {
+        "EOF": true,
+        "RESPONSE_TIME": 291
+      }
+    ]
+  }
+}
+----
+
 == Covariance and Correlation
 
 Covariance and Correlation measure how random variables move
@@ -543,6 +578,8 @@
 
 * `cbrt`: Returns a numeric array with the cube root of each element of the original array.
 
+* `recip`: Returns a numeric array with the reciprocal of each element of the original array.
+
 Below is an example of a ttest performed on log transformed data sets:
 
 [source,text]
@@ -661,6 +698,47 @@
 }
 ----
 
+Vectors that have been transformed with the `recip` function can be back-transformed by taking the reciprocal
+of the reciprocal.
+
+The example below demonstrates the back-transformation of the `recip` function.
+
+[source,text]
+----
+let(echo="b,c",
+    a=array(100, 200, 300),
+    b=recip(a),
+    c=recip(b))
+----
+
+When this expression is sent to the `/stream` handler it responds with:
+
+[source,json]
+----
+{
+  "result-set": {
+    "docs": [
+      {
+        "b": [
+          0.01,
+          0.005,
+          0.0033333333333333335
+        ],
+        "c": [
+          100,
+          200,
+          300
+        ]
+      },
+      {
+        "EOF": true,
+        "RESPONSE_TIME": 0
+      }
+    ]
+  }
+}
+----
+
 == Z-scores
 
 The `zscores` function converts a numeric array to an array of z-scores. The z-score
@@ -696,5 +774,4 @@
     ]
   }
 }
-----
-
+----
\ No newline at end of file
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
index 39ad8bf..f052ae9 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/cloud/autoscaling/Suggester.java
@@ -299,7 +299,11 @@
       for (Map.Entry<String, List<ReplicaInfo>> shard : e.getValue().entrySet()) {
         if (!isAllowed(new Pair<>(e.getKey(), shard.getKey()), Hint.COLL_SHARD)) continue;//todo fix
         if (shard.getValue() == null || shard.getValue().isEmpty()) continue;
-        replicaList.add(new Pair<>(shard.getValue().get(0), r));
+        for (ReplicaInfo replicaInfo : shard.getValue()) {
+          if (replicaInfo.getName().startsWith("SYNTHETIC.")) continue;
+          replicaList.add(new Pair<>(shard.getValue().get(0), r));
+          break;
+        }
       }
     }
   }
@@ -311,10 +315,8 @@
     List<Violation> errors = new ArrayList<>();
     for (Clause clause : session.expandedClauses) {
       Clause originalClause = clause.derivedFrom == null ? clause : clause.derivedFrom;
-//      if (!executeInStrictMode && !clause.strict) {
       if (this.deviations == null) this.deviations = new LinkedHashMap<>();
       this.deviations.put(originalClause, new double[1]);
-//      }
       List<Violation> errs = clause.test(session, this.deviations == null ? null : this.deviations.get(originalClause));
       if (!errs.isEmpty() &&
           (executeInStrictMode || clause.strict)) errors.addAll(errs);
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java
index deb8fbc..484eaad 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/impl/HttpClusterStateProvider.java
@@ -96,14 +96,13 @@
         ClusterState cs = fetchClusterState(client, collection, null);
         return cs.getCollectionRef(collection);
       } catch (SolrServerException | RemoteSolrException | IOException e) {
-        if (e.getMessage().contains(collection + " not found")) {
-          // Cluster state for the given collection was not found.
-          // Lets fetch/update our aliases:
-          getAliases(true);
-          return null;
-        }
         log.warn("Attempt to fetch cluster state from " +
             Utils.getBaseUrlForNodeName(nodeName, urlScheme) + " failed.", e);
+      } catch (NotACollectionException e) {
+        // Cluster state for the given collection was not found, could be an alias.
+        // Lets fetch/update our aliases:
+        getAliases(true);
+        return null;
       }
     }
     throw new RuntimeException("Tried fetching cluster state using the node names we knew of, i.e. " + liveNodes +". However, "
@@ -114,7 +113,7 @@
   }
 
   @SuppressWarnings({"rawtypes", "unchecked"})
-  private ClusterState fetchClusterState(SolrClient client, String collection, Map<String, Object> clusterProperties) throws SolrServerException, IOException {
+  private ClusterState fetchClusterState(SolrClient client, String collection, Map<String, Object> clusterProperties) throws SolrServerException, IOException, NotACollectionException {
     ModifiableSolrParams params = new ModifiableSolrParams();
     if (collection != null) {
       params.set("collection", collection);
@@ -131,8 +130,12 @@
       collectionsMap = ((NamedList)cluster.get("collections")).asMap(10);
     }
     int znodeVersion;
-    if (collection != null) {
-      znodeVersion =  (int)((Map<String, Object>)(collectionsMap).get(collection)).get("znodeVersion");
+    Map<String, Object> collFromStatus = (Map<String, Object>) (collectionsMap).get(collection);
+    if (collection != null && collFromStatus == null) {
+      throw new NotACollectionException(); // probably an alias
+    }
+    if (collection != null) { // can be null if alias
+      znodeVersion =  (int) collFromStatus.get("znodeVersion");
     } else {
       znodeVersion = -1;
     }
@@ -253,6 +256,10 @@
       } catch (SolrServerException | RemoteSolrException | IOException e) {
         log.warn("Attempt to fetch cluster state from " +
             Utils.getBaseUrlForNodeName(nodeName, urlScheme) + " failed.", e);
+      } catch (NotACollectionException e) {
+        // Cluster state for the given collection was not found, could be an alias.
+        // Lets fetch/update our aliases:
+        getAliases(true);
       }
     }
     throw new RuntimeException("Tried fetching cluster state using the node names we knew of, i.e. " + liveNodes +". However, "
@@ -264,7 +271,7 @@
 
   @Override
   public Map<String, Object> getClusterProperties() {
-    for (String nodeName: liveNodes) {
+    for (String nodeName : liveNodes) {
       try (HttpSolrClient client = new HttpSolrClient.Builder().
           withBaseSolrUrl(Utils.getBaseUrlForNodeName(nodeName, urlScheme)).
           withHttpClient(httpClient).build()) {
@@ -274,9 +281,11 @@
       } catch (SolrServerException | RemoteSolrException | IOException e) {
         log.warn("Attempt to fetch cluster state from " +
             Utils.getBaseUrlForNodeName(nodeName, urlScheme) + " failed.", e);
+      } catch (NotACollectionException e) {
+        // should be an alias, don't care
       }
     }
-    throw new RuntimeException("Tried fetching cluster state using the node names we knew of, i.e. " + liveNodes +". However, "
+    throw new RuntimeException("Tried fetching cluster state using the node names we knew of, i.e. " + liveNodes + ". However, "
         + "succeeded in obtaining the cluster state from none of them."
         + "If you think your Solr cluster is up and is accessible,"
         + " you could try re-creating a new CloudSolrClient using working"
@@ -309,4 +318,7 @@
     this.cacheTimeout = cacheTimeout;
   }
 
+  // This exception is not meant to escape this class; it should be caught and wrapped.
+  private class NotACollectionException extends Exception {
+  }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
index 2be48e3..1777467 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/Lang.java
@@ -272,6 +272,7 @@
         .withFunctionName("getRadius", GetRadiusEvaluator.class)
         .withFunctionName("getSupportPoints", GetSupportPointsEvaluator.class)
         .withFunctionName("pairSort", PairSortEvaluator.class)
+        .withFunctionName("recip", RecipEvaluator.class)
         // Boolean Stream Evaluators
 
         .withFunctionName("and", AndEvaluator.class)
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java
index b545f4b..63fce52 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/PercentileEvaluator.java
@@ -17,6 +17,7 @@
 package org.apache.solr.client.solrj.io.eval;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 import java.util.Locale;
 
@@ -39,16 +40,26 @@
     if(null == second){
       throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - null found for the second value",toExpression(constructingFactory)));
     }
-    if(!(first instanceof List<?>)){
+    if(!(first instanceof List<?>)) {
       throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the first value, expecting a List",toExpression(constructingFactory), first.getClass().getSimpleName()));
     }
-    if(!(second instanceof Number)){
-      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the second value, expecting a Number",toExpression(constructingFactory), first.getClass().getSimpleName()));
+    if((second instanceof Number)) {
+      Percentile percentile = new Percentile();
+      percentile.setData(((List<?>) first).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray());
+      return percentile.evaluate(((Number) second).doubleValue());
+    } else if(second instanceof List){
+      Percentile percentile = new Percentile();
+      percentile.setData(((List<?>) first).stream().mapToDouble(value -> ((Number) value).doubleValue()).toArray());
+      List<Number> values = (List<Number>) second;
+      List<Number> percentiles = new ArrayList();
+      for(Number value : values) {
+        percentiles.add(percentile.evaluate(value.doubleValue()));
+      }
+
+      return percentiles;
+    } else {
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - found type %s for the second value, expecting a number or a numeric array",toExpression(constructingFactory), first.getClass().getSimpleName()));
     }
-    
-    Percentile percentile = new Percentile();
-    percentile.setData(((List<?>)first).stream().mapToDouble(value -> ((Number)value).doubleValue()).toArray());
-    return percentile.evaluate(((Number)second).doubleValue());    
   }
   
 }
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecipEvaluator.java b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecipEvaluator.java
new file mode 100644
index 0000000..2daaac7
--- /dev/null
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/io/eval/RecipEvaluator.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.solr.client.solrj.io.eval;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.Locale;
+import java.util.stream.Collectors;
+
+import org.apache.commons.math3.analysis.function.Inverse;
+import org.apache.solr.client.solrj.io.stream.expr.StreamExpression;
+import org.apache.solr.client.solrj.io.stream.expr.StreamFactory;
+
+public class RecipEvaluator extends RecursiveNumericEvaluator implements OneValueWorker {
+  protected static final long serialVersionUID = 1L;
+
+  public RecipEvaluator(StreamExpression expression, StreamFactory factory) throws IOException{
+    super(expression, factory);
+
+    if(1 != containedEvaluators.size()){
+      throw new IOException(String.format(Locale.ROOT,"Invalid expression %s - expecting exactly 1 value but found %d",expression,containedEvaluators.size()));
+    }
+  }
+
+  @Override
+  public Object doWork(Object value){
+    if(null == value) {
+      return null;
+    }
+    else if(value instanceof List) {
+      return ((List<?>)value).stream().map(innerValue -> doWork(innerValue)).collect(Collectors.toList());
+    }
+    else{
+      Inverse inverse = new Inverse();
+      return inverse.value(((Number)value).doubleValue());
+    }
+  }
+}
diff --git a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java
index be50a17..ccef7e2 100644
--- a/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java
+++ b/solr/solrj/src/java/org/apache/solr/common/NavigableObject.java
@@ -24,7 +24,7 @@
 
 /**This class contains helper methods for navigating deeply nested Objects. Keep in mind that
  * it may be expensive depending on the underlying implementation. each level needs an extra lookup
- * and the lookup may be as expensive as O(log(n)) to O(o) depending on the underlying impl
+ * and the lookup may be as expensive as O(log(n)) to O(n) depending on the underlying impl
  *
  */
 public interface NavigableObject {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
index 7b32567..8dc0c24 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/cloud/autoscaling/TestPolicy.java
@@ -1972,16 +1972,25 @@
 
   public void testMoveReplicaSuggester() {
     String autoScalingjson = "  '{cluster-policy':[" +
-        "    {      'cores':'<10',      'node':'#ANY'}," +
-        "    {      'replica':'<2',      'shard':'#EACH',      'node':'#ANY'}," +
-        "    {      'nodeRole':'overseer','replica':0}]," +
-        "  'cluster-preferences':[{'minimize':'cores'}]}";
+        "{'cores':'<10', 'node':'#ANY'}," +
+        "{'replica':'<2', 'shard':'#EACH','node':'#ANY'}]," +
+        "'cluster-preferences':[{'minimize':'cores'}]}";
     Policy policy = new Policy((Map<String, Object>) Utils.fromJSONString(autoScalingjson));
     Policy.Session session = policy.createSession(cloudManagerWithData((Map) loadFromResource("testMoveReplicaSuggester.json")));
-    Suggester suggester = session.getSuggester(MOVEREPLICA).hint(Hint.TARGET_NODE, "10.0.0.6:7574_solr");
+    Suggester suggester = session.getSuggester(MOVEREPLICA)
+        .hint(Hint.TARGET_NODE, "10.0.0.6:7574_solr");
     SolrRequest op = suggester.getSuggestion();
     assertNotNull(op);
-    suggester = suggester.getSession().getSuggester(MOVEREPLICA).hint(Hint.TARGET_NODE, "10.0.0.6:7574_solr");
+    suggester = suggester.getSession()
+        .getSuggester(MOVEREPLICA)
+        .hint(Hint.TARGET_NODE, "10.0.0.6:7574_solr");
+    op = suggester.getSuggestion();
+    assertNull(op);
+
+    suggester = suggester.getSession()
+        .getSuggester(MOVEREPLICA)
+        .forceOperation(true)
+        .hint(Hint.TARGET_NODE, "10.0.0.6:8983_solr");
     op = suggester.getSuggestion();
     assertNull(op);
   }
@@ -2183,7 +2192,7 @@
     assertEquals(0, violations.get(0).getViolatingReplicas().size());
 
     l = PolicyHelper.getSuggestions(cfg, cloudManagerWithData((Map) loadFromResource("testFreeDiskSuggestions.json")));
-    assertEquals(4, l.size());
+    assertEquals(3, l.size());
     assertEquals("r4", l.get(0)._get("operation/command/move-replica/replica", null));
     assertEquals("node1", l.get(0)._get("operation/command/move-replica/targetNode", null));
 
@@ -2193,8 +2202,6 @@
     assertEquals("r2", l.get(2)._get("operation/command/move-replica/replica", null));
     assertEquals("node1", l.get(2)._get("operation/command/move-replica/targetNode", null));
 
-    assertEquals("improvement", l.get(3)._get("type", null));
-
 
   }
 
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
index e06b973..960eb50 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/TestLang.java
@@ -73,7 +73,7 @@
       "outliers", "stream", "getCache", "putCache", "listCache", "removeCache", "zscores", "latlonVectors",
       "convexHull", "getVertices", "getBaryCenter", "getArea", "getBoundarySize","oscillate",
       "getAmplitude", "getPhase", "getAngularFrequency", "enclosingDisk", "getCenter", "getRadius",
-      "getSupportPoints", "pairSort", "log10", "plist"};
+      "getSupportPoints", "pairSort", "log10", "plist", "recip"};
 
   @Test
   public void testLang() {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
index 2bff1ab..4642388 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/MathExpressionTest.java
@@ -985,6 +985,23 @@
     tuple = tuples.get(0);
     p = tuple.getDouble("return-value");
     assertEquals(p, 2.4, 0.001);
+
+
+    cexpr = "percentile(array(11,10,3,4,5,6,7,8,9,2,1), array(20, 50))";
+    paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cexpr);
+    paramsLoc.set("qt", "/stream");
+
+    solrStream = new SolrStream(url, paramsLoc);
+
+    context = new StreamContext();
+    solrStream.setStreamContext(context);
+    tuples = getTuples(solrStream);
+    assertTrue(tuples.size() == 1);
+    tuple = tuples.get(0);
+    List<Number> percentiles = (List<Number>)tuple.get("return-value");
+    assertEquals(percentiles.get(0).doubleValue(), 2.4, 0.001);
+    assertEquals(percentiles.get(1).doubleValue(), 6.0, 0.001);
   }
 
   @Test
@@ -1783,6 +1800,31 @@
     assertEquals(log.doubleValue(), 1.4842998393467859, 0.0);
   }
 
+
+  @Test
+  public void testRecip() throws Exception {
+    String cexpr = "let(echo=true, a=array(10, 20, 30), b=recip(a), c=recip(30.5))";
+    ModifiableSolrParams paramsLoc = new ModifiableSolrParams();
+    paramsLoc.set("expr", cexpr);
+    paramsLoc.set("qt", "/stream");
+    String url = cluster.getJettySolrRunners().get(0).getBaseUrl().toString()+"/"+COLLECTIONORALIAS;
+    TupleStream solrStream = new SolrStream(url, paramsLoc);
+    StreamContext context = new StreamContext();
+    solrStream.setStreamContext(context);
+    List<Tuple> tuples = getTuples(solrStream);
+    assertEquals(tuples.size(), 1);
+    Tuple tuple = tuples.get(0);
+    List<Number> logs = (List<Number>)tuple.get("b");
+    assertEquals(logs.size(), 3);
+    assertEquals(logs.get(0).doubleValue(), .1, 0.0);
+    assertEquals(logs.get(1).doubleValue(), .05, 0.0);
+    assertEquals(logs.get(2).doubleValue(), 0.03333333333333333, 0.0);
+
+    Number log = (Number)tuple.get("c");
+    assertEquals(log.doubleValue(), 0.03278688524590164, 0.0);
+  }
+
+
   @Test
   public void testPow() throws Exception {
     String cexpr = "let(echo=true, a=array(10, 20, 30), b=pow(a, 2), c=pow(2, a), d=pow(10, 3), e=pow(a, array(1, 2, 3)))";
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
index 0a8030c..af79ea6 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamDecoratorTest.java
@@ -26,6 +26,7 @@
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.io.SolrClientCache;
 import org.apache.solr.client.solrj.io.Tuple;
@@ -65,6 +66,7 @@
 import org.junit.Test;
 
 @Slow
+@SolrTestCaseJ4.SuppressSSL
 @LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
 //commented 23-AUG-2018 @LuceneTestCase.BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
 public class StreamDecoratorTest extends SolrCloudTestCase {
diff --git a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
index add34cb..24264b6 100644
--- a/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
+++ b/solr/solrj/src/test/org/apache/solr/client/solrj/io/stream/StreamExpressionTest.java
@@ -26,6 +26,7 @@
 
 import org.apache.lucene.util.LuceneTestCase;
 import org.apache.lucene.util.LuceneTestCase.Slow;
+import org.apache.solr.SolrTestCaseJ4;
 import org.apache.solr.client.solrj.embedded.JettySolrRunner;
 import org.apache.solr.client.solrj.io.ClassificationEvaluation;
 import org.apache.solr.client.solrj.io.SolrClientCache;
@@ -52,6 +53,7 @@
 import org.junit.Test;
 
 @Slow
+@SolrTestCaseJ4.SuppressSSL
 @LuceneTestCase.SuppressCodecs({"Lucene3x", "Lucene40","Lucene41","Lucene42","Lucene45"})
 public class StreamExpressionTest extends SolrCloudTestCase {