SOLR-17284: Remove deprecated BlobRepository (#2447)

We have newer approaches such as the FileStore approach used by PackageManager.
diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
deleted file mode 100644
index b2960a6..0000000
--- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java
+++ /dev/null
@@ -1,367 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.solr.core;
-
-import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
-import static org.apache.solr.common.SolrException.ErrorCode.SERVICE_UNAVAILABLE;
-
-import java.io.InputStream;
-import java.lang.invoke.MethodHandles;
-import java.math.BigInteger;
-import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Locale;
-import java.util.Map;
-import java.util.Random;
-import java.util.Set;
-import java.util.concurrent.Callable;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.regex.Pattern;
-import org.apache.http.HttpEntity;
-import org.apache.http.HttpResponse;
-import org.apache.http.client.HttpClient;
-import org.apache.http.client.methods.HttpGet;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ClusterState;
-import org.apache.solr.common.cloud.DocCollection;
-import org.apache.solr.common.cloud.Replica;
-import org.apache.solr.common.cloud.Slice;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.common.util.Utils;
-import org.apache.zookeeper.server.ByteBufferInputStream;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-/**
- * The purpose of this class is to store the Jars loaded in memory and to keep only one copy of the
- * Jar in a single node.
- */
-public class BlobRepository {
-
-  private static final long MAX_JAR_SIZE =
-      Long.parseLong(System.getProperty("runtime.lib.size", String.valueOf(5 * 1024 * 1024)));
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  public static final Random RANDOM;
-  static final Pattern BLOB_KEY_PATTERN_CHECKER = Pattern.compile(".*/\\d+");
-
-  static {
-    // We try to make things reproducible in the context of our tests by initializing the random
-    // instance based on the current seed
-    String seed = System.getProperty("tests.seed");
-    if (seed == null) {
-      RANDOM = new Random();
-    } else {
-      RANDOM = new Random(seed.hashCode());
-    }
-  }
-
-  private final CoreContainer coreContainer;
-
-  @SuppressWarnings({"rawtypes"})
-  private Map<String, BlobContent> blobs = createMap();
-
-  // for unit tests to override
-  @SuppressWarnings({"rawtypes"})
-  ConcurrentHashMap<String, BlobContent> createMap() {
-    return new ConcurrentHashMap<>();
-  }
-
-  public BlobRepository(CoreContainer coreContainer) {
-    this.coreContainer = coreContainer;
-  }
-
-  // I wanted to {@link SolrCore#loadDecodeAndCacheBlob(String, Decoder)} below but precommit
-  // complains
-
-  /**
-   * Returns the contents of a blob containing a ByteBuffer and increments a reference count. Please
-   * return the same object to decrease the refcount. This is normally used for storing jar files,
-   * and binary raw data. If you are caching Java Objects you want to use {@code
-   * SolrCore#loadDecodeAndCacheBlob(String, Decoder)}
-   *
-   * @param key it is a combination of blobname and version like blobName/version
-   * @return The reference of a blob
-   */
-  public BlobContentRef<ByteBuffer> getBlobIncRef(String key) {
-    return getBlobIncRef(key, () -> addBlob(key));
-  }
-
-  /**
-   * Internal method that returns the contents of a blob and increments a reference count. Please
-   * return the same object to decrease the refcount. Only the decoded content will be cached when
-   * this method is used. Component authors attempting to share objects across cores should use
-   * {@code SolrCore#loadDecodeAndCacheBlob(String, Decoder)} which ensures that a proper close hook
-   * is also created.
-   *
-   * @param key it is a combination of blob name and version like blobName/version
-   * @param decoder a decoder that knows how to interpret the bytes from the blob
-   * @return The reference of a blob
-   */
-  <T> BlobContentRef<T> getBlobIncRef(String key, Decoder<T> decoder) {
-    return getBlobIncRef(key.concat(decoder.getName()), () -> addBlob(key, decoder));
-  }
-
-  <T> BlobContentRef<T> getBlobIncRef(String key, Decoder<T> decoder, String url, String sha512) {
-    StringBuilder keyBuilder = new StringBuilder(key);
-    if (decoder != null) keyBuilder.append(decoder.getName());
-    keyBuilder.append("/").append(sha512);
-
-    return getBlobIncRef(
-        keyBuilder.toString(),
-        () -> new BlobContent<>(key, fetchBlobAndVerify(key, url, sha512), decoder));
-  }
-
-  // do the actual work returning the appropriate type...
-  @SuppressWarnings({"unchecked"})
-  private <T> BlobContentRef<T> getBlobIncRef(String key, Callable<BlobContent<T>> blobCreator) {
-    BlobContent<T> aBlob;
-    if (this.coreContainer.isZooKeeperAware()) {
-      synchronized (blobs) {
-        aBlob = blobs.get(key);
-        if (aBlob == null) {
-          try {
-            aBlob = blobCreator.call();
-          } catch (Exception e) {
-            throw new SolrException(
-                SolrException.ErrorCode.SERVER_ERROR, "Blob loading failed: " + e.getMessage(), e);
-          }
-        }
-      }
-    } else {
-      throw new SolrException(
-          SolrException.ErrorCode.SERVER_ERROR, "Blob loading is not supported in non-cloud mode");
-      // todo
-    }
-    BlobContentRef<T> ref = new BlobContentRef<>(aBlob);
-    synchronized (aBlob.references) {
-      aBlob.references.add(ref);
-    }
-    return ref;
-  }
-
-  // For use cases sharing raw bytes
-  private BlobContent<ByteBuffer> addBlob(String key) {
-    ByteBuffer b = fetchBlob(key);
-    BlobContent<ByteBuffer> aBlob = new BlobContent<>(key, b);
-    blobs.put(key, aBlob);
-    return aBlob;
-  }
-
-  // for use cases sharing java objects
-  private <T> BlobContent<T> addBlob(String key, Decoder<T> decoder) {
-    ByteBuffer b = fetchBlob(key);
-    String keyPlusName = key + decoder.getName();
-    BlobContent<T> aBlob = new BlobContent<>(keyPlusName, b, decoder);
-    blobs.put(keyPlusName, aBlob);
-    return aBlob;
-  }
-
-  static String INVALID_JAR_MSG =
-      "Invalid jar from {0} , expected sha512 hash : {1} , actual : {2}";
-
-  private ByteBuffer fetchBlobAndVerify(String key, String url, String sha512) {
-    ByteBuffer byteBuffer = fetchFromUrl(key, url);
-    String computedDigest = sha512Digest(byteBuffer);
-    if (!computedDigest.equals(sha512)) {
-      throw new SolrException(
-          SERVER_ERROR, StrUtils.formatString(INVALID_JAR_MSG, url, sha512, computedDigest));
-    }
-    return byteBuffer;
-  }
-
-  public static String sha512Digest(ByteBuffer byteBuffer) {
-    MessageDigest digest = null;
-    try {
-      digest = MessageDigest.getInstance("SHA-512");
-    } catch (NoSuchAlgorithmException e) {
-      // unlikely
-      throw new SolrException(SERVER_ERROR, e);
-    }
-    digest.update(byteBuffer);
-    return String.format(Locale.ROOT, "%0128x", new BigInteger(1, digest.digest()));
-  }
-
-  /** Package local for unit tests only please do not use elsewhere */
-  ByteBuffer fetchBlob(String key) {
-    Replica replica = getSystemCollReplica();
-    String url =
-        replica.getBaseUrl()
-            + "/"
-            + CollectionAdminParams.SYSTEM_COLL
-            + "/blob/"
-            + key
-            + "?wt=filestream";
-    return fetchFromUrl(key, url);
-  }
-
-  ByteBuffer fetchFromUrl(String key, String url) {
-    HttpClient httpClient = coreContainer.getUpdateShardHandler().getDefaultHttpClient();
-    HttpGet httpGet = new HttpGet(url);
-    ByteBuffer b;
-    HttpResponse response = null;
-    HttpEntity entity = null;
-    try {
-      response = httpClient.execute(httpGet);
-      entity = response.getEntity();
-      int statusCode = response.getStatusLine().getStatusCode();
-      if (statusCode != 200) {
-        throw new SolrException(
-            SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key);
-      }
-
-      try (InputStream is = entity.getContent()) {
-        b = Utils.toByteArray(is, MAX_JAR_SIZE);
-      }
-    } catch (Exception e) {
-      if (e instanceof SolrException) {
-        throw (SolrException) e;
-      } else {
-        throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "could not load : " + key, e);
-      }
-    } finally {
-      Utils.consumeFully(entity);
-    }
-    return b;
-  }
-
-  private Replica getSystemCollReplica() {
-    ZkStateReader zkStateReader = this.coreContainer.getZkController().getZkStateReader();
-    ClusterState cs = zkStateReader.getClusterState();
-    DocCollection coll = cs.getCollectionOrNull(CollectionAdminParams.SYSTEM_COLL);
-    if (coll == null)
-      throw new SolrException(
-          SERVICE_UNAVAILABLE, CollectionAdminParams.SYSTEM_COLL + " collection not available");
-    ArrayList<Slice> slices = new ArrayList<>(coll.getActiveSlices());
-    if (slices.isEmpty())
-      throw new SolrException(
-          SERVICE_UNAVAILABLE,
-          "No active slices for " + CollectionAdminParams.SYSTEM_COLL + " collection");
-    Collections.shuffle(slices, RANDOM); // do load balancing
-
-    Replica replica = null;
-    for (Slice slice : slices) {
-      List<Replica> replicas = new ArrayList<>(slice.getReplicasMap().values());
-      Collections.shuffle(replicas, RANDOM);
-      for (Replica r : replicas) {
-        if (r.getState() == Replica.State.ACTIVE) {
-          if (zkStateReader
-              .getClusterState()
-              .getLiveNodes()
-              .contains(r.get(ZkStateReader.NODE_NAME_PROP))) {
-            replica = r;
-            break;
-          } else {
-            if (log.isInfoEnabled()) {
-              log.info(
-                  "replica {} says it is active but not a member of live nodes",
-                  r.get(ZkStateReader.NODE_NAME_PROP));
-            }
-          }
-        }
-      }
-    }
-    if (replica == null) {
-      throw new SolrException(
-          SERVICE_UNAVAILABLE,
-          "No active replica available for " + CollectionAdminParams.SYSTEM_COLL + " collection");
-    }
-    return replica;
-  }
-
-  /**
-   * This is to decrement a ref count
-   *
-   * @param ref The reference that is already there. Doing multiple calls with same ref will not
-   *     matter
-   */
-  public void decrementBlobRefCount(BlobContentRef<?> ref) {
-    if (ref == null) return;
-    synchronized (ref.blob.references) {
-      if (!ref.blob.references.remove(ref)) {
-        log.error("Multiple releases for the same reference");
-      }
-      if (ref.blob.references.isEmpty()) {
-        blobs.remove(ref.blob.key);
-      }
-    }
-  }
-
-  public static class BlobContent<T> {
-    public final String key;
-    // holds byte buffer or cached object, holding both is a waste of memory ref counting mechanism
-    private final T content;
-    private final Set<BlobContentRef<T>> references = new HashSet<>();
-
-    @SuppressWarnings("unchecked")
-    public BlobContent(String key, ByteBuffer buffer, Decoder<T> decoder) {
-      this.key = key;
-      this.content =
-          decoder == null ? (T) buffer : decoder.decode(new ByteBufferInputStream(buffer));
-    }
-
-    @SuppressWarnings("unchecked")
-    public BlobContent(String key, ByteBuffer buffer) {
-      this.key = key;
-      this.content = (T) buffer;
-    }
-
-    /**
-     * Get the cached object.
-     *
-     * @return the object representing the content that is cached.
-     */
-    public T get() {
-      return this.content;
-    }
-  }
-
-  public interface Decoder<T> {
-
-    /**
-     * A name by which to distinguish this decoding. This only needs to be implemented if you want
-     * to support decoding the same blob content with more than one decoder.
-     *
-     * @return The name of the decoding, defaults to empty string.
-     */
-    default String getName() {
-      return "";
-    }
-
-    /**
-     * A routine that knows how to convert the stream of bytes from the blob into a Java object.
-     *
-     * @param inputStream the bytes from a blob
-     * @return A Java object of the specified type.
-     */
-    T decode(InputStream inputStream);
-  }
-
-  public static class BlobContentRef<T> {
-    public final BlobContent<T> blob;
-
-    private BlobContentRef(BlobContent<T> blob) {
-      this.blob = blob;
-    }
-  }
-}
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index ac27230..d0978fc 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -251,8 +251,6 @@
 
   private volatile String hostName;
 
-  private final BlobRepository blobRepository = new BlobRepository(this);
-
   private volatile boolean asyncSolrCoreLoad;
 
   protected volatile SecurityConfHandler securityConfHandler;
@@ -2319,10 +2317,6 @@
     return core;
   }
 
-  public BlobRepository getBlobRepository() {
-    return blobRepository;
-  }
-
   /**
    * If using asyncSolrCoreLoad=true, calling this after {@link #load()} will not return until all
    * cores have finished loading.
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index 2b1bd65..92a963f 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -89,7 +89,6 @@
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.params.CollectionAdminParams;
 import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.CommonParams.EchoParamStyle;
 import org.apache.solr.common.params.SolrParams;
@@ -3540,40 +3539,6 @@
     return ImplicitHolder.INSTANCE;
   }
 
-  /**
-   * Convenience method to load a blob. This method minimizes the degree to which component and
-   * other code needs to depend on the structure of solr's object graph and ensures that a proper
-   * close hook is registered. This method should normally be called in {@link
-   * SolrCoreAware#inform(SolrCore)}, and should never be called during request processing. The
-   * Decoder will only run on the first invocations, subsequent invocations will return the cached
-   * object.
-   *
-   * @param key A key in the format of name/version for a blob stored in the {@link
-   *     CollectionAdminParams#SYSTEM_COLL} blob store via the Blob Store API
-   * @param decoder a decoder with which to convert the blob into a Java Object representation
-   *     (first time only)
-   * @return a reference to the blob that has already cached the decoded version.
-   */
-  public <T> BlobRepository.BlobContentRef<T> loadDecodeAndCacheBlob(
-      String key, BlobRepository.Decoder<T> decoder) {
-    // make sure component authors don't give us oddball keys with no version...
-    if (!BlobRepository.BLOB_KEY_PATTERN_CHECKER.matcher(key).matches()) {
-      throw new IllegalArgumentException(
-          "invalid key format, must end in /N where N is the version number");
-    }
-    // define the blob
-    BlobRepository.BlobContentRef<T> blobRef =
-        coreContainer.getBlobRepository().getBlobIncRef(key, decoder);
-    addCloseHook(
-        new CloseHook() {
-          @Override
-          public void postClose(SolrCore core) {
-            coreContainer.getBlobRepository().decrementBlobRefCount(blobRef);
-          }
-        });
-    return blobRef;
-  }
-
   public CancellableQueryTracker getCancellableQueryTracker() {
     return cancellableQueryTracker;
   }
diff --git a/solr/core/src/java/org/apache/solr/filestore/FileStoreAPI.java b/solr/core/src/java/org/apache/solr/filestore/FileStoreAPI.java
index 52dee0f..dc420d3 100644
--- a/solr/core/src/java/org/apache/solr/filestore/FileStoreAPI.java
+++ b/solr/core/src/java/org/apache/solr/filestore/FileStoreAPI.java
@@ -45,7 +45,6 @@
 import org.apache.solr.common.util.ContentStream;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.BlobRepository;
 import org.apache.solr.core.CoreContainer;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.pkg.PackageAPI;
@@ -84,7 +83,7 @@
         coreContainer.getZkController().getZkStateReader().getClusterState().getLiveNodes();
     ArrayList<String> l = new ArrayList<>(liveNodes);
     l.remove(coreContainer.getZkController().getNodeName());
-    Collections.shuffle(l, BlobRepository.RANDOM);
+    Collections.shuffle(l, Utils.RANDOM);
     return l;
   }
 
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/PackageUtils.java b/solr/core/src/java/org/apache/solr/packagemanager/PackageUtils.java
index d6e765b..c0d50dd 100644
--- a/solr/core/src/java/org/apache/solr/packagemanager/PackageUtils.java
+++ b/solr/core/src/java/org/apache/solr/packagemanager/PackageUtils.java
@@ -48,7 +48,6 @@
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.BlobRepository;
 import org.apache.solr.filestore.DistribFileStore;
 import org.apache.solr.filestore.FileStoreAPI;
 import org.apache.solr.packagemanager.SolrPackage.Manifest;
@@ -209,7 +208,7 @@
     NamedList<Object> response = solrClient.request(request);
     String manifestJson = (String) response.get("response");
     String calculatedSHA512 =
-        BlobRepository.sha512Digest(ByteBuffer.wrap(manifestJson.getBytes(StandardCharsets.UTF_8)));
+        Utils.sha512Digest(ByteBuffer.wrap(manifestJson.getBytes(StandardCharsets.UTF_8)));
     if (expectedSHA512.equals(calculatedSHA512) == false) {
       throw new SolrException(
           ErrorCode.UNAUTHORIZED,
diff --git a/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java b/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java
index 109468f..d92d027 100644
--- a/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java
+++ b/solr/core/src/java/org/apache/solr/packagemanager/RepositoryManager.java
@@ -50,7 +50,7 @@
 import org.apache.solr.common.cloud.SolrZkClient;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
-import org.apache.solr.core.BlobRepository;
+import org.apache.solr.common.util.Utils;
 import org.apache.solr.filestore.FileStoreAPI;
 import org.apache.solr.packagemanager.SolrPackage.Artifact;
 import org.apache.solr.packagemanager.SolrPackage.SolrPackageRelease;
@@ -193,8 +193,7 @@
       }
       String manifestJson = getMapper().writeValueAsString(release.manifest);
       String manifestSHA512 =
-          BlobRepository.sha512Digest(
-              ByteBuffer.wrap(manifestJson.getBytes(StandardCharsets.UTF_8)));
+          Utils.sha512Digest(ByteBuffer.wrap(manifestJson.getBytes(StandardCharsets.UTF_8)));
       PackageUtils.postFile(
           solrClient,
           ByteBuffer.wrap(manifestJson.getBytes(StandardCharsets.UTF_8)),
diff --git a/solr/core/src/test-files/solr/configsets/resource-sharing/schema.xml b/solr/core/src/test-files/solr/configsets/resource-sharing/schema.xml
deleted file mode 100644
index 287d4fe..0000000
--- a/solr/core/src/test-files/solr/configsets/resource-sharing/schema.xml
+++ /dev/null
@@ -1,21 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-<schema name="minimal" version="1.1">
-  <fieldType name="string" class="solr.StrField"/>
-  <dynamicField name="*" type="string" indexed="true" stored="true"/>
-</schema>
diff --git a/solr/core/src/test-files/solr/configsets/resource-sharing/solrconfig.xml b/solr/core/src/test-files/solr/configsets/resource-sharing/solrconfig.xml
deleted file mode 100644
index 1dd92fe..0000000
--- a/solr/core/src/test-files/solr/configsets/resource-sharing/solrconfig.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0" ?>
-
-<!--
- Licensed to the Apache Software Foundation (ASF) under one or more
- contributor license agreements.  See the NOTICE file distributed with
- this work for additional information regarding copyright ownership.
- The ASF licenses this file to You under the Apache License, Version 2.0
- (the "License"); you may not use this file except in compliance with
- the License.  You may obtain a copy of the License at
-
-     http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
--->
-
-<!-- Minimal solrconfig.xml with /select, /admin and /update only -->
-
-<config>
-
-  <dataDir>${solr.data.dir:}</dataDir>
-
-  <directoryFactory name="DirectoryFactory"
-                    class="${solr.directoryFactory:solr.NRTCachingDirectoryFactory}"/>
-  <schemaFactory class="ClassicIndexSchemaFactory"/>
-
-  <luceneMatchVersion>${tests.luceneMatchVersion:LATEST}</luceneMatchVersion>
-
-  <updateHandler class="solr.DirectUpdateHandler2">
-    <commitWithin>
-      <softCommit>${solr.commitwithin.softcommit:true}</softCommit>
-    </commitWithin>
-
-  </updateHandler>
-  <searchComponent name="testComponent" class="org.apache.solr.handler.component.ResourceSharingTestComponent" />
-
-  <requestHandler name="/select" class="solr.SearchHandler">
-    <lst name="defaults">
-      <str name="echoParams">explicit</str>
-      <str name="indent">true</str>
-      <str name="df">text</str>
-    </lst>
-    <arr name="first-components">
-      <str>testComponent</str>
-    </arr>
-  </requestHandler>
-</config>
-
diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java
deleted file mode 100644
index 8fe8c34..0000000
--- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryCloudTest.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Path;
-import org.apache.solr.client.solrj.SolrQuery;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.CloudSolrClient;
-import org.apache.solr.client.solrj.request.CollectionAdminRequest;
-import org.apache.solr.client.solrj.response.QueryResponse;
-import org.apache.solr.cloud.SolrCloudTestCase;
-import org.apache.solr.common.SolrDocumentList;
-import org.apache.solr.common.SolrInputDocument;
-import org.apache.solr.common.cloud.ZkStateReader;
-import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.handler.TestBlobHandler;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class BlobRepositoryCloudTest extends SolrCloudTestCase {
-
-  public static final Path TEST_PATH = getFile("solr/configsets").toPath();
-
-  @BeforeClass
-  public static void setupCluster() throws Exception {
-    configureCluster(1) // only sharing *within* a node
-        .addConfig("configname", TEST_PATH.resolve("resource-sharing"))
-        .configure();
-    //    Thread.sleep(2000);
-    CollectionAdminRequest.createCollection(CollectionAdminParams.SYSTEM_COLL, null, 1, 1)
-        .process(cluster.getSolrClient());
-    // test component will fail if it can't find a blob with this data by this name
-    TestBlobHandler.postData(
-        cluster.getSolrClient(),
-        findLiveNodeURI(),
-        "testResource",
-        ByteBuffer.wrap("foo,bar\nbaz,bam".getBytes(StandardCharsets.UTF_8)));
-    //    Thread.sleep(2000);
-    // if these don't load we probably failed to post the blob above
-    CollectionAdminRequest.createCollection("col1", "configname", 1, 1)
-        .process(cluster.getSolrClient());
-    CollectionAdminRequest.createCollection("col2", "configname", 1, 1)
-        .process(cluster.getSolrClient());
-
-    SolrInputDocument document = new SolrInputDocument();
-    document.addField("id", "1");
-    document.addField("text", "col1");
-    CloudSolrClient solrClient = cluster.getSolrClient();
-    solrClient.add("col1", document);
-    solrClient.commit("col1");
-    document = new SolrInputDocument();
-    document.addField("id", "1");
-    document.addField("text", "col2");
-    solrClient.add("col2", document);
-    solrClient.commit("col2");
-    Thread.sleep(2000);
-  }
-
-  @Test
-  public void test() throws Exception {
-    // This test relies on the installation of ResourceSharingTestComponent which has 2 useful
-    // properties:
-    // 1. it will fail to initialize if it doesn't find a 2 line CSV like foo,bar\nbaz,bam thus
-    // validating that we are properly pulling data from the blob store
-    // 2. It replaces any q for a query request to /select with "text:<name>" where <name> is
-    // the name of the last collection to run a query. It does this by caching a shared resource of
-    // type ResourceSharingTestComponent.TestObject, and the following sequence is proof that either
-    // collection can tell if it was (or was not) the last collection to issue a query by consulting
-    // the shared object
-    assertLastQueryNotToCollection("col1");
-    assertLastQueryNotToCollection("col2");
-    assertLastQueryNotToCollection("col1");
-    assertLastQueryToCollection("col1");
-    assertLastQueryNotToCollection("col2");
-    assertLastQueryToCollection("col2");
-  }
-
-  // TODO: move this up to parent class?
-  private static String findLiveNodeURI() {
-    ZkStateReader zkStateReader = cluster.getZkStateReader();
-    return zkStateReader.getBaseUrlForNodeName(
-        zkStateReader
-            .getClusterState()
-            .getCollection(".system")
-            .getSlices()
-            .iterator()
-            .next()
-            .getLeader()
-            .getNodeName());
-  }
-
-  private void assertLastQueryToCollection(String collection)
-      throws SolrServerException, IOException {
-    assertEquals(1, getSolrDocuments(collection).size());
-  }
-
-  private void assertLastQueryNotToCollection(String collection)
-      throws SolrServerException, IOException {
-    assertEquals(0, getSolrDocuments(collection).size());
-  }
-
-  private SolrDocumentList getSolrDocuments(String collection)
-      throws SolrServerException, IOException {
-    SolrQuery query = new SolrQuery("*:*");
-    CloudSolrClient client = cluster.getSolrClient();
-    QueryResponse resp1 = client.query(collection, query);
-    return resp1.getResults();
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
deleted file mode 100644
index cf37174..0000000
--- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
+++ /dev/null
@@ -1,191 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.reset;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.when;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.StringWriter;
-import java.nio.ByteBuffer;
-import java.nio.charset.Charset;
-import java.nio.charset.StandardCharsets;
-import java.util.Objects;
-import java.util.concurrent.ConcurrentHashMap;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.common.SolrException;
-import org.junit.Before;
-import org.junit.BeforeClass;
-import org.junit.Test;
-
-public class BlobRepositoryMockingTest extends SolrTestCaseJ4 {
-
-  private static final Charset UTF8 = StandardCharsets.UTF_8;
-  private static final String[][] PARSED =
-      new String[][] {{"foo", "bar", "baz"}, {"bang", "boom", "bash"}};
-  private static final String BLOBSTR = "foo,bar,baz\nbang,boom,bash";
-  private CoreContainer mockContainer = mock(CoreContainer.class);
-
-  @SuppressWarnings({"unchecked", "rawtypes"})
-  private ConcurrentHashMap<String, BlobRepository.BlobContent> blobStorage;
-
-  BlobRepository repository;
-  ByteBuffer blobData = ByteBuffer.wrap(BLOBSTR.getBytes(UTF8));
-  boolean blobFetched = false;
-  String blobKey = "";
-  String url = null;
-  ByteBuffer filecontent = null;
-
-  @BeforeClass
-  public static void beforeClass() {
-    SolrTestCaseJ4.assumeWorkingMockito();
-  }
-
-  @Override
-  @Before
-  public void setUp() throws Exception {
-    super.setUp();
-    blobFetched = false;
-    blobKey = "";
-    reset(mockContainer);
-    blobStorage = new ConcurrentHashMap<>();
-    repository =
-        new BlobRepository(mockContainer) {
-          @Override
-          ByteBuffer fetchBlob(String key) {
-            blobKey = key;
-            blobFetched = true;
-            return blobData;
-          }
-
-          @Override
-          ByteBuffer fetchFromUrl(String key, String url) {
-            if (!Objects.equals(url, BlobRepositoryMockingTest.this.url)) return null;
-            blobKey = key;
-            blobFetched = true;
-            return filecontent;
-          }
-
-          @Override
-          @SuppressWarnings({"rawtypes"})
-          ConcurrentHashMap<String, BlobContent> createMap() {
-            return blobStorage;
-          }
-        };
-  }
-
-  @Test(expected = SolrException.class)
-  public void testCloudOnly() {
-    when(mockContainer.isZooKeeperAware()).thenReturn(false);
-    try {
-      repository.getBlobIncRef("foo!");
-    } catch (SolrException e) {
-      verify(mockContainer).isZooKeeperAware();
-      throw e;
-    }
-  }
-
-  @Test
-  public void testGetBlobIncrRefString() {
-    when(mockContainer.isZooKeeperAware()).thenReturn(true);
-    BlobRepository.BlobContentRef<ByteBuffer> ref = repository.getBlobIncRef("foo!");
-    assertEquals("foo!", blobKey);
-    assertTrue(blobFetched);
-    assertNotNull(ref.blob);
-    assertEquals(blobData, ref.blob.get());
-    verify(mockContainer).isZooKeeperAware();
-    assertNotNull(blobStorage.get("foo!"));
-  }
-
-  @Test
-  public void testGetBlobIncrRefByUrl() throws Exception {
-    when(mockContainer.isZooKeeperAware()).thenReturn(true);
-    filecontent = TestSolrConfigHandler.getFileContent("runtimecode/runtimelibs_v2.jar.bin");
-    url = "http://localhost:8080/myjar/location.jar";
-    BlobRepository.BlobContentRef<?> ref =
-        repository.getBlobIncRef(
-            "filefoo",
-            null,
-            url,
-            "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417");
-    assertEquals("filefoo", blobKey);
-    assertTrue(blobFetched);
-    assertNotNull(ref.blob);
-    assertEquals(filecontent, ref.blob.get());
-    verify(mockContainer).isZooKeeperAware();
-    try {
-      repository.getBlobIncRef("filefoo", null, url, "WRONG-SHA512-KEY");
-      fail("expected exception");
-    } catch (Exception e) {
-      assertTrue(e.getMessage().contains(" expected sha512 hash : WRONG-SHA512-KEY , actual :"));
-    }
-
-    url = null;
-    filecontent = null;
-  }
-
-  @Test
-  public void testCachedAlready() {
-    when(mockContainer.isZooKeeperAware()).thenReturn(true);
-    blobStorage.put("foo!", new BlobRepository.BlobContent<BlobRepository>("foo!", blobData));
-    BlobRepository.BlobContentRef<ByteBuffer> ref = repository.getBlobIncRef("foo!");
-    assertEquals("", blobKey);
-    assertFalse(blobFetched);
-    assertNotNull(ref.blob);
-    assertEquals(blobData, ref.blob.get());
-    verify(mockContainer).isZooKeeperAware();
-    assertNotNull("Key was not mapped to a BlobContent instance.", blobStorage.get("foo!"));
-  }
-
-  @Test
-  public void testGetBlobIncrRefStringDecoder() {
-    when(mockContainer.isZooKeeperAware()).thenReturn(true);
-    BlobRepository.BlobContentRef<Object> ref =
-        repository.getBlobIncRef(
-            "foo!",
-            new BlobRepository.Decoder<>() {
-              @Override
-              public String[][] decode(InputStream inputStream) {
-                StringWriter writer = new StringWriter();
-                try {
-                  new InputStreamReader(inputStream, UTF8).transferTo(writer);
-                } catch (IOException e) {
-                  throw new RuntimeException(e);
-                }
-
-                assertEquals(BLOBSTR, writer.toString());
-                return PARSED;
-              }
-
-              @Override
-              public String getName() {
-                return "mocked";
-              }
-            });
-    assertEquals("foo!", blobKey);
-    assertTrue(blobFetched);
-    assertNotNull(ref.blob);
-    assertEquals(PARSED, ref.blob.get());
-    verify(mockContainer).isZooKeeperAware();
-    assertNotNull(blobStorage.get("foo!mocked"));
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java b/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
deleted file mode 100644
index e653b1f..0000000
--- a/solr/core/src/test/org/apache/solr/handler/component/ResourceSharingTestComponent.java
+++ /dev/null
@@ -1,142 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler.component;
-
-import static org.junit.Assert.assertEquals;
-
-import java.io.BufferedReader;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.lang.invoke.MethodHandles;
-import java.nio.charset.StandardCharsets;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.stream.Stream;
-import org.apache.solr.common.params.ModifiableSolrParams;
-import org.apache.solr.common.params.SolrParams;
-import org.apache.solr.core.BlobRepository;
-import org.apache.solr.core.SolrCore;
-import org.apache.solr.util.plugin.SolrCoreAware;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-public class ResourceSharingTestComponent extends SearchComponent implements SolrCoreAware {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-  private SolrCore core;
-  private volatile BlobRepository.BlobContent<TestObject> blob;
-
-  @SuppressWarnings("SynchronizeOnNonFinalField")
-  @Override
-  public void prepare(ResponseBuilder rb) {
-    SolrParams params = rb.req.getParams();
-    ModifiableSolrParams mParams = new ModifiableSolrParams(params);
-    String q = "text:" + getTestObj().getLastCollection();
-    mParams.set("q", q); // search for the last collection name.
-    // This should cause the param to show up in the response...
-    rb.req.setParams(mParams);
-    getTestObj().setLastCollection(core.getCoreDescriptor().getCollectionName());
-  }
-
-  @Override
-  public void process(ResponseBuilder rb) {}
-
-  @Override
-  public String getDescription() {
-    return "ResourceSharingTestComponent";
-  }
-
-  TestObject getTestObj() {
-    return this.blob.get();
-  }
-
-  @SuppressWarnings("unchecked")
-  @Override
-  public void inform(SolrCore core) {
-    log.info("Informing test component...");
-    this.core = core;
-    this.blob = core.loadDecodeAndCacheBlob(getKey(), new DumbCsvDecoder()).blob;
-    log.info("Test component informed!");
-  }
-
-  private String getKey() {
-    return getResourceName() + "/" + getResourceVersion();
-  }
-
-  public String getResourceName() {
-    return "testResource";
-  }
-
-  public String getResourceVersion() {
-    return "1";
-  }
-
-  class DumbCsvDecoder implements BlobRepository.Decoder<TestObject> {
-    private final Map<String, String> dict = new HashMap<>();
-
-    public DumbCsvDecoder() {}
-
-    void processSimpleCsvRow(String string) {
-      String[] row = string.split(","); // dumbest csv parser ever... :)
-      getDict().put(row[0], row[1]);
-    }
-
-    public Map<String, String> getDict() {
-      return dict;
-    }
-
-    @Override
-    public TestObject decode(InputStream inputStream) {
-      // loading a tiny csv like:
-      //
-      // foo,bar
-      // baz,bam
-
-      try (Stream<String> lines =
-          new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8)).lines()) {
-        lines.forEach(this::processSimpleCsvRow);
-      } catch (Exception e) {
-        log.error("failed to read dictionary {}", getResourceName());
-        throw new RuntimeException("Cannot load  dictionary ", e);
-      }
-
-      assertEquals("bar", dict.get("foo"));
-      assertEquals("bam", dict.get("baz"));
-      if (log.isInfoEnabled()) {
-        log.info("Loaded {}  using {}", getDict().size(), this.getClass().getClassLoader());
-      }
-
-      // if we get here we have seen the data from the blob and all we need is to test that two
-      // collections are able to see the same object...
-      return new TestObject();
-    }
-  }
-
-  public static class TestObject {
-    public static final String NEVER_UPDATED = "never updated";
-    private volatile String lastCollection = NEVER_UPDATED;
-
-    public String getLastCollection() {
-      return this.lastCollection;
-    }
-
-    public void setLastCollection(String lastCollection) {
-      this.lastCollection = lastCollection;
-    }
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
index 57d5a6b..90a7685 100644
--- a/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
+++ b/solr/core/src/test/org/apache/solr/handler/designer/TestSchemaDesignerAPI.java
@@ -70,7 +70,7 @@
     configureCluster(1)
         .addConfig(DEFAULT_CONFIGSET_NAME, new File(ExternalPaths.DEFAULT_CONFIGSET).toPath())
         .configure();
-    // SchemaDesignerAPI depends on the blob store
+    // SchemaDesignerAPI depends on the blob store ".system" collection existing.
     CollectionAdminRequest.createCollection(BLOB_STORE_ID, 1, 1).process(cluster.getSolrClient());
     cluster.waitForActiveCollection(BLOB_STORE_ID, 1, 1);
   }
diff --git a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
index a712bbe..e1491a6 100644
--- a/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
+++ b/solr/solr-ref-guide/modules/upgrade-notes/pages/major-changes-in-solr-10.adoc
@@ -55,3 +55,7 @@
 * The node configuration file `/solr.xml` can no longer be loaded from Zookeeper. Solr startup will fail if it is present.
 
 * The legacy Circuit Breaker named `CircuitBreakerManager`, is removed. Please use individual Circuit Breaker plugins instead.
+
+* The `BlobRepository`, which was deprecated in 8.x in favour of the `FileStore` approach, is removed.
+Users should migrate to the `FileStore` implementation (per-node stored files) and the still-existing `BlobHandler` (cluster-wide storage backed by the `.system` collection).
+Please note this also removes the ability to share resource-intensive objects across multiple cores, as this feature was tied to the `BlobRepository` implementation.
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index f37038e..9889963 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -19,6 +19,7 @@
 import static java.nio.charset.StandardCharsets.UTF_8;
 import static java.util.Collections.singletonList;
 import static java.util.concurrent.TimeUnit.NANOSECONDS;
+import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
 
 import com.fasterxml.jackson.annotation.JsonAnyGetter;
 import java.io.ByteArrayInputStream;
@@ -38,10 +39,13 @@
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.lang.reflect.Modifier;
+import java.math.BigInteger;
 import java.net.URL;
 import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
 import java.util.AbstractMap;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -52,8 +56,10 @@
 import java.util.HashSet;
 import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Objects;
+import java.util.Random;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.TreeSet;
@@ -90,6 +96,31 @@
 
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
+  public static final Random RANDOM;
+
+  static {
+    // We try to make things reproducible in the context of our tests by initializing the random
+    // instance based on the current seed
+    String seed = System.getProperty("tests.seed");
+    if (seed == null) {
+      RANDOM = new Random();
+    } else {
+      RANDOM = new Random(seed.hashCode());
+    }
+  }
+
+  public static String sha512Digest(ByteBuffer byteBuffer) {
+    MessageDigest digest;
+    try {
+      digest = MessageDigest.getInstance("SHA-512");
+    } catch (NoSuchAlgorithmException e) {
+      // unlikely
+      throw new SolrException(SERVER_ERROR, e);
+    }
+    digest.update(byteBuffer);
+    return String.format(Locale.ROOT, "%0128x", new BigInteger(1, digest.digest()));
+  }
+
   @SuppressWarnings({"rawtypes"})
   public static Map getDeepCopy(Map<?, ?> map, int maxDepth) {
     return getDeepCopy(map, maxDepth, true, false);