SOLR-13661: Package management APIs, Package loading, Package store

Combining the following in a single commit (for easier review): SOLR-13707, SOLR-13659, SOLR-13565, SOLR-13650, SOLR-13710, SOLR-13721, SOLR-13637
diff --git a/solr/CHANGES.txt b/solr/CHANGES.txt
index 51436a6..bc27b0a 100644
--- a/solr/CHANGES.txt
+++ b/solr/CHANGES.txt
@@ -113,6 +113,8 @@
   when using compositeIds.  Document distribution is calculated using the "id_prefix" field (if it exists) containing
   just the compositeId prefixes, or directly from the indexed "id" field otherwise. (yonik, Megan Carey)
 
+* SOLR-13565: Node level runtime libs loaded from remote URLs (noble)
+
 * SOLR-13553: Node level custom RequestHandlers (noble)
 
 * SOLR-13622: Add cat() stream source to create tuples from lines in local files (Jason Gerlowski and Joel Bernstein)
@@ -122,9 +124,16 @@
 
 * SOLR-13682: command line option to export documents to a file (noble)
 
+* SOLR-13650: Solr can now define and add "packages" of plugins. Each plugin can choose to
+  load from one of those packages, and updating a package reloads its plugins independently (noble)
+
 * SOLR-13257: Support deterministic replica routing preferences for better cache usage (Michael Gibney
   via Christine Poerschke, Tomás Fernández Löbbe)
 
+* SOLR-13707: API to expose the package name and details currently used by each plugin (noble)
+
+* SOLR-13710: Persist package jars locally and expose them over HTTP at /api/node/blob (noble)
+
 * SOLR-13122: Ability to query aliases in Solr Admin UI (janhoy)
 
 * SOLR-13713: JWTAuthPlugin to support multiple JWKS endpoints (janhoy)
@@ -151,6 +160,8 @@
 
 * SOLR-6305: Ability to set the replication factor for index files created by HDFSDirectoryFactory (Boris Pasko via Kevin Risden)
 
+* SOLR-13677: All Metrics Gauges should be unregistered by the objects that registered them (noble)
+
 * SOLR-13702: Some components register twice their metric names (janhoy)
 
 * SOLR-11601: Improved error message when geodist(llpsf) is used with arguments referring to a LatLonPointSpatialField.
@@ -266,6 +277,8 @@
 
 * SOLR-13643: Add Getters/Setters in ResponseBuilder for analytics response handling (Neal Sidhwaney via Munendra S N)
 
+* SOLR-13659: Refactor CacheConfig to lazily load the implementation class (noble)
+
 * SOLR-13680: Use try-with-resource to close the closeable resource (Furkan KAMACI, Munendra S N)
 
 * SOLR-13573: Add SolrRangeQuery getters for upper, lower bound (Brian Rhees via Jason Gerlowski)
diff --git a/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
new file mode 100644
index 0000000..45b3cba
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/api/AnnotatedApi.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.api;
+
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Modifier;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.AuthorizationContext;
+import org.apache.solr.security.PermissionNameProvider;
+
+public class AnnotatedApi extends Api implements PermissionNameProvider {
+  private EndPoint endPoint;
+  private Map<String, Cmd> commands = new HashMap<>();
+
+
+  public AnnotatedApi(Object obj) {
+    super(Utils.getSpec(readSpec(obj.getClass())));
+    Class<?> klas = obj.getClass();
+    if (!Modifier.isPublic(klas.getModifiers())) {
+      throw new RuntimeException(obj.getClass().getName() + " is not public");
+    }
+
+    endPoint = klas.getAnnotation(EndPoint.class);
+
+    for (Method m : klas.getDeclaredMethods()) {
+      Command command = m.getAnnotation(Command.class);
+      if (command == null) continue;
+
+      if (commands.containsKey(command.name())) {
+        throw new RuntimeException("Duplicate commands " + command.name());
+      }
+      commands.put(command.name(), new Cmd(command, obj, m));
+    }
+
+  }
+
+  @Override
+  public Name getPermissionName(AuthorizationContext request) {
+    return endPoint.permission();
+  }
+
+  private static String readSpec(Class klas) {
+    EndPoint endPoint = (EndPoint) klas.getAnnotation(EndPoint.class);
+    return endPoint.spec();
+
+  }
+
+  @Override
+  public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
+    if (commands.size() == 1) {
+      Cmd cmd = commands.get("");
+      if (cmd != null) {
+        cmd.invoke(req, rsp, null);
+        return;
+      }
+    }
+
+    List<CommandOperation> cmds = req.getCommands(true);
+    boolean allExists = true;
+    for (CommandOperation cmd : cmds) {
+      if (!commands.containsKey(cmd.name)) {
+        cmd.addError("No such command supported :" + cmd.name);
+        allExists = false;
+      }
+    }
+    if (!allExists) {
+      throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing commands",
+          CommandOperation.captureErrors(cmds));
+    }
+
+    for (CommandOperation cmd : cmds) {
+      commands.get(cmd.name).invoke(req, rsp, cmd);
+    }
+
+    List<Map> errs = CommandOperation.captureErrors(cmds);
+    if (!errs.isEmpty()) {
+      throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "Error in executing commands", errs);
+    }
+
+  }
+
+  class Cmd {
+    final Command command;
+    final Method method;
+    final Object obj;
+
+    Cmd(Command command, Object obj, Method method) {
+      if (Modifier.isPublic(method.getModifiers())) {
+        this.command = command;
+        this.obj = obj;
+        this.method = method;
+        Class<?>[] parameterTypes = method.getParameterTypes();
+        if (parameterTypes.length != 1 || parameterTypes[0] != CallInfo.class) {
+          throw new RuntimeException("Invalid params for method " + method);
+        }
+      } else {
+        throw new RuntimeException(method.toString() + " is not a public method");
+      }
+
+    }
+
+    void invoke(SolrQueryRequest req, SolrQueryResponse rsp, CommandOperation cmd) {
+      try {
+        method.invoke(obj, new CallInfo(req, rsp, cmd));
+      } catch (SolrException se) {
+        throw se;
+      } catch (Exception e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+      }
+
+    }
+  }
+
+}
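
Review note: a minimal sketch (hypothetical class, spec name, and command) of how the annotation-driven API above is intended to be used. A public class carries an @EndPoint annotation naming its spec file and permission, each command is a public method that takes a single CallInfo, and the instance is wrapped in an AnnotatedApi and registered, as CoreContainer does for ContainerRequestHandlers later in this patch.

    // Hypothetical example; "myplugin.spec" and the command logic are placeholders.
    @EndPoint(spec = "myplugin.spec", permission = PermissionNameProvider.Name.CUSTOM_PERM)
    public class MyPluginApi {

      // Handles the JSON command named "add-config"; the method must be public
      // and accept exactly one CallInfo argument.
      @Command(name = "add-config")
      public void addConfig(CallInfo info) {
        info.rsp.add("received", info.command.getDataMap());
      }
    }

    // Registration, following what CoreContainer.load() does in this patch:
    //   containerHandlers.put(new AnnotatedApi(new MyPluginApi()));
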
diff --git a/solr/core/src/java/org/apache/solr/api/ApiBag.java b/solr/core/src/java/org/apache/solr/api/ApiBag.java
index 8a3f972..bfeb0ef 100644
--- a/solr/core/src/java/org/apache/solr/api/ApiBag.java
+++ b/solr/core/src/java/org/apache/solr/api/ApiBag.java
@@ -230,22 +230,28 @@
   }
 
   public static class ReqHandlerToApi extends Api implements PermissionNameProvider {
-    SolrRequestHandler rh;
+    PluginBag.PluginHolder<SolrRequestHandler> rh;
 
     public ReqHandlerToApi(SolrRequestHandler rh, SpecProvider spec) {
       super(spec);
+      this.rh = new PluginBag.PluginHolder<>(new PluginInfo(SolrRequestHandler.TYPE, Collections.emptyMap()), rh);
+    }
+
+    public ReqHandlerToApi(PluginBag.PluginHolder<SolrRequestHandler> rh, SpecProvider spec) {
+      super(spec);
       this.rh = rh;
     }
 
     @Override
     public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
-      rh.handleRequest(req, rsp);
+      rh.get().handleRequest(req, rsp);
     }
 
     @Override
     public Name getPermissionName(AuthorizationContext ctx) {
-      if (rh instanceof PermissionNameProvider) {
-        return ((PermissionNameProvider) rh).getPermissionName(ctx);
+      SolrRequestHandler handler = rh.get();
+      if (handler instanceof PermissionNameProvider) {
+        return ((PermissionNameProvider) handler).getPermissionName(ctx);
       }
       return null;
     }
@@ -339,22 +345,22 @@
   }
 
   public static class LazyLoadedApi extends Api {
-
-    private final PluginBag.PluginHolder<SolrRequestHandler> holder;
     private Api delegate;
 
     protected LazyLoadedApi(SpecProvider specProvider, PluginBag.PluginHolder<SolrRequestHandler> lazyPluginHolder) {
       super(specProvider);
-      this.holder = lazyPluginHolder;
+      delegate = new ReqHandlerToApi(lazyPluginHolder, spec);
     }
 
     @Override
     public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
-      if (!holder.isLoaded()) {
-        delegate = new ReqHandlerToApi(holder.get(), ApiBag.EMPTY_SPEC);
-      }
       delegate.call(req, rsp);
     }
+
+    @Override
+    public ValidatingJsonMap getSpec() {
+      return super.getSpec();
+    }
   }
 
 }
diff --git a/solr/core/src/java/org/apache/solr/api/CallInfo.java b/solr/core/src/java/org/apache/solr/api/CallInfo.java
new file mode 100644
index 0000000..f5ae198
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/api/CallInfo.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.api;
+
+import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+
+public class CallInfo {
+  public final SolrQueryRequest req;
+  public final SolrQueryResponse rsp;
+  public final CommandOperation command;
+
+  public CallInfo(SolrQueryRequest req, SolrQueryResponse rsp,  CommandOperation command) {
+    this.req = req;
+    this.rsp = rsp;
+    this.command = command;
+  }
+}
diff --git a/solr/core/src/java/org/apache/solr/api/Command.java b/solr/core/src/java/org/apache/solr/api/Command.java
new file mode 100644
index 0000000..45303d6
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/api/Command.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.api;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target(ElementType.METHOD)
+public @interface Command {
+  /**If this is not a JSON command, leave it empty.
+   * Keep in mind that command names cannot be duplicated:
+   * only one method is allowed per name.
+   */
+  String name() default "";
+}
diff --git a/solr/core/src/java/org/apache/solr/api/EndPoint.java b/solr/core/src/java/org/apache/solr/api/EndPoint.java
new file mode 100644
index 0000000..12f4897
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/api/EndPoint.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.api;
+
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+
+import org.apache.solr.security.PermissionNameProvider;
+
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE})
+public @interface EndPoint {
+  /**name of the API spec file without the '.json' suffix
+   */
+  String spec();
+
+
+  /**The well-known permission name, if any.
+   */
+  PermissionNameProvider.Name permission();
+}
diff --git a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
index 957b321..17a6ec3 100644
--- a/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
+++ b/solr/core/src/java/org/apache/solr/cloud/ReplicateFromLeader.java
@@ -133,7 +133,7 @@
 
   public void stopReplication() {
     if (replicationProcess != null) {
-      replicationProcess.close();
+      replicationProcess.shutdown();
     }
   }
 }
diff --git a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
index f32669c..6023f43 100644
--- a/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
+++ b/solr/core/src/java/org/apache/solr/cloud/autoscaling/IndexSizeTrigger.java
@@ -24,13 +24,13 @@
 import java.util.HashMap;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.Set;
 import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
-import java.util.Locale;
 
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.ReplicaInfo;
diff --git a/solr/core/src/java/org/apache/solr/core/BlobRepository.java b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
index 24bb88e..5517699 100644
--- a/solr/core/src/java/org/apache/solr/core/BlobRepository.java
+++ b/solr/core/src/java/org/apache/solr/core/BlobRepository.java
@@ -16,17 +16,14 @@
  */
 package org.apache.solr.core;
 
+import java.io.IOException;
 import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
-import java.math.BigInteger;
 import java.nio.ByteBuffer;
-import java.security.MessageDigest;
-import java.security.NoSuchAlgorithmException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Locale;
 import java.util.Map;
 import java.util.Random;
 import java.util.Set;
@@ -34,6 +31,7 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.regex.Pattern;
 
+import org.apache.commons.codec.digest.DigestUtils;
 import org.apache.http.HttpEntity;
 import org.apache.http.HttpResponse;
 import org.apache.http.client.HttpClient;
@@ -45,14 +43,12 @@
 import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionAdminParams;
-import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.util.SimplePostTool;
 import org.apache.zookeeper.server.ByteBufferInputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
 import static org.apache.solr.common.SolrException.ErrorCode.SERVICE_UNAVAILABLE;
 import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
 
@@ -60,7 +56,7 @@
  * The purpose of this class is to store the Jars loaded in memory and to keep only one copy of the Jar in a single node.
  */
 public class BlobRepository {
-  private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtme.lib.size", String.valueOf(5 * 1024 * 1024)));
+  private static final long MAX_JAR_SIZE = Long.parseLong(System.getProperty("runtime.lib.size", String.valueOf(5 * 1024 * 1024)));
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   static final Random RANDOM;
   static final Pattern BLOB_KEY_PATTERN_CHECKER = Pattern.compile(".*/\\d+");
@@ -76,6 +72,7 @@
     }
   }
 
+
   private final CoreContainer coreContainer;
   private Map<String, BlobContent> blobs = createMap();
 
@@ -88,6 +85,7 @@
     this.coreContainer = coreContainer;
   }
 
+
   // I wanted to {@link SolrCore#loadDecodeAndCacheBlob(String, Decoder)} below but precommit complains
 
   /**
@@ -116,13 +114,6 @@
     return getBlobIncRef(key.concat(decoder.getName()), () -> addBlob(key, decoder));
   }
 
-  BlobContentRef getBlobIncRef(String key, Decoder decoder, String url, String sha512) {
-    StringBuffer keyBuilder = new StringBuffer(key);
-    if (decoder != null) keyBuilder.append(decoder.getName());
-    keyBuilder.append("/").append(sha512);
-
-    return getBlobIncRef(keyBuilder.toString(), () -> new BlobContent<>(key, fetchBlobAndVerify(key, url, sha512), decoder));
-  }
 
   // do the actual work returning the appropriate type...
   private <T> BlobContentRef<T> getBlobIncRef(String key, Callable<BlobContent<T>> blobCreator) {
@@ -166,31 +157,14 @@
     return aBlob;
   }
 
-  static String INVALID_JAR_MSG = "Invalid jar from {0} , expected sha512 hash : {1} , actual : {2}";
 
-  private ByteBuffer fetchBlobAndVerify(String key, String url, String sha512) {
-    ByteBuffer byteBuffer = fetchFromUrl(key, url);
-    String computedDigest = sha512Digest(byteBuffer);
-    if (!computedDigest.equals(sha512)) {
-      throw new SolrException(SERVER_ERROR, StrUtils.formatString(INVALID_JAR_MSG, url, sha512, computedDigest));
 
-    }
-    return byteBuffer;
-  }
-
-  public static String sha512Digest(ByteBuffer byteBuffer) {
-    MessageDigest digest = null;
+  public static String sha256Digest(ByteBuffer buf) {
     try {
-      digest = MessageDigest.getInstance("SHA-512");
-    } catch (NoSuchAlgorithmException e) {
-      //unlikely
-      throw new SolrException(SERVER_ERROR, e);
+      return DigestUtils.sha256Hex(new ByteBufferInputStream(ByteBuffer.wrap(buf.array(), buf.arrayOffset(), buf.limit())));
+    } catch (IOException e) {
+      throw new RuntimeException("Unable to compute sha256", e);
     }
-    digest.update(byteBuffer);
-    return String.format(
-        Locale.ROOT,
-        "%0128x",
-        new BigInteger(1, digest.digest()));
   }
 
 
@@ -214,13 +188,14 @@
       entity = response.getEntity();
       int statusCode = response.getStatusLine().getStatusCode();
       if (statusCode != 200) {
-        throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such blob or version available: " + key);
+        throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "no such resource available: " + key + ", url : " + url);
       }
 
       try (InputStream is = entity.getContent()) {
         b = SimplePostTool.inputStreamToByteArray(is, MAX_JAR_SIZE);
       }
     } catch (Exception e) {
+      log.error("Error loading resource " + url, e);
       if (e instanceof SolrException) {
         throw (SolrException) e;
       } else {
@@ -281,6 +256,7 @@
     }
   }
 
+
   public static class BlobContent<T> {
     public final String key;
     private final T content; // holds byte buffer or cached object, holding both is a waste of memory
@@ -334,7 +310,7 @@
   public static class BlobContentRef<T> {
     public final BlobContent<T> blob;
 
-    private BlobContentRef(BlobContent<T> blob) {
+    public BlobContentRef(BlobContent<T> blob) {
       this.blob = blob;
     }
   }
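
Review note: the sha512-based verification in BlobRepository is replaced above by a commons-codec backed sha256 helper. A hedged sketch of how a caller could compute the digest of a local jar with the new utility (the helper name and path are illustrative):

    // Illustrative helper only: hex-encoded sha256 of a jar's bytes via the new utility.
    static String digestOf(java.nio.file.Path jar) throws java.io.IOException {
      byte[] bytes = java.nio.file.Files.readAllBytes(jar);
      return BlobRepository.sha256Digest(java.nio.ByteBuffer.wrap(bytes));
    }
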
diff --git a/solr/core/src/java/org/apache/solr/core/ContainerRequestHandlers.java b/solr/core/src/java/org/apache/solr/core/ContainerRequestHandlers.java
new file mode 100644
index 0000000..86ec41d
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/ContainerRequestHandlers.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.solr.api.CallInfo;
+import org.apache.solr.api.Command;
+import org.apache.solr.api.EndPoint;
+import org.apache.solr.api.V2HttpCall;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.schema.FieldType;
+import org.apache.solr.security.PermissionNameProvider;
+import org.apache.solr.util.plugin.PluginInfoInitialized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.params.CommonParams.PACKAGE;
+import static org.apache.solr.common.params.CommonParams.VERSION;
+import static org.apache.solr.core.PluginBag.closeQuietly;
+
+@EndPoint(spec = "node.ext", permission = PermissionNameProvider.Name.CUSTOM_PERM)
+public class ContainerRequestHandlers {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  final CoreContainer coreContainer;
+
+  private Map<String, Handler> customHandlers = new HashMap<>();
+
+  ContainerRequestHandlers(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+  }
+
+  public void handleRequestBody(SolrQueryRequest req, SolrQueryResponse rsp) {
+    int v = req.getParams().getInt(ConfigOverlay.ZNODEVER, -1);
+    if (v >= 0) {
+      log.debug("expected version : {} , my version {}", v, coreContainer.getPackageBag().myVersion);
+      ZkStateReader zkStateReader = coreContainer.getZkController().getZkStateReader();
+      try {
+        zkStateReader.forceRefreshClusterProps(v);
+      } catch (SolrException e) {
+        log.error("Error refreshing state ", e);
+        throw e;
+      }
+    }
+    rsp.add("metadata", (MapWriter) ew -> ew.put(VERSION,
+        coreContainer.getZkController().zkStateReader.getClusterPropsVersion()));
+    rsp.add(SolrRequestHandler.TYPE, customHandlers.values());
+
+  }
+
+  @Command
+  public void call(CallInfo info) {
+    String name = ((V2HttpCall) info.req.getHttpSolrCall()).getUrlParts().get("handlerName");
+    if (name == null) {
+      handleRequestBody(info.req, info.rsp);
+      return;
+    }
+    Handler wrapper = customHandlers.get(name);
+    if (wrapper == null) {
+      String err = StrUtils.formatString("No such handler: {0}, available handlers: {1}", name, customHandlers.keySet());
+      log.error(err);
+      throw new SolrException(SolrException.ErrorCode.NOT_FOUND, err);
+    }
+    wrapper.handler.handleRequest(info.req, info.rsp);
+
+  }
+
+
+  void updateReqHandlers(Map<String, Object> properties) {
+    Map m = (Map) properties.getOrDefault(SolrRequestHandler.TYPE, Collections.emptyMap());
+    if (m.isEmpty() && customHandlers.isEmpty()) return;
+    if (customHandlers.size() == m.size() && customHandlers.keySet().containsAll(m.keySet())) return;
+    log.debug("RequestHandlers being reloaded : {}", m.keySet());
+    Map<String, Handler> newCustomHandlers = new HashMap<>();
+    List<Handler> toBeClosed = new ArrayList<>();
+    for (Object o : m.entrySet()) {
+      Object v = ((Map.Entry) o).getValue();
+      String name = (String) ((Map.Entry) o).getKey();
+      if (v instanceof Map) {
+        Map metaData = (Map) v;
+        Handler existing = customHandlers.get(name);
+        if (existing == null || !existing.meta.equals(metaData)) {
+          String klas = (String) metaData.get(FieldType.CLASS_NAME);
+          if (klas != null) {
+            newCustomHandlers.put(name, new Handler(metaData));
+          } else {
+            log.error("Invalid requestHandler {}", Utils.toJSONString(v));
+          }
+          if (existing != null) {
+            toBeClosed.add(existing);
+          }
+
+        } else {
+          newCustomHandlers.put(name, existing);
+        }
+
+      } else {
+        log.error("Invalid data for requestHandler : {} , {}", name, v);
+      }
+    }
+
+    log.debug("Registering request handlers {} ", newCustomHandlers.keySet());
+    Map<String, Handler> old = customHandlers;
+    for (Map.Entry<String, Handler> e : old.entrySet()) {
+      if (!newCustomHandlers.containsKey(e.getKey())) {
+        toBeClosed.add(e.getValue());
+      }
+    }
+    customHandlers = newCustomHandlers;
+    for (Handler wrapper : toBeClosed) {
+      closeQuietly(wrapper);
+    }
+  }
+
+  private SolrRequestHandler createHandler(Map metaData) {
+    String pkg = (String) metaData.get(PACKAGE);
+    SolrRequestHandler inst = coreContainer.getPackageBag().newInstance((String) metaData.get(FieldType.CLASS_NAME),
+        SolrRequestHandler.class, pkg);
+    if (inst instanceof PluginInfoInitialized) {
+      ((PluginInfoInitialized) inst).init(new PluginInfo(SolrRequestHandler.TYPE, metaData));
+    }
+    return inst;
+  }
+
+
+  class Handler implements MapWriter, PackageListeners.Listener, AutoCloseable {
+    SolrRequestHandler handler;
+    final String pkg;
+    int zkversion;
+    PluginInfo meta;
+    PackageBag.PackageInfo packageInfo;
+    String name;
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      ew.put(NAME, name);
+      ew.put(ConfigOverlay.ZNODEVER, zkversion);
+      meta.attributes.forEach(ew.getBiConsumer());
+    }
+
+    Handler(Map meta) {
+      this.meta = new PluginInfo(SolrRequestHandler.TYPE, meta);
+      pkg = (String) meta.get("package");
+      this.handler = createHandler(meta);
+      if (pkg != null) {
+        this.packageInfo = coreContainer.getPackageBag().getPackageInfo(pkg);
+        coreContainer.getPackageBag().listenerRegistry.addListener(this);
+      }
+
+
+    }
+
+    @Override
+    public String packageName() {
+      return pkg;
+    }
+
+    @Override
+    public PluginInfo pluginInfo() {
+      return meta;
+    }
+
+    @Override
+    public void changed(PackageBag.PackageInfo info) {
+      if (this.packageInfo.znodeVersion < info.znodeVersion) {
+        this.handler = createHandler(meta.attributes);
+        this.packageInfo = info;
+      }
+
+    }
+
+    @Override
+    public void close() throws Exception {
+      closeQuietly(handler);
+    }
+
+    @Override
+    public PackageBag.PackageInfo packageInfo() {
+      return packageInfo;
+    }
+  }
+}
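
Review note: a hedged sketch of the cluster-properties shape that updateReqHandlers() above consumes. The handler name, class, and package values are placeholders; the "class" and "package" keys mirror what createHandler() reads (FieldType.CLASS_NAME and CommonParams.PACKAGE).

    // Sketch only: builds the kind of map passed to updateReqHandlers(Map<String, Object>).
    static Map<String, Object> exampleHandlerProps() {
      return Utils.makeMap(SolrRequestHandler.TYPE,          // "requestHandler"
          Utils.makeMap("/myHandler",
              Utils.makeMap(
                  "class", "com.example.MyHandler",          // plugin implementation class
                  "package", "mypkg")));                     // optional package to load it from
    }
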
diff --git a/solr/core/src/java/org/apache/solr/core/CoreContainer.java b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
index b50ed6d..8dbe1e3 100644
--- a/solr/core/src/java/org/apache/solr/core/CoreContainer.java
+++ b/solr/core/src/java/org/apache/solr/core/CoreContainer.java
@@ -47,6 +47,7 @@
 import org.apache.lucene.index.IndexWriter;
 import org.apache.lucene.search.IndexSearcher;
 import org.apache.lucene.store.Directory;
+import org.apache.solr.api.AnnotatedApi;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.cloud.SolrCloudManager;
 import org.apache.solr.client.solrj.embedded.EmbeddedSolrServer;
@@ -192,6 +193,10 @@
 
   private final BlobRepository blobRepository = new BlobRepository(this);
 
+  private final DistribFileStore fileStore = new DistribFileStore(this);
+
+  final ContainerRequestHandlers containerRequestHandlers = new ContainerRequestHandlers(this);
+
   private volatile PluginBag<SolrRequestHandler> containerHandlers = new PluginBag<>(SolrRequestHandler.class, null);
 
   private volatile boolean asyncSolrCoreLoad;
@@ -218,6 +223,8 @@
 
   protected volatile AutoscalingHistoryHandler autoscalingHistoryHandler;
 
+  private final PackageBag packageBag = new PackageBag(this);
+
 
   // Bits for the state variable.
   public final static long LOAD_COMPLETE = 0x1L;
@@ -625,6 +632,7 @@
 
     zkSys.initZooKeeper(this, solrHome, cfg.getCloudConfig());
     if (isZooKeeperAware()) {
+      getZkController().getZkStateReader().registerClusterPropertiesListener(packageBag);
       pkiAuthenticationPlugin = new PKIAuthenticationPlugin(this, zkSys.getZkController().getNodeName(),
           (PublicKeyHandler) containerHandlers.get(PublicKeyHandler.PATH));
       pkiAuthenticationPlugin.initializeMetrics(metricManager, SolrInfoBean.Group.node.toString(), metricTag, "/authentication/pki");
@@ -637,6 +645,8 @@
     reloadSecurityProperties();
     this.backupRepoFactory = new BackupRepositoryFactory(cfg.getBackupRepositoryPlugins());
 
+    containerHandlers.put(new AnnotatedApi(containerRequestHandlers));
+    containerHandlers.put(new AnnotatedApi(fileStore.fileStoreRead));
     createHandler(ZK_PATH, ZookeeperInfoHandler.class.getName(), ZookeeperInfoHandler.class);
     createHandler(ZK_STATUS_PATH, ZookeeperStatusHandler.class.getName(), ZookeeperStatusHandler.class);
     collectionsHandler = createHandler(COLLECTIONS_HANDLER_PATH, cfg.getCollectionsHandlerClass(), CollectionsHandler.class);
@@ -1537,7 +1547,7 @@
       } catch (SolrCoreState.CoreIsClosedException e) {
         throw e;
       } catch (Exception e) {
-        coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, (Exception) e));
+        coreInitFailures.put(cd.getName(), new CoreLoadFailure(cd, e));
         throw new SolrException(ErrorCode.SERVER_ERROR, "Unable to reload core [" + cd.getName() + "]", e);
       } finally {
         if (!success && newCore != null && newCore.getOpenCount() > 0) {
@@ -1755,6 +1765,10 @@
     return blobRepository;
   }
 
+  public DistribFileStore getFileStore(){
+    return fileStore;
+  }
+
   /**
    * If using asyncSolrCoreLoad=true, calling this after {@link #load()} will
    * not return until all cores have finished loading.
@@ -1782,6 +1796,14 @@
     return handler;
   }
 
+  public PluginBag<SolrRequestHandler> getContainerHandlers() {
+    return containerHandlers;
+  }
+
+  public PackageBag getPackageBag(){
+    return packageBag;
+  }
+
   public CoreAdminHandler getMultiCoreHandler() {
     return coreAdminHandler;
   }
@@ -1895,6 +1917,10 @@
     return auditloggerPlugin == null ? null : auditloggerPlugin.plugin;
   }
 
+  public ContainerRequestHandlers getContainerRequestHandlers(){
+    return containerRequestHandlers;
+  }
+
   public NodeConfig getNodeConfig() {
     return cfg;
   }
diff --git a/solr/core/src/java/org/apache/solr/core/DistribFileStore.java b/solr/core/src/java/org/apache/solr/core/DistribFileStore.java
new file mode 100644
index 0000000..7d6a73b
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/DistribFileStore.java
@@ -0,0 +1,360 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.function.Consumer;
+
+import org.apache.http.client.HttpClient;
+import org.apache.lucene.util.IOUtils;
+import org.apache.solr.api.CallInfo;
+import org.apache.solr.api.Command;
+import org.apache.solr.api.EndPoint;
+import org.apache.solr.api.V2HttpCall;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.request.SolrQueryRequest;
+import org.apache.solr.response.SolrQueryResponse;
+import org.apache.solr.security.PermissionNameProvider;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.MapWriter.EMPTY;
+import static org.apache.solr.common.SolrException.ErrorCode.BAD_REQUEST;
+import static org.apache.solr.common.SolrException.ErrorCode.SERVER_ERROR;
+import static org.apache.solr.common.params.CommonParams.PACKAGE;
+import static org.apache.solr.core.BlobRepository.sha256Digest;
+import static org.apache.solr.handler.ReplicationHandler.FILE_STREAM;
+
+/**
+ * This class implements the new peer-to-peer file store. A file is identified
+ * by its sha256 digest plus its file name. Each node acts as a server from which
+ * users or other nodes can download files, and it is also responsible for
+ * distributing a file across all nodes in the cluster.
+ */
+
+public class DistribFileStore {
+  static final long MAX_PKG_SIZE = Long.parseLong(System.getProperty("max.package.size", String.valueOf(100 * 1024 * 1024)));
+
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  private final CoreContainer coreContainer;
+
+  private Map<String, ByteBuffer> tmpFiles = new ConcurrentHashMap<>();
+
+  final FileStoreRead fileStoreRead = new FileStoreRead();
+
+  public DistribFileStore(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+  }
+
+  public MapWriter fileList(SolrParams params) {
+    String id = params.get(CommonParams.ID);
+    File dir = getFileStorePath().toFile();
+
+    String fromNode = params.get("fromNode");
+    if (id != null && fromNode != null) {
+      //asking to fetch it from somewhere if it does not exist locally
+      if (!new File(dir, id).exists()) {
+        if ("*".equals(fromNode)) {
+          //asking to fetch from a random node
+          fetchFromOtherNodes(id);
+          return EMPTY;
+        } else { // asking to fetch from a specific node
+          fetchFileFromNodeAndPersist(id, fromNode);
+          return MapWriter.EMPTY;
+        }
+      }
+    }
+    return ew -> dir.listFiles((f, name) -> {
+      if (id == null || name.equals(id)) {
+        ew.putNoEx(name, (MapWriter) ew1 -> {
+          File file = new File(f, name);
+          ew1.put("size", file.length());
+          ew1.put("timestamp", new Date(file.lastModified()));
+        });
+      }
+      return false;
+    });
+  }
+
+  public Path getFileStorePath() {
+    Path blobsDirPath = SolrResourceLoader.getFileStoreDirPath(this.coreContainer.getResourceLoader().getInstancePath());
+    return new File(blobsDirPath.toFile(), PACKAGE).toPath();
+  }
+
+  private ByteBuffer fetchFromOtherNodes(String id) {
+    ByteBuffer[] result = new ByteBuffer[1];
+    ArrayList<String> l = shuffledNodes();
+    ModifiableSolrParams solrParams = new ModifiableSolrParams();
+    solrParams.add(CommonParams.ID, id);
+    ZkStateReader stateReader = coreContainer.getZkController().getZkStateReader();
+    for (String liveNode : l) {
+      try {
+        String baseurl = stateReader.getBaseUrlForNodeName(liveNode);
+        String url = baseurl.replace("/solr", "/api");
+        String reqUrl = url + "/node/filestore/package?wt=javabin&omitHeader=true&id=" + id;
+        boolean nodeHasBlob = false;
+        Object nl = Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), reqUrl, Utils.JAVABINCONSUMER);
+        if (Utils.getObjectByPath(nl, false, Arrays.asList("blob", id)) != null) {
+          nodeHasBlob = true;
+        }
+
+        if (nodeHasBlob) {
+          result[0] = fetchFileFromNodeAndPersist(id, liveNode);
+          if (result[0] != null) break;
+        }
+      } catch (Exception e) {
+        //it's OK for some nodes to fail
+      }
+    }
+
+    return result[0];
+  }
+
+  /**
+   * Get a list of the live nodes, randomly shuffled.
+   * @lucene.internal
+   */
+  public ArrayList<String> shuffledNodes() {
+    Set<String> liveNodes = coreContainer.getZkController().getZkStateReader().getClusterState().getLiveNodes();
+    ArrayList<String> l = new ArrayList<>(liveNodes);
+    Collections.shuffle(l, BlobRepository.RANDOM);
+    return l;
+  }
+
+  public static class FileObjName {
+    final String sha256;
+    final String fname;
+
+    FileObjName(String name) {
+      int idx = name.indexOf('-');
+      if (idx == -1) {
+        sha256 = name;
+        fname = null;
+        return;
+      } else {
+        sha256 = name.substring(0, idx);
+        fname = name.substring(idx + 1);
+      }
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof FileObjName) {
+        FileObjName that = (FileObjName) obj;
+        return Objects.equals(this.sha256, that.sha256) && Objects.equals(this.fname, that.fname);
+
+
+      }
+      return false;
+    }
+
+    public String name() {
+      return fname == null ? sha256 : sha256 + "-" + fname;
+    }
+
+    @Override
+    public String toString() {
+      return name();
+    }
+  }
+
+
+  private void persistToFile(ByteBuffer b, String id) throws IOException {
+    FileObjName fileObjName = new FileObjName(id);
+    String actual = sha256Digest(b);
+    if (!Objects.equals(actual, fileObjName.sha256)) {
+      throw new SolrException(SERVER_ERROR, "invalid id for blob actual: " + actual + " expected : " + fileObjName.sha256);
+    }
+    File file = new File(getFileStorePath().toFile(), id);
+    try (FileOutputStream fos = new FileOutputStream(file)) {
+      fos.write(b.array(), 0, b.limit());
+    }
+    log.info("persisted a blob {} ", id);
+    IOUtils.fsync(file.toPath(), false);
+  }
+
+
+  boolean fetchFile(String id) {
+    File f = new File(getFileStorePath().toFile(), id);
+    if (f.exists()) return true;
+    fetchFromOtherNodes(id);
+    return f.exists();
+  }
+
+  /**
+   * Read a blob from the blobstore file system
+   */
+  public void readFile(String id, Consumer<InputStream> consumer) throws IOException {
+    if (!fetchFile(id)) throw new FileNotFoundException("No such file: " + id);
+    File f = new File(getFileStorePath().toFile(), id);
+    try (InputStream is = new FileInputStream(f)) {
+      consumer.accept(is);
+    }
+  }
+
+  /**
+   * This distributes a blob to all nodes in the cluster
+   * *USE CAREFULLY*
+   */
+  public void distributeFile(ByteBuffer buf, String id) throws IOException {
+    persistToFile(buf, id);
+    tmpFiles.put(id, buf);
+    List<String> nodes = coreContainer.getFileStore().shuffledNodes();
+    int i = 0;
+    int FETCHFROM_SRC = 50;
+    try {
+      for (String node : nodes) {
+        String baseUrl = coreContainer.getZkController().getZkStateReader().getBaseUrlForNodeName(node);
+        String url = baseUrl.replace("/solr", "/api") + "/node/filestore/package?id=" + id + "&fromNode=";
+        if (i < FETCHFROM_SRC) {
+          // this is to protect very large clusters from overwhelming a single node
+          // the first FETCHFROM_SRC nodes will be asked to fetch from this node.
+          // it is still in memory now, so it can be served fast
+          url += coreContainer.getZkController().getNodeName();
+        } else {
+          if (i == FETCHFROM_SRC) {
+            // This is just an optimization
+            // at this point a bunch of nodes are already downloading from me
+            // I'll wait for them to finish before asking other nodes to download from each other
+            try {
+              Thread.sleep(2 * 1000);
+            } catch (Exception e) {
+            }
+          }
+          // trying to avoid the thundering herd problem when there is a very large number of nodes;
+          // others should try to fetch it from any node where it is available. By now,
+          // almost FETCHFROM_SRC other nodes may have it
+          url += "*";
+        }
+        try {
+          //fire and forget
+          Utils.executeGET(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), url, null);
+        } catch (Exception e) {
+          log.info("Node: " + node +
+              " failed to respond for blob notification", e);
+          //ignore the exception
+          // some nodes may be down or not responding
+        }
+        i++;
+      }
+    } finally {
+      new Thread(() -> {
+        try {
+          // keep the jar in memory for 10 seconds, so that
+          // every node can download it from memory without hitting the file system
+          Thread.sleep(10 * 1000);
+        } catch (Exception e) {
+          //don't care
+        } finally {
+          coreContainer.getFileStore().tmpFiles.remove(id);
+        }
+      }).start();
+
+
+    }
+
+  }
+
+
+  private ByteBuffer fetchFileFromNodeAndPersist(String id, String fromNode) {
+    log.info("fetching a blob {} from {} ", id, fromNode);
+    ByteBuffer[] result = new ByteBuffer[1];
+    String url = coreContainer.getZkController().getZkStateReader().getBaseUrlForNodeName(fromNode);
+    if (url == null) throw new SolrException(BAD_REQUEST, "No such node");
+    coreContainer.getUpdateShardHandler().getUpdateExecutor().submit(() -> {
+      String fromUrl = url.replace("/solr", "/api") + "/node/filestore/package/" + id;
+      try {
+        HttpClient httpClient = coreContainer.getUpdateShardHandler().getDefaultHttpClient();
+        result[0] = Utils.executeGET(httpClient, fromUrl, Utils.newBytesConsumer((int) MAX_PKG_SIZE));
+        String actualSha256 = sha256Digest(result[0]);
+        FileObjName fileObjName = new FileObjName(id);
+        if (fileObjName.sha256.equals(actualSha256)) {
+          persistToFile(result[0], id);
+        } else {
+          result[0] = null;
+          log.error("expected sha256 : {} actual sha256: {} from blob downloaded from {} ", fileObjName.sha256, actualSha256, fromNode);
+        }
+      } catch (IOException e) {
+        log.error("Unable to fetch jar: {} from node: {}", id, fromNode);
+      }
+    });
+    return result[0];
+  }
+
+
+  @EndPoint(spec = "node.filestore.GET",
+      permission = PermissionNameProvider.Name.FILESTORE_READ)
+  public class FileStoreRead {
+
+    @Command
+    public void get(CallInfo info) {
+      SolrQueryRequest req = info.req;
+      SolrQueryResponse rsp = info.rsp;
+      String id = ((V2HttpCall) req.getHttpSolrCall()).getUrlParts().get(CommonParams.ID);
+      if (id == null) {
+        rsp.add("files",Collections.singletonMap(PACKAGE,  fileList(req.getParams())));
+      } else {
+        if (!fetchFile(id)) {
+          throw new SolrException(SolrException.ErrorCode.NOT_FOUND, "No such blob");
+        }
+
+        ModifiableSolrParams solrParams = new ModifiableSolrParams();
+        solrParams.add(CommonParams.WT, FILE_STREAM);
+        req.setParams(SolrParams.wrapDefaults(solrParams, req.getParams()));
+        rsp.add(FILE_STREAM, (SolrCore.RawWriter) os -> {
+          ByteBuffer b = tmpFiles.get(id);
+          if (b != null) {
+            os.write(b.array(), b.arrayOffset(), b.limit());
+          } else {
+            File file = new File(getFileStorePath().toFile(), id);
+            try (FileInputStream is = new FileInputStream(file)) {
+              org.apache.commons.io.IOUtils.copy(is, os);
+            }
+          }
+        });
+      }
+    }
+
+  }
+}
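
Review note: a hedged sketch of how code on a node might publish a jar through the distributed file store above; the helper name and path are illustrative. The store identifies a file as "<sha256>-<fileName>" (which persistToFile() verifies), and other nodes can then fetch it over HTTP at /api/node/filestore/package/<id>, as fetchFileFromNodeAndPersist() does.

    // Sketch only: publish a local jar via DistribFileStore.distributeFile().
    static void publishJar(CoreContainer cc, java.nio.file.Path jar) throws java.io.IOException {
      byte[] bytes = java.nio.file.Files.readAllBytes(jar);
      java.nio.ByteBuffer buf = java.nio.ByteBuffer.wrap(bytes);
      String id = BlobRepository.sha256Digest(buf) + "-" + jar.getFileName();  // "<sha256>-<fileName>"
      cc.getFileStore().distributeFile(buf, id);
    }
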
diff --git a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
index cf6bb4d..c5d97c9 100644
--- a/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/MemClassLoader.java
@@ -26,6 +26,7 @@
 import java.security.ProtectionDomain;
 import java.security.cert.Certificate;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
@@ -43,21 +44,29 @@
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
   private boolean allJarsLoaded = false;
   private final SolrResourceLoader parentLoader;
-  private List<PluginBag.RuntimeLib> libs = new ArrayList<>();
+  private List<RuntimeLib> libs = new ArrayList<>();
   private Map<String, Class> classCache = new HashMap<>();
   private List<String> errors = new ArrayList<>();
 
 
-  public MemClassLoader(List<PluginBag.RuntimeLib> libs, SolrResourceLoader resourceLoader) {
+  public MemClassLoader(List<RuntimeLib> libs, SolrResourceLoader resourceLoader) {
     this.parentLoader = resourceLoader;
     this.libs = libs;
   }
 
+  public int getZnodeVersion(){
+    int result = -1;
+    for (RuntimeLib lib : libs) {
+      if(lib.znodeVersion > result) result = lib.znodeVersion;
+    }
+    return result;
+  }
+
   synchronized void loadRemoteJars() {
     if (allJarsLoaded) return;
     int count = 0;
-    for (PluginBag.RuntimeLib lib : libs) {
-      if (lib.getUrl() != null) {
+    for (RuntimeLib lib : libs) {
+      if (lib.getSha256() != null) {
         try {
           lib.loadJar();
           lib.verify();
@@ -70,10 +79,13 @@
     if (count == libs.size()) allJarsLoaded = true;
   }
 
+  public Collection<String> getErrors(){
+    return errors;
+  }
   public synchronized void loadJars() {
     if (allJarsLoaded) return;
 
-    for (PluginBag.RuntimeLib lib : libs) {
+    for (RuntimeLib lib : libs) {
       try {
         lib.loadJar();
         lib.verify();
@@ -133,7 +145,7 @@
 
     String path = name.replace('.', '/').concat(".class");
     ByteBuffer buf = null;
-    for (PluginBag.RuntimeLib lib : libs) {
+    for (RuntimeLib lib : libs) {
       try {
         buf = lib.getFileContent(path);
         if (buf != null) {
@@ -150,7 +162,7 @@
 
   @Override
   public void close() throws Exception {
-    for (PluginBag.RuntimeLib lib : libs) {
+    for (RuntimeLib lib : libs) {
       try {
         lib.close();
       } catch (Exception e) {
@@ -176,6 +188,7 @@
     try {
       return findClass(cname).asSubclass(expectedType);
     } catch (Exception e) {
+      log.error("Error loading class from runtime libs ", e);
       if (e instanceof SolrException) {
         throw (SolrException) e;
       } else {
diff --git a/solr/core/src/java/org/apache/solr/core/PackageBag.java b/solr/core/src/java/org/apache/solr/core/PackageBag.java
new file mode 100644
index 0000000..041ec62
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/PackageBag.java
@@ -0,0 +1,331 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.io.File;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.security.PublicKey;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+import com.google.common.collect.ImmutableList;
+import org.apache.lucene.analysis.util.ResourceLoader;
+import org.apache.solr.cloud.CloudUtil;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterPropertiesListener;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.Base64;
+import org.apache.solr.util.CryptoKeys;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CommonParams.NAME;
+import static org.apache.solr.common.params.CommonParams.PACKAGES;
+import static org.apache.solr.common.params.CommonParams.VERSION;
+
+/**
+ * This class listens for changes to packages and keeps a registry of the
+ * resource loader instances and the metadata of each package. The resource
+ * loaders are kept in sync with the data in ZooKeeper, and there is exactly one
+ * resource loader instance per package on a given node. These resource loaders
+ * are shared across all components in a Solr node.
+ * <p>
+ * When packages are created or updated, new resource loaders are created and,
+ * if there are listeners, they are notified. The listeners can in turn choose to
+ * discard plugin instances loaded from the old resource loaders and create new
+ * instances if required.
+ * <p>
+ * All the resource loaders are loaded from files that exist in the {@link DistribFileStore}.
+ */
+public class PackageBag implements ClusterPropertiesListener {
+  public static final boolean enablePackage = Boolean.parseBoolean(System.getProperty("enable.package", "false"));
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  final CoreContainer coreContainer;
+
+  private Map<String, PackageResourceLoader> pkgs = new HashMap<>();
+
+  final PackageListeners listenerRegistry = new PackageListeners();
+
+  int myVersion = -1;
+
+
+  public int getZNodeVersion(String pkg) {
+    PackageResourceLoader p = pkgs.get(pkg);
+    return p == null ? -1 : p.packageInfo.znodeVersion;
+  }
+
+
+  public PackageInfo getPackageInfo(String pkg) {
+    PackageResourceLoader p = pkgs.get(pkg);
+    return p == null ? null : p.packageInfo;
+  }
+
+
+  public static class PackageInfo implements MapWriter {
+    public final String name;
+    public final String version;
+    public final List<FileObj> fileObjs;
+    public final int znodeVersion;
+    public final String manifest;
+
+    public PackageInfo(Map m, int znodeVersion) {
+      name = (String) m.get(NAME);
+      version = (String) m.get(VERSION);
+      manifest = (String) m.get("manifest");
+      this.znodeVersion = znodeVersion;
+      Object o = m.get("file");
+      if (o instanceof Map) {
+        Map map = (Map) o;
+        this.fileObjs = ImmutableList.of(new FileObj(map));
+      } else if (o instanceof List) {
+        List list = (List) o;
+        ImmutableList.Builder<FileObj> builder = new ImmutableList.Builder();
+        for (Object o1 : list) {
+          if(o1 instanceof Map) {
+            builder.add(new FileObj(o1));
+          } else {
+            throw new RuntimeException("Invalid type for attribute 'files'");
+          }
+        }
+        fileObjs = builder.build();
+      } else throw new RuntimeException("Invalid type for attribute 'file'");
+    }
+
+    public List<String> validate(CoreContainer coreContainer) throws Exception {
+      List<String> errors = new ArrayList<>();
+      if (!enablePackage) {
+        errors.add("node not started with -Denable.package=true");
+        return errors;
+      }
+      Map<String, byte[]> keys = CloudUtil.getTrustedKeys(
+          coreContainer.getZkController().getZkClient(), "exe");
+      if (keys.isEmpty()) {
+        errors.add("No public keys in ZK : /keys/exe");
+        return errors;
+      }
+      CryptoKeys cryptoKeys = new CryptoKeys(keys);
+      for (FileObj fileObj : fileObjs) {
+        if (!fileObj.verifyJar(cryptoKeys, coreContainer)) {
+          errors.add("Invalid signature for file : " + fileObj.fileObjName);
+        }
+      }
+      return errors;
+    }
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      ew.put("name", name);
+      ew.put("version", version);
+      ew.putIfNotNull("manifest", manifest);
+      if (fileObjs.size() == 1) {
+        ew.put("file", fileObjs.get(0));
+      } else {
+        ew.put("files", fileObjs);
+      }
+    }
+
+
+    @Override
+    public boolean equals(Object obj) {
+      if (obj instanceof PackageInfo) {
+        PackageInfo that = (PackageInfo) obj;
+        if (!Objects.equals(this.version, that.version)) return false;
+        if (this.fileObjs.size() == that.fileObjs.size()) {
+          for (int i = 0; i < fileObjs.size(); i++) {
+            if (!Objects.equals(fileObjs.get(i), that.fileObjs.get(i))) {
+              return false;
+            }
+          }
+        } else {
+          return false;
+        }
+      } else {
+        return false;
+      }
+      return true;
+    }
+
+    PackageResourceLoader createPackage(PackageBag packageBag) {
+      return new PackageResourceLoader(packageBag, this);
+    }
+
+    public static class FileObj implements MapWriter {
+      public final DistribFileStore.FileObjName fileObjName;
+      public final String sig;
+
+
+      public FileObj(Object o) {
+        if (o instanceof Map) {
+          Map m = (Map) o;
+          this.fileObjName = new DistribFileStore.FileObjName((String) m.get(CommonParams.ID));
+          this.sig = (String) m.get("sig");
+        } else {
+          throw new RuntimeException("'file' should be a Object Type");
+        }
+      }
+
+      @Override
+      public void writeMap(EntryWriter ew) throws IOException {
+        ew.put(CommonParams.ID, fileObjName.name());
+        ew.put("sig", sig);
+      }
+
+      @Override
+      public boolean equals(Object obj) {
+        if (obj instanceof FileObj) {
+          FileObj that = (FileObj) obj;
+          return Objects.equals(this.fileObjName, that.fileObjName) && Objects.equals(this.sig, that.sig);
+        } else {
+          return false;
+        }
+      }
+
+      public boolean verifyJar(CryptoKeys cryptoKeys, CoreContainer coreContainer) throws IOException {
+        boolean[] result = new boolean[]{false};
+        for (Map.Entry<String, PublicKey> e : cryptoKeys.keys.entrySet()) {
+          coreContainer.getFileStore().readFile(fileObjName.name(), is -> {
+            try {
+              if (CryptoKeys.verify(e.getValue(), Base64.base64ToByteArray(sig), is)) result[0] = true;
+            } catch (Exception ex) {
+              log.error("Unexpected error in verifying jar", ex);
+            }
+          });
+        }
+        return result[0];
+
+      }
+    }
+
+  }
+
+  public static class PackageResourceLoader extends SolrResourceLoader implements MapWriter {
+    final PackageInfo packageInfo;
+
+    @Override
+    public void writeMap(EntryWriter ew) throws IOException {
+      packageInfo.writeMap(ew);
+    }
+
+    PackageResourceLoader(PackageBag packageBag, PackageInfo packageInfo) {
+      super(packageBag.coreContainer.getResourceLoader().getInstancePath(),
+          packageBag.coreContainer.getResourceLoader().classLoader);
+      this.packageInfo = packageInfo;
+      List<URL> fileURLs = new ArrayList<>(packageInfo.fileObjs.size());
+      for (PackageInfo.FileObj fileObj : packageInfo.fileObjs) {
+        try {
+          if (!packageBag.coreContainer.getFileStore().fetchFile(fileObj.fileObjName.name())) {
+            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+                "File not available " + fileObj.fileObjName.name());
+          }
+          fileURLs.add(new File(packageBag.coreContainer.getFileStore().getFileStorePath().toFile(), fileObj.fileObjName.name()).toURI().toURL());
+        } catch (MalformedURLException e) {
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+        }
+      }
+      addToClassLoader(fileURLs);
+    }
+  }
+
+  PackageBag(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+  }
+
+
+  public <T> T newInstance(String cName, Class<T> expectedType, String pkg) {
+    PackageResourceLoader p = pkgs.get(pkg);
+    if (p == null) {
+      return coreContainer.getResourceLoader().newInstance(cName, expectedType);
+    } else {
+      return p.newInstance(cName, expectedType);
+    }
+  }
+
+  @Override
+  public boolean onChange(Map<String, Object> properties) {
+    log.debug("clusterprops.json changed , version {}", coreContainer.getZkController().getZkStateReader().getClusterPropsVersion());
+    int v = coreContainer.getZkController().getZkStateReader().getClusterPropsVersion();
+    List<PackageInfo> touchedPackages = updatePackages(properties, v);
+
+    if (!touchedPackages.isEmpty()) {
+      Collection<SolrCore> cores = coreContainer.getCores();
+
+      log.info(" {} cores being notified of updated packages  : {}",cores.size() ,touchedPackages.stream().map(p -> p.name).collect(Collectors.toList()) );
+      for (SolrCore core : cores) {
+        core.getListenerRegistry().packagesUpdated(touchedPackages);
+      }
+      listenerRegistry.packagesUpdated(touchedPackages);
+
+    }
+    coreContainer.getContainerRequestHandlers().updateReqHandlers(properties);
+    myVersion = v;
+    return false;
+  }
+
+
+  private List<PackageInfo> updatePackages(Map<String, Object> properties, int ver) {
+    Map m = (Map) properties.getOrDefault(PACKAGES, Collections.emptyMap());
+    if (pkgs.isEmpty() && m.isEmpty()) return Collections.emptyList();
+    Map<String, PackageInfo> reloadPackages = new HashMap<>();
+    m.forEach((k, v) -> {
+      if (v instanceof Map) {
+        PackageInfo info = new PackageInfo((Map) v, ver);
+        PackageResourceLoader pkg = pkgs.get(k);
+        if (pkg == null || !pkg.packageInfo.equals(info)) {
+          reloadPackages.put(info.name, info);
+        }
+      }
+    });
+    pkgs.forEach((name, aPackage) -> {
+      if (!m.containsKey(name)) reloadPackages.put(name, null);
+    });
+
+    if (!reloadPackages.isEmpty()) {
+      List<PackageInfo> touchedPackages = new ArrayList<>();
+      Map<String, PackageResourceLoader> newPkgs = new HashMap<>(pkgs);
+      reloadPackages.forEach((s, pkgInfo) -> {
+        if (pkgInfo == null) {
+          newPkgs.remove(s);
+        } else {
+          newPkgs.put(s, pkgInfo.createPackage(PackageBag.this));
+          touchedPackages.add(pkgInfo);
+        }
+      });
+      this.pkgs = newPkgs;
+      return touchedPackages;
+
+    }
+    return Collections.emptyList();
+  }
+
+  public ResourceLoader getResourceLoader(String pkg) {
+    PackageResourceLoader loader = pkgs.get(pkg);
+    return loader == null ? coreContainer.getResourceLoader() : loader;
+  }
+
+}
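
Illustrative sketch (not part of the patch, values made up): PackageInfo above parses one entry of the "packages" map in clusterprops.json, consisting of a name, a version, an optional manifest and one or more signed files. A minimal Java construction of that shape could look like this:

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.core.PackageBag;

    public class PackageInfoSketch {
      // Builds the map shape that PackageBag.PackageInfo(Map, int) expects.
      public static PackageBag.PackageInfo sample() {
        Map<String, Object> file = new HashMap<>();
        file.put("id", "<file-id-in-file-store>");    // hypothetical file store id
        file.put("sig", "<base64-signature>");        // verified against the keys in /keys/exe

        Map<String, Object> pkg = new HashMap<>();
        pkg.put("name", "mypkg");
        pkg.put("version", "1.0");
        pkg.put("file", file);                        // a List of such maps is also accepted

        // the second argument is the clusterprops.json znode version the entry was read at
        return new PackageBag.PackageInfo(pkg, 1);
      }
    }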
diff --git a/solr/core/src/java/org/apache/solr/core/PackageListeners.java b/solr/core/src/java/org/apache/solr/core/PackageListeners.java
new file mode 100644
index 0000000..dd0ec39
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/PackageListeners.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.lang.ref.WeakReference;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+/** This is a registry for package lifecycle listeners. When a package is updated,
+ * the listeners are notified and can take appropriate action. Most likely, a
+ * listener will reload the plugins it loaded from that package.
+ *
+ */
+public class PackageListeners {
+  // this registry only keeps a weak reference because it does not want to
+  // cause a memory leak if the listener forgets to unregister itself
+  private List<WeakReference<Listener>> listeners = new ArrayList<>();
+
+  public synchronized void addListener(Listener listener) {
+    listeners.add(new WeakReference<>(listener));
+
+  }
+
+  public synchronized void removeListener(Listener listener) {
+    Iterator<WeakReference<Listener>> it = listeners.iterator();
+    while (it.hasNext()) {
+      WeakReference<Listener> ref = it.next();
+      Listener pkgListener = ref.get();
+      if(pkgListener == null || pkgListener == listener){
+        it.remove();
+      }
+
+    }
+
+  }
+
+  public synchronized void packagesUpdated(List<PackageBag.PackageInfo> pkgs){
+    for (PackageBag.PackageInfo pkgInfo : pkgs) {
+      invokeListeners(pkgInfo);
+    }
+  }
+
+  private synchronized void invokeListeners(PackageBag.PackageInfo pkgInfo) {
+    for (WeakReference<Listener> ref : listeners) {
+      Listener listener = ref.get();
+      if (listener != null && listener.packageName().equals(pkgInfo.name)) {
+        listener.changed(pkgInfo);
+      }
+    }
+  }
+
+  public List<Listener> getListeners(){
+    List<Listener> result = new ArrayList<>();
+    for (WeakReference<Listener> ref : listeners) {
+      Listener l = ref.get();
+      if(l != null){
+        result.add(l);
+      }
+
+    }
+    return result;
+  }
+
+
+  public interface Listener {
+
+    String packageName();
+
+    PluginInfo pluginInfo();
+
+    void changed(PackageBag.PackageInfo lib);
+
+    PackageBag.PackageInfo packageInfo();
+  }
+}
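
Illustrative sketch (not part of the patch, all names made up): a PackageListeners.Listener along the lines of what PackagePluginHolder in PluginBag does below. The owner keeps a strong reference to the listener (the registry above only holds WeakReferences), registers it on the core, and reacts in changed() when a newer version of its package is published.

    import org.apache.solr.core.PackageBag;
    import org.apache.solr.core.PackageListeners;
    import org.apache.solr.core.PluginInfo;
    import org.apache.solr.core.SolrCore;

    public class PackageAwareComponent {
      // strong reference: the registry stores only a WeakReference to this listener
      private final PackageListeners.Listener listener;
      private volatile PackageBag.PackageInfo current;

      public PackageAwareComponent(SolrCore core, String packageName, PluginInfo info) {
        this.listener = new PackageListeners.Listener() {
          @Override public String packageName() { return packageName; }
          @Override public PluginInfo pluginInfo() { return info; }
          @Override public PackageBag.PackageInfo packageInfo() { return current; }
          @Override public void changed(PackageBag.PackageInfo lib) {
            current = lib;   // a real plugin would re-create its instance here
          }
        };
        core.getListenerRegistry().addListener(listener);
      }
    }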
diff --git a/solr/core/src/java/org/apache/solr/core/PluginBag.java b/solr/core/src/java/org/apache/solr/core/PluginBag.java
index 6088f52..5189de3 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginBag.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginBag.java
@@ -16,11 +16,8 @@
  */
 package org.apache.solr.core;
 
-import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashMap;
@@ -30,24 +27,20 @@
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.stream.Collectors;
-import java.util.zip.ZipEntry;
-import java.util.zip.ZipInputStream;
 
 import org.apache.lucene.analysis.util.ResourceLoader;
 import org.apache.lucene.analysis.util.ResourceLoaderAware;
 import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
 import org.apache.solr.api.ApiSupport;
-import org.apache.solr.cloud.CloudUtil;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.RequestHandlerBase;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.update.processor.UpdateRequestProcessorChain;
 import org.apache.solr.update.processor.UpdateRequestProcessorFactory;
-import org.apache.solr.util.CryptoKeys;
-import org.apache.solr.util.SimplePostTool;
 import org.apache.solr.util.plugin.NamedListInitializedPlugin;
 import org.apache.solr.util.plugin.PluginInfoInitialized;
 import org.apache.solr.util.plugin.SolrCoreAware;
@@ -56,7 +49,6 @@
 
 import static java.util.Collections.singletonMap;
 import static org.apache.solr.api.ApiBag.HANDLER_NAME;
-import static org.apache.solr.common.params.CommonParams.NAME;
 
 /**
  * This manages the lifecycle of a set of plugins of the same type.
@@ -124,24 +116,36 @@
     return result;
   }
 
-  public PluginHolder<T> createPlugin(PluginInfo info) {
-    if ("true".equals(String.valueOf(info.attributes.get("runtimeLib")))) {
-      log.debug(" {} : '{}'  created with runtimeLib=true ", meta.getCleanTag(), info.name);
-      LazyPluginHolder<T> holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ?
-          core.getMemClassLoader() :
-          core.getResourceLoader(), true);
+  private static <T> T createInitInstance(PluginInfo pluginInfo, SolrConfig.SolrPluginInfo pluginMeta,
+                                          SolrCore core, ResourceLoader resourceLoader,
+                                          boolean isRuntimeLib) {
+    T localInst = null;
+    try {
+      localInst = (T) SolrCore.createInstance(pluginInfo.className, pluginMeta.clazz, pluginMeta.getCleanTag(), core, resourceLoader);
+    } catch (SolrException e) {
+      if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) {
+        throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()),
+            e.getMessage() + ". runtime library loading is not enabled, start Solr with -Denable.runtime.lib=true",
+            e.getCause());
+      }
+      throw e;
 
-      return meta.clazz == UpdateRequestProcessorFactory.class ?
-          (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder(holder) :
-          holder;
-    } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) {
-      log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
-      return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
-    } else {
-      T inst = core.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader());
-      initInstance(inst, info);
-      return new PluginHolder<>(info, inst);
+
     }
+    initInstance(localInst, pluginInfo);
+    if (localInst instanceof SolrCoreAware) {
+      SolrResourceLoader.assertAwareCompatibility(SolrCoreAware.class, localInst);
+      ((SolrCoreAware) localInst).inform(core);
+    }
+    if (localInst instanceof ResourceLoaderAware) {
+      SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, localInst);
+      try {
+        ((ResourceLoaderAware) localInst).inform(core.getResourceLoader());
+      } catch (IOException e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error initializing component", e);
+      }
+    }
+    return localInst;
   }
 
   /** make a plugin available in an alternate name. This is an internal API and not for public use
@@ -194,8 +198,11 @@
     PluginHolder<T> old = put(name, pluginHolder);
     return old == null ? null : old.get();
   }
+  public void put(Api api){
+    apiBag.register(api,Collections.emptyMap());
+  }
 
-  PluginHolder<T> put(String name, PluginHolder<T> plugin) {
+  public PluginHolder<T> put(String name, PluginHolder<T> plugin) {
     Boolean registerApi = null;
     Boolean disableHandler = null;
     if (plugin.pluginInfo != null) {
@@ -324,13 +331,60 @@
     }
   }
 
+  public static void closeQuietly(Object inst)  {
+    try {
+      if (inst instanceof AutoCloseable) ((AutoCloseable) inst).close();
+    } catch (Exception e) {
+      log.error("Error closing " + inst, e);
+    }
+  }
+
+  public PluginHolder<T> createPlugin(PluginInfo info) {
+    String pkg = info.attributes.get(CommonParams.PACKAGE);
+    if (pkg != null) {
+      log.debug(" {} : '{}'  created with package={} ", meta.getCleanTag(), info.name, pkg);
+      PluginHolder<T> holder = new PackagePluginHolder<T>(info, core, meta);
+      return meta.clazz == UpdateRequestProcessorFactory.class ?
+          (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder((PluginHolder<UpdateRequestProcessorFactory>) holder) :
+          holder;
+
+    } else if (info.isRuntimePlugin()) {
+      log.debug(" {} : '{}'  created with packageInfo=true ", meta.getCleanTag(), info.name);
+      LazyPluginHolder<T> holder = new LazyPluginHolder<>(meta, info, core, RuntimeLib.isEnabled() ?
+          core.getMemClassLoader() :
+          core.getResourceLoader(), true);
+
+      return meta.clazz == UpdateRequestProcessorFactory.class ?
+          (PluginHolder<T>) new UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder((PluginHolder<UpdateRequestProcessorFactory>) holder) :
+          holder;
+    } else if ("lazy".equals(info.attributes.get("startup")) && meta.options.contains(SolrConfig.PluginOpts.LAZY)) {
+      log.debug("{} : '{}' created with startup=lazy ", meta.getCleanTag(), info.name);
+      return new LazyPluginHolder<T>(meta, info, core, core.getResourceLoader(), false);
+    } else {
+      T inst = SolrCore.createInstance(info.className, (Class<T>) meta.clazz, meta.getCleanTag(), null, core.getResourceLoader());
+      initInstance(inst, info);
+      return new PluginHolder<>(info, inst);
+    }
+  }
+
+  public Api v2lookup(String path, String method, Map<String, String> parts) {
+    if (apiBag == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place");
+    }
+    return apiBag.lookup(path, method, parts);
+  }
+
+  public ApiBag getApiBag() {
+    return apiBag;
+  }
+
   /**
    * An indirect reference to a plugin. It just wraps a plugin instance.
    * subclasses may choose to lazily load the plugin
    */
   public static class PluginHolder<T> implements AutoCloseable {
-    private T inst;
     protected final PluginInfo pluginInfo;
+    T inst;
     boolean registerAPI = false;
 
     public PluginHolder(PluginInfo info) {
@@ -358,7 +412,7 @@
       // can close() be called concurrently with other methods?
       if (isLoaded()) {
         T myInst = get();
-        if (myInst != null && myInst instanceof AutoCloseable) ((AutoCloseable) myInst).close();
+        closeQuietly(myInst);
       }
     }
 
@@ -426,209 +480,62 @@
         MemClassLoader loader = (MemClassLoader) resourceLoader;
         loader.loadJars();
       }
-      Class<T> clazz = (Class<T>) pluginMeta.clazz;
-      T localInst = null;
-      try {
-        localInst = core.createInstance(pluginInfo.className, clazz, pluginMeta.getCleanTag(), null, resourceLoader);
-      } catch (SolrException e) {
-        if (isRuntimeLib && !(resourceLoader instanceof MemClassLoader)) {
-          throw new SolrException(SolrException.ErrorCode.getErrorCode(e.code()),
-              e.getMessage() + ". runtime library loading is not enabled, start Solr with -Denable.runtime.lib=true",
-              e.getCause());
-        }
-        throw e;
-
-
-      }
-      initInstance(localInst, pluginInfo);
-      if (localInst instanceof SolrCoreAware) {
-        SolrResourceLoader.assertAwareCompatibility(SolrCoreAware.class, localInst);
-        ((SolrCoreAware) localInst).inform(core);
-      }
-      if (localInst instanceof ResourceLoaderAware) {
-        SolrResourceLoader.assertAwareCompatibility(ResourceLoaderAware.class, localInst);
-        try {
-          ((ResourceLoaderAware) localInst).inform(core.getResourceLoader());
-        } catch (IOException e) {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "error initializing component", e);
-        }
-      }
-      lazyInst = localInst;  // only assign the volatile until after the plugin is completely ready to use
+      lazyInst = createInitInstance(pluginInfo,pluginMeta,core,resourceLoader, isRuntimeLib);
       return true;
     }
 
 
   }
 
-  /**
-   * This represents a Runtime Jar. A jar requires two details , name and version
-   */
-  public static class RuntimeLib implements PluginInfoInitialized, AutoCloseable {
-    private String name, version, sig, sha512, url;
-    private BlobRepository.BlobContentRef<ByteBuffer> jarContent;
-    private final CoreContainer coreContainer;
-    private boolean verified = false;
+  public class PackagePluginHolder<T> extends PluginHolder<T> {
+    private final SolrCore core;
+    private final SolrConfig.SolrPluginInfo pluginMeta;
+    private final PackageBag packageBag;
+    private final String pkg;
+    private PackageBag.PackageInfo packageInfo;
 
-    @Override
-    public void init(PluginInfo info) {
-      name = info.attributes.get(NAME);
-      url = info.attributes.get("url");
-      sig = info.attributes.get("sig");
-      if(url == null) {
-        Object v = info.attributes.get("version");
-        if (name == null || v == null) {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version");
+    public PackagePluginHolder(PluginInfo info, SolrCore core, SolrConfig.SolrPluginInfo pluginMeta) {
+      super(info);
+      this.core = core;
+      this.pluginMeta = pluginMeta;
+      this.pkg = info.attributes.get(CommonParams.PACKAGE);
+      this.core.getListenerRegistry().addListener(new PackageListeners.Listener() {
+        @Override
+        public String packageName() {
+          return pkg;
         }
-        version = String.valueOf(v);
-      } else {
-        sha512 = info.attributes.get("sha512");
-        if(sha512 == null){
-          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "runtimeLib with url must have a 'sha512' attribute");
+
+        @Override
+        public PluginInfo pluginInfo() {
+          return info;
         }
-        ByteBuffer buf = null;
-        buf = coreContainer.getBlobRepository().fetchFromUrl(name, url);
 
-        String digest = BlobRepository.sha512Digest(buf);
-        if(!sha512.equals(digest))  {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG, url, sha512, digest)  );
+        @Override
+        public PackageBag.PackageInfo packageInfo() {
+          return packageInfo;
         }
-        log.info("dynamic library verified {}, sha512: {}", url, sha512);
 
-      }
-
-    }
-
-    public RuntimeLib(SolrCore core) {
-      coreContainer = core.getCoreContainer();
-    }
-
-    public String getUrl(){
-      return url;
-    }
-
-    void loadJar() {
-      if (jarContent != null) return;
-      synchronized (this) {
-        if (jarContent != null) return;
-
-        jarContent = url == null?
-            coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version):
-            coreContainer.getBlobRepository().getBlobIncRef(name, null,url,sha512);
-
-      }
-    }
-
-    public static boolean isEnabled() {
-      return Boolean.getBoolean("enable.runtime.lib");
-    }
-
-    public String getName() {
-      return name;
-    }
-
-    public String getVersion() {
-      return version;
-    }
-
-    public String getSig() {
-      return sig;
-
-    }
-
-    public ByteBuffer getFileContent(String entryName) throws IOException {
-      if (jarContent == null)
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name  );
-      return getFileContent(jarContent.blob, entryName);
-
-    }
-
-    public ByteBuffer getFileContent(BlobRepository.BlobContent<ByteBuffer> blobContent,  String entryName) throws IOException {
-      ByteBuffer buff = blobContent.get();
-      ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit());
-      ZipInputStream zis = new ZipInputStream(zipContents);
-      try {
-        ZipEntry entry;
-        while ((entry = zis.getNextEntry()) != null) {
-          if (entryName == null || entryName.equals(entry.getName())) {
-            SimplePostTool.BAOS out = new SimplePostTool.BAOS();
-            byte[] buffer = new byte[2048];
-            int size;
-            while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
-              out.write(buffer, 0, size);
-            }
-            out.close();
-            return out.getByteBuffer();
-          }
+        @Override
+        public void changed(PackageBag.PackageInfo lib) {
+          int myVersion = packageInfo == null? -1 : packageInfo.znodeVersion;
+          if(lib.znodeVersion > myVersion) reload();
         }
-      } finally {
-        zis.closeEntry();
-      }
-      return null;
+      });
+      this.packageBag = core.getCoreContainer().getPackageBag();
+      reload();
     }
 
 
-    @Override
-    public void close() throws Exception {
-      if (jarContent != null) coreContainer.getBlobRepository().decrementBlobRefCount(jarContent);
+    private void reload() {
+      if(inst == null) log.info("reloading plugin {} ", pluginInfo.name);
+      inst = createInitInstance(pluginInfo, pluginMeta,
+          core, packageBag.getResourceLoader(this.pkg), true);
+      this.packageInfo = packageBag.getPackageInfo(this.pkg);
+
     }
 
-    public static List<RuntimeLib> getLibObjects(SolrCore core, List<PluginInfo> libs) {
-      List<RuntimeLib> l = new ArrayList<>(libs.size());
-      for (PluginInfo lib : libs) {
-        RuntimeLib rtl = new RuntimeLib(core);
-        try {
-          rtl.init(lib);
-        } catch (Exception e) {
-          log.error("error loading runtime library", e);
-        }
-        l.add(rtl);
-      }
-      return l;
-    }
 
-    public void verify() throws Exception {
-      if (verified) return;
-      if (jarContent == null) {
-        log.error("Calling verify before loading the jar");
-        return;
-      }
-
-      if (!coreContainer.isZooKeeperAware())
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud");
-      Map<String, byte[]> keys = CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe");
-      if (keys.isEmpty()) {
-        if (sig == null) {
-          verified = true;
-          return;
-        } else {
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib  " + name);
-        }
-      } else if (sig == null) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name));
-      }
-
-      try {
-        String matchedKey = new CryptoKeys(keys).verify(sig, jarContent.blob.get());
-        if (matchedKey == null)
-          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
-        log.info("Jar {} signed with {} successfully verified", name, matchedKey);
-      } catch (Exception e) {
-        if (e instanceof SolrException) throw e;
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
-      }
-    }
   }
 
 
-  public Api v2lookup(String path, String method, Map<String, String> parts) {
-    if (apiBag == null) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "this should not happen, looking up for v2 API at the wrong place");
-    }
-    return apiBag.lookup(path, method, parts);
-  }
-
-  public ApiBag getApiBag() {
-    return apiBag;
-  }
-
 }
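Illustrative sketch (not part of the patch, class and package names made up): the PluginInfo shape that routes createPlugin(...) above to a PackagePluginHolder. Any plugin whose attributes carry CommonParams.PACKAGE is created with that package's resource loader and re-created whenever the package is updated.

    import java.util.HashMap;
    import java.util.Map;
    import org.apache.solr.common.params.CommonParams;
    import org.apache.solr.core.PluginInfo;

    public class PackagePluginDeclaration {
      public static PluginInfo sample() {
        Map<String, Object> attrs = new HashMap<>();
        attrs.put("name", "myComponent");
        attrs.put("class", "my.pkg.MyComponent");   // class shipped inside the package's jar
        attrs.put(CommonParams.PACKAGE, "mypkg");   // triggers PackagePluginHolder in createPlugin
        return new PluginInfo("searchComponent", attrs);
      }
    }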
diff --git a/solr/core/src/java/org/apache/solr/core/PluginInfo.java b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
index 1bc85ae..8cbecc9 100644
--- a/solr/core/src/java/org/apache/solr/core/PluginInfo.java
+++ b/solr/core/src/java/org/apache/solr/core/PluginInfo.java
@@ -16,14 +16,22 @@
  */
 package org.apache.solr.core;
 
+import java.lang.invoke.MethodHandles;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+
 import org.apache.solr.common.MapSerializable;
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.util.DOMUtil;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
-import java.util.*;
-
 import static java.util.Arrays.asList;
 import static java.util.Collections.unmodifiableList;
 import static java.util.Collections.unmodifiableMap;
@@ -31,23 +39,26 @@
 import static org.apache.solr.schema.FieldType.CLASS_NAME;
 
 /**
- * An Object which represents a Plugin of any type 
- *
+ * An Object which represents a Plugin of any type
  */
 public class PluginInfo implements MapSerializable {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
   public final String name, className, type;
   public final NamedList initArgs;
   public final Map<String, String> attributes;
   public final List<PluginInfo> children;
   private boolean isFromSolrConfig;
 
+  public List<String> pathInConfig;
+
   public PluginInfo(String type, Map<String, String> attrs, NamedList initArgs, List<PluginInfo> children) {
     this.type = type;
     this.name = attrs.get(NAME);
     this.className = attrs.get(CLASS_NAME);
     this.initArgs = initArgs;
     attributes = unmodifiableMap(attrs);
-    this.children = children == null ? Collections.<PluginInfo>emptyList(): unmodifiableList(children);
+    this.children = children == null ? Collections.emptyList() : unmodifiableList(children);
     isFromSolrConfig = false;
   }
 
@@ -62,7 +73,7 @@
     isFromSolrConfig = true;
   }
 
-  public PluginInfo(String type, Map<String,Object> map) {
+  public PluginInfo(String type, Map<String, Object> map) {
     LinkedHashMap m = new LinkedHashMap<>(map);
     initArgs = new NamedList();
     for (Map.Entry<String, Object> entry : map.entrySet()) {
@@ -87,7 +98,7 @@
     this.name = (String) m.get(NAME);
     this.className = (String) m.get(CLASS_NAME);
     attributes = unmodifiableMap(m);
-    this.children =  Collections.<PluginInfo>emptyList();
+    this.children = Collections.emptyList();
     isFromSolrConfig = true;
   }
 
@@ -102,7 +113,7 @@
       PluginInfo pluginInfo = new PluginInfo(nd, null, false, false);
       if (pluginInfo.isEnabled()) children.add(pluginInfo);
     }
-    return children.isEmpty() ? Collections.<PluginInfo>emptyList() : unmodifiableList(children);
+    return children.isEmpty() ? Collections.emptyList() : unmodifiableList(children);
   }
 
   @Override
@@ -117,37 +128,37 @@
     return sb.toString();
   }
 
-  public boolean isEnabled(){
+  public boolean isEnabled() {
     String enable = attributes.get("enable");
-    return enable == null || Boolean.parseBoolean(enable); 
+    return enable == null || Boolean.parseBoolean(enable);
   }
 
   public boolean isDefault() {
     return Boolean.parseBoolean(attributes.get("default"));
   }
 
-  public PluginInfo getChild(String type){
+  public PluginInfo getChild(String type) {
     List<PluginInfo> l = getChildren(type);
-    return  l.isEmpty() ? null:l.get(0);
+    return l.isEmpty() ? null : l.get(0);
   }
 
   public Map<String, Object> toMap(Map<String, Object> map) {
     map.putAll(attributes);
     Map m = map;
-    if(initArgs!=null ) m.putAll(initArgs.asMap(3));
-    if(children != null){
+    if (initArgs != null) m.putAll(initArgs.asMap(3));
+    if (children != null) {
       for (PluginInfo child : children) {
         Object old = m.get(child.name);
-        if(old == null){
+        if (old == null) {
           m.put(child.name, child.toMap(new LinkedHashMap<>()));
         } else if (old instanceof List) {
           List list = (List) old;
           list.add(child.toMap(new LinkedHashMap<>()));
-        }  else {
+        } else {
           ArrayList l = new ArrayList();
           l.add(old);
           l.add(child.toMap(new LinkedHashMap<>()));
-          m.put(child.name,l);
+          m.put(child.name, l);
         }
       }
 
@@ -155,36 +166,46 @@
     return m;
   }
 
-  /**Filter children by type
+  /**
+   * Filter children by type
+   *
    * @param type The type name. must not be null
    * @return The matching children
    */
-  public List<PluginInfo> getChildren(String type){
-    if(children.isEmpty()) return children;
+  public List<PluginInfo> getChildren(String type) {
+    if (children.isEmpty()) return children;
     List<PluginInfo> result = new ArrayList<>();
-    for (PluginInfo child : children) if(type.equals(child.type)) result.add(child);
+    for (PluginInfo child : children) if (type.equals(child.type)) result.add(child);
     return result;
   }
-  public static final PluginInfo EMPTY_INFO = new PluginInfo("",Collections.<String,String>emptyMap(), new NamedList(),Collections.<PluginInfo>emptyList());
+
+  public static final PluginInfo EMPTY_INFO = new PluginInfo("", Collections.emptyMap(), new NamedList(), Collections.emptyList());
 
   private static final HashSet<String> NL_TAGS = new HashSet<>
-    (asList("lst", "arr",
-        "bool",
-        "str",
-        "int", "long",
-        "float", "double"));
+      (asList("lst", "arr",
+          "bool",
+          "str",
+          "int", "long",
+          "float", "double"));
   public static final String DEFAULTS = "defaults";
   public static final String APPENDS = "appends";
   public static final String INVARIANTS = "invariants";
 
-  public boolean isFromSolrConfig(){
+  public boolean isFromSolrConfig() {
     return isFromSolrConfig;
 
   }
+
   public PluginInfo copy() {
     PluginInfo result = new PluginInfo(type, attributes,
         initArgs != null ? initArgs.clone() : null, children);
     result.isFromSolrConfig = isFromSolrConfig;
+    result.pathInConfig = pathInConfig;
     return result;
   }
+
+  public boolean isRuntimePlugin() {
+    return "true".equals(String.valueOf(attributes.get(RuntimeLib.TYPE)));
+  }
+
 }
diff --git a/solr/core/src/java/org/apache/solr/core/RuntimeLib.java b/solr/core/src/java/org/apache/solr/core/RuntimeLib.java
new file mode 100644
index 0000000..4ba2158
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/core/RuntimeLib.java
@@ -0,0 +1,232 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.core;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipInputStream;
+
+import org.apache.solr.cloud.CloudUtil;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.util.CryptoKeys;
+import org.apache.solr.util.SimplePostTool;
+import org.apache.solr.util.plugin.PluginInfoInitialized;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static org.apache.solr.common.params.CommonParams.NAME;
+
+/**
+ * This represents a runtime jar. A jar requires two details: name and version.
+ */
+public class RuntimeLib implements PluginInfoInitialized, AutoCloseable, MapWriter {
+  public static final String TYPE = "runtimeLib";
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+  private final CoreContainer coreContainer;
+  private String name, version, sig, sha256;
+  private BlobRepository.BlobContentRef<ByteBuffer> blobContentRef;
+  ByteBuffer buffer;
+  private boolean verified = false;
+  int znodeVersion = -1;
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.putIfNotNull(NAME, name);
+    ew.putIfNotNull("version", version);
+    ew.putIfNotNull("sig", sig);
+    if (znodeVersion > -1) {
+      ew.put(ConfigOverlay.ZNODEVER, znodeVersion);
+    }
+  }
+  public int getZnodeVersion(){
+    return znodeVersion;
+  }
+
+  public RuntimeLib(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+  }
+
+  public static boolean isEnabled() {
+    return "true".equals(System.getProperty("enable.runtime.lib"));
+  }
+
+  public static List<RuntimeLib> getLibObjects(SolrCore core, List<PluginInfo> libs) {
+    List<RuntimeLib> l = new ArrayList<>(libs.size());
+    for (PluginInfo lib : libs) {
+      RuntimeLib rtl = new RuntimeLib(core.getCoreContainer());
+      try {
+        rtl.init(lib);
+      } catch (Exception e) {
+        log.error("error loading runtime library", e);
+      }
+      l.add(rtl);
+    }
+    return l;
+  }
+
+  @Override
+  public void init(PluginInfo info) {
+    name = info.attributes.get(NAME);
+    sha256 = info.attributes.get(CommonParams.ID);
+    sig = info.attributes.get("sig");
+    Object v = info.attributes.get("version");
+    if (name == null || v == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "runtimeLib must have name and version");
+    }
+    version = String.valueOf(v);
+
+    /*if (sha256 == null) {
+
+    } else {
+      throw new RuntimeException("must have  a version");
+     try {
+        buffer = coreContainer.getBlobRepository().getBlob(sha256);
+      } catch (IOException e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+      }
+      if(buffer == null){
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No such blob : "+ sha256);
+      }
+
+      String digest = BlobRepository.sha256Digest(buffer);
+      if (!sha256.equals(digest)) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString(BlobRepository.INVALID_JAR_MSG,  sha256, digest));
+      }
+      verifyJarSignature(buffer);
+
+      log.debug("dynamic library verified , sha256: {}",  sha256);
+
+    }*/
+
+  }
+
+
+
+  void loadJar() {
+    if (buffer != null) return;
+    synchronized (this) {
+      if (buffer != null) return;
+      // legacy type: the blob repository resolves "<name>/<version>"
+      blobContentRef = coreContainer.getBlobRepository().getBlobIncRef(name + "/" + version);
+      buffer = blobContentRef.blob.get();
+    }
+  }
+
+  public String getName() {
+    return name;
+  }
+
+  public String getVersion() {
+    return version;
+  }
+
+  public String getSig() {
+    return sig;
+
+  }
+
+  public String getSha256() {
+    return sha256;
+  }
+
+  public ByteBuffer getFileContent(String entryName) throws IOException {
+    if (buffer == null)
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "jar not available: " + name);
+    return getFileContent(buffer, entryName);
+
+  }
+
+  public ByteBuffer getFileContent(ByteBuffer buff, String entryName) throws IOException {
+    ByteArrayInputStream zipContents = new ByteArrayInputStream(buff.array(), buff.arrayOffset(), buff.limit());
+    ZipInputStream zis = new ZipInputStream(zipContents);
+    try {
+      ZipEntry entry;
+      while ((entry = zis.getNextEntry()) != null) {
+        if (entryName == null || entryName.equals(entry.getName())) {
+          SimplePostTool.BAOS out = new SimplePostTool.BAOS();
+          byte[] buffer = new byte[2048];
+          int size;
+          while ((size = zis.read(buffer, 0, buffer.length)) != -1) {
+            out.write(buffer, 0, size);
+          }
+          out.close();
+          return out.getByteBuffer();
+        }
+      }
+    } finally {
+      zis.closeEntry();
+    }
+    return null;
+  }
+
+  @Override
+  public void close() throws Exception {
+    if (blobContentRef != null) coreContainer.getBlobRepository().decrementBlobRefCount(blobContentRef);
+  }
+
+  public void verify() throws Exception {
+    if (verified) return;
+    if (buffer == null) {
+      log.error("Calling verify before loading the jar");
+      return;
+    }
+
+    if (!coreContainer.isZooKeeperAware())
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Signing jar is possible only in cloud");
+    verifyJarSignature(buffer);
+  }
+
+  void verifyJarSignature(ByteBuffer buf) {
+    Map<String, byte[]> keys = getPublicKeys();
+    if (keys.isEmpty()) {
+      if (sig == null) {
+        verified = true;
+        return;
+      } else {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No public keys are available in ZK to verify signature for runtime lib  " + name);
+      }
+    } else if (sig == null) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, StrUtils.formatString("runtimelib {0} should be signed with one of the keys in ZK /keys/exe ", name));
+    }
+
+    try {
+      String matchedKey = new CryptoKeys(keys).verify(sig, buf);
+      if (matchedKey == null)
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "No key matched signature for jar : " + name + " version: " + version);
+      log.info("Jar {} signed with {} successfully verified", name, matchedKey);
+    } catch (Exception e) {
+      log.error("Signature verifying error ", e);
+      if (e instanceof SolrException) throw (SolrException) e;
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error verifying key ", e);
+    }
+  }
+
+  protected Map<String, byte[]> getPublicKeys() {
+    return CloudUtil.getTrustedKeys(coreContainer.getZkController().getZkClient(), "exe");
+  }
+}
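Illustrative sketch (not part of the patch, names made up): the legacy runtimeLib lifecycle implemented above. init() requires both name and version, loadJar() resolves the "<name>/<version>" blob through the BlobRepository, and verify() checks the optional sig attribute against the public keys under /keys/exe in ZooKeeper. The sketch sits in org.apache.solr.core only because loadJar() is package-private, and the CoreContainer is assumed to be supplied by the caller.

    package org.apache.solr.core;

    import java.util.HashMap;
    import java.util.Map;

    public class RuntimeLibSketch {
      public static RuntimeLib load(CoreContainer cc) throws Exception {
        Map<String, Object> attrs = new HashMap<>();
        attrs.put("name", "mylib");               // blob name
        attrs.put("version", "1");                // blob version
        attrs.put("sig", "<base64-signature>");   // must match a key under /keys/exe; omit if none are configured

        RuntimeLib lib = new RuntimeLib(cc);
        lib.init(new PluginInfo(RuntimeLib.TYPE, attrs));   // throws without name and version
        lib.loadJar();                                      // fetches the "mylib/1" blob
        lib.verify();                                       // throws if no trusted key matches sig
        return lib;
      }
    }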
diff --git a/solr/core/src/java/org/apache/solr/core/SolrConfig.java b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
index 0c494a5..2ab7851 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrConfig.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrConfig.java
@@ -29,6 +29,7 @@
 import java.nio.file.Paths;
 import java.text.ParseException;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.EnumSet;
 import java.util.HashMap;
@@ -55,6 +56,7 @@
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.IOUtils;
+import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.QueryResponseWriter;
@@ -271,7 +273,8 @@
       args.put("size", "10000");
       args.put("initialSize", "10");
       args.put("showItems", "-1");
-      conf = new CacheConfig(FastLRUCache.class, args, null);
+      args.put("class", FastLRUCache.class.getName());
+      conf = new CacheConfig(args,"query/fieldValueCache");
     }
     fieldValueCacheConfig = conf;
     useColdSearcher = getBool("query/useColdSearcher", false);
@@ -294,11 +297,11 @@
     slowQueryThresholdMillis = getInt("query/slowQueryThresholdMillis", -1);
     for (SolrPluginInfo plugin : plugins) loadPluginInfo(plugin);
 
-    Map<String, CacheConfig> userCacheConfigs = CacheConfig.getMultipleConfigs(this, "query/cache");
+    Map<String, CacheConfig> userCacheConfigs = CacheConfig.getConfigs(this, "query/cache");
     List<PluginInfo> caches = getPluginInfos(SolrCache.class.getName());
     if (!caches.isEmpty()) {
       for (PluginInfo c : caches) {
-        userCacheConfigs.put(c.name, CacheConfig.getConfig(this, "cache", c.attributes, null));
+        userCacheConfigs.put(c.name, new CacheConfig(c.attributes, StrUtils.join(c.pathInConfig, '/')));
       }
     }
     this.userCacheConfigs = Collections.unmodifiableMap(userCacheConfigs);
@@ -372,17 +375,17 @@
       .add(new SolrPluginInfo(TransformerFactory.class, "transformer", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
       .add(new SolrPluginInfo(SearchComponent.class, "searchComponent", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
       .add(new SolrPluginInfo(UpdateRequestProcessorFactory.class, "updateProcessor", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
-      .add(new SolrPluginInfo(SolrCache.class, "cache", REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
-          // TODO: WTF is up with queryConverter???
-          // it apparently *only* works as a singleton? - SOLR-4304
-          // and even then -- only if there is a single SpellCheckComponent
-          // because of queryConverter.setIndexAnalyzer
+      .add(new SolrPluginInfo(SolrCache.class, SolrCache.TYPE, REQUIRE_NAME, REQUIRE_CLASS, MULTI_OK))
+      // TODO: WTF is up with queryConverter???
+      // it apparently *only* works as a singleton? - SOLR-4304
+      // and even then -- only if there is a single SpellCheckComponent
+      // because of queryConverter.setIndexAnalyzer
       .add(new SolrPluginInfo(QueryConverter.class, "queryConverter", REQUIRE_NAME, REQUIRE_CLASS))
-      .add(new SolrPluginInfo(PluginBag.RuntimeLib.class, "runtimeLib", REQUIRE_NAME, MULTI_OK))
-          // this is hackish, since it picks up all SolrEventListeners,
-          // regardless of when/how/why they are used (or even if they are
-          // declared outside of the appropriate context) but there's no nice
-          // way around that in the PluginInfo framework
+      .add(new SolrPluginInfo(RuntimeLib.class, RuntimeLib.TYPE, REQUIRE_NAME, MULTI_OK))
+      // this is hackish, since it picks up all SolrEventListeners,
+      // regardless of when/how/why they are used (or even if they are
+      // declared outside of the appropriate context) but there's no nice
+      // way around that in the PluginInfo framework
       .add(new SolrPluginInfo(InitParams.class, InitParams.TYPE, MULTI_OK, REQUIRE_NAME_IN_OVERLAY))
       .add(new SolrPluginInfo(SolrEventListener.class, "//listener", REQUIRE_CLASS, MULTI_OK, REQUIRE_NAME_IN_OVERLAY))
 
@@ -532,6 +535,9 @@
     NodeList nodes = (NodeList) evaluate(tag, XPathConstants.NODESET);
     for (int i = 0; i < nodes.getLength(); i++) {
       PluginInfo pluginInfo = new PluginInfo(nodes.item(i), "[solrconfig.xml] " + tag, requireName, requireClass);
+      if (requireName) {
+        pluginInfo.pathInConfig = Arrays.asList(tag, pluginInfo.name);
+      }
       if (pluginInfo.isEnabled()) result.add(pluginInfo);
     }
     return result;
@@ -605,7 +611,7 @@
           "cacheControl", cacheControlHeader);
     }
 
-    public static enum LastModFrom {
+    public enum LastModFrom {
       OPENTIME, DIRLASTMOD, BOGUS;
 
       /**
@@ -757,20 +763,24 @@
       Map<String, Map> infos = overlay.getNamedPlugins(info.getCleanTag());
       if (!infos.isEmpty()) {
         LinkedHashMap<String, PluginInfo> map = new LinkedHashMap<>();
-        if (result != null) for (PluginInfo pluginInfo : result) {
-          //just create a UUID for the time being so that map key is not null
-          String name = pluginInfo.name == null ?
-              UUID.randomUUID().toString().toLowerCase(Locale.ROOT) :
-              pluginInfo.name;
-          map.put(name, pluginInfo);
+        if (result != null) {
+          for (PluginInfo pluginInfo : result) {
+            //just create a UUID for the time being so that map key is not null
+            String name = pluginInfo.name == null ?
+                UUID.randomUUID().toString().toLowerCase(Locale.ROOT) :
+                pluginInfo.name;
+            map.put(name, pluginInfo);
+          }
         }
         for (Map.Entry<String, Map> e : infos.entrySet()) {
-          map.put(e.getKey(), new PluginInfo(info.getCleanTag(), e.getValue()));
+          PluginInfo value = new PluginInfo(info.getCleanTag(), e.getValue());
+          value.pathInConfig = Arrays.asList(info.getCleanTag(),e.getKey());
+          map.put(e.getKey(), value);
         }
         result = new ArrayList<>(map.values());
       }
     }
-    return result == null ? Collections.<PluginInfo>emptyList() : result;
+    return result == null ? Collections.emptyList() : result;
   }
 
   public PluginInfo getPluginInfo(String type) {
@@ -945,7 +955,7 @@
 
   private void addCacheConfig(Map queryMap, CacheConfig... cache) {
     if (cache == null) return;
-    for (CacheConfig config : cache) if (config != null) queryMap.put(config.getNodeName(), config);
+    for (CacheConfig config : cache) if (config != null) queryMap.put(config.getName(), config);
 
   }
 
diff --git a/solr/core/src/java/org/apache/solr/core/SolrCore.java b/solr/core/src/java/org/apache/solr/core/SolrCore.java
index b730f3e..57e299f 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrCore.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrCore.java
@@ -239,10 +239,14 @@
   public volatile boolean indexEnabled = true;
   public volatile boolean readOnly = false;
 
+  private final PackageListeners listenerRegistry = new PackageListeners() ;
+
+
   public Set<String> getMetricNames() {
     return metricNames;
   }
 
+
   public Date getStartTimeStamp() {
     return startTime;
   }
@@ -267,6 +271,10 @@
 
   private ExecutorService coreAsyncTaskExecutor = ExecutorUtil.newMDCAwareCachedThreadPool("Core Async Task");
 
+  public PackageListeners getListenerRegistry(){
+    return listenerRegistry;
+  }
+
   /**
    * The SolrResourceLoader used to load all resources for this core.
    *
@@ -838,7 +846,7 @@
       for (Constructor<?> con : cons) {
         Class<?>[] types = con.getParameterTypes();
         if (types.length == 2 && types[0] == SolrCore.class && types[1] == UpdateHandler.class) {
-          return UpdateHandler.class.cast(con.newInstance(this, updateHandler));
+          return (UpdateHandler) con.newInstance(this, updateHandler);
         }
       }
       throw new SolrException(ErrorCode.SERVER_ERROR, "Error Instantiating " + msg + ", " + className + " could not find proper constructor for " + UpdateHandler.class.getName());
@@ -858,7 +866,12 @@
 
   public <T extends Object> T createInitInstance(PluginInfo info, Class<T> cast, String msg, String defClassName) {
     if (info == null) return null;
-    T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, getResourceLoader());
+    String pkg = info.attributes.get(CommonParams.PACKAGE);
+    ResourceLoader resourceLoader = pkg != null?
+        coreContainer.getPackageBag().getResourceLoader(pkg):
+        getResourceLoader();
+
+    T o = createInstance(info.className == null ? defClassName : info.className, cast, msg, this, resourceLoader);
     if (o instanceof PluginInfoInitialized) {
       ((PluginInfoInitialized) o).init(info);
     } else if (o instanceof NamedListInitializedPlugin) {
@@ -966,7 +979,7 @@
       this.codec = initCodec(solrConfig, this.schema);
 
       memClassLoader = new MemClassLoader(
-          PluginBag.RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(PluginBag.RuntimeLib.class.getName())),
+          RuntimeLib.getLibObjects(this, solrConfig.getPluginInfos(RuntimeLib.class.getName())),
           getResourceLoader());
       initIndex(prev != null, reload);
 
@@ -2405,7 +2418,6 @@
 
       if (!success) {
         newSearcherOtherErrorsCounter.inc();
-        ;
         synchronized (searcherLock) {
           onDeckSearchers--;
 
@@ -3108,8 +3120,7 @@
     try {
       Stat stat = zkClient.exists(zkPath, null, true);
       if (stat == null) {
-        if (currentVersion > -1) return true;
-        return false;
+        return currentVersion > -1;
       }
       if (stat.getVersion() > currentVersion) {
         log.debug("{} is stale will need an update from {} to {}", zkPath, currentVersion, stat.getVersion());
diff --git a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
index f27edbc..054a214 100644
--- a/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
+++ b/solr/core/src/java/org/apache/solr/core/SolrResourceLoader.java
@@ -63,6 +63,7 @@
 import org.apache.lucene.codecs.PostingsFormat;
 import org.apache.lucene.util.IOUtils;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.handler.admin.CoreAdminHandler;
 import org.apache.solr.handler.component.SearchComponent;
 import org.apache.solr.handler.component.ShardHandlerFactory;
@@ -576,8 +577,8 @@
       }
     }
   }
-  
-  static final String empty[] = new String[0];
+
+  static final String[] empty = new String[0];
   
   @Override
   public <T> T newInstance(String name, Class<T> expectedType) {
@@ -808,6 +809,7 @@
    * manipulated using select Solr features (e.g. streaming expressions).
    */
   public static final String USER_FILES_DIRECTORY = "userfiles";
+  public static final String FILESTORE_DIRECTORY = "filestore";
   public static void ensureUserFilesDataDir(Path solrHome) {
     final Path userFilesPath = getUserFilesPath(solrHome);
     final File userFilesDirectory = new File(userFilesPath.toString());
@@ -823,10 +825,28 @@
     }
   }
 
+  public static void ensureFileStoreDir(Path solrHome) {
+    final Path fileStoreDirPath = getFileStoreDirPath(solrHome);
+    final File packageDir = new File(fileStoreDirPath.toFile(), CommonParams.PACKAGE);
+    if (! packageDir.exists()) {
+      try {
+        final boolean created = packageDir.mkdirs();
+        if (! created) {
+          log.warn("Unable to create [{}] directory in SOLR_HOME [{}].  Features requiring this directory may fail.", packageDir, solrHome);
+        }
+      } catch (Exception e) {
+          log.warn("Unable to create [" + packageDir + "] directory in SOLR_HOME [" + solrHome + "].  Features requiring this directory may fail.", e);
+      }
+    }
+  }
+
+  public static Path getFileStoreDirPath(Path solrHome) {
+    return Paths.get(solrHome.toAbsolutePath().toString(), FILESTORE_DIRECTORY).toAbsolutePath();
+  }
+
   public static Path getUserFilesPath(Path solrHome) {
     return Paths.get(solrHome.toAbsolutePath().toString(), USER_FILES_DIRECTORY).toAbsolutePath();
   }
-
   // Logs a message only once per startup
   private static void logOnceInfo(String key, String msg) {
     if (!loggedOnce.contains(key)) {
@@ -923,7 +943,7 @@
           throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
         }
       }
-      try (OutputStream out = new FileOutputStream(confFile);) {
+      try (OutputStream out = new FileOutputStream(confFile)) {
         out.write(content);
       }
       log.info("Written confile " + resourceName);
diff --git a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
index dc1d1b1..5098a0d 100644
--- a/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/ReplicationHandler.java
@@ -1387,7 +1387,7 @@
     });
   }
 
-  public void close() {
+  public void shutdown() {
     if (executorService != null) executorService.shutdown();
     if (pollingIndexFetcher != null) {
       pollingIndexFetcher.destroy();
diff --git a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
index eca391b..212c30c 100644
--- a/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
+++ b/solr/core/src/java/org/apache/solr/handler/RequestHandlerBase.java
@@ -22,11 +22,14 @@
 import java.util.Set;
 import java.util.concurrent.ConcurrentHashMap;
 
-import com.codahale.metrics.MetricRegistry;
-import com.google.common.collect.ImmutableList;
 import com.codahale.metrics.Counter;
 import com.codahale.metrics.Meter;
+import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
+import com.google.common.collect.ImmutableList;
+import org.apache.solr.api.Api;
+import org.apache.solr.api.ApiBag;
+import org.apache.solr.api.ApiSupport;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.params.ShardParams;
 import org.apache.solr.common.params.SolrParams;
@@ -43,9 +46,6 @@
 import org.apache.solr.response.SolrQueryResponse;
 import org.apache.solr.search.SyntaxError;
 import org.apache.solr.util.SolrPluginUtils;
-import org.apache.solr.api.Api;
-import org.apache.solr.api.ApiBag;
-import org.apache.solr.api.ApiSupport;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
diff --git a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
index 11c6404..703ae23 100644
--- a/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/SolrConfigHandler.java
@@ -36,6 +36,7 @@
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.locks.Lock;
 import java.util.concurrent.locks.ReentrantLock;
+import java.util.function.Consumer;
 
 import com.google.common.collect.ImmutableMap;
 import com.google.common.collect.ImmutableSet;
@@ -47,6 +48,7 @@
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.cloud.ZkController;
 import org.apache.solr.cloud.ZkSolrResourceLoader;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
@@ -62,9 +64,10 @@
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.ConfigOverlay;
-import org.apache.solr.core.PluginBag;
+import org.apache.solr.core.PackageListeners;
 import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.RequestParams;
+import org.apache.solr.core.RuntimeLib;
 import org.apache.solr.core.SolrConfig;
 import org.apache.solr.core.SolrCore;
 import org.apache.solr.core.SolrResourceLoader;
@@ -150,11 +153,262 @@
 
   public static boolean getImmutable(SolrCore core) {
     NamedList configSetProperties = core.getConfigSetProperties();
-    if(configSetProperties == null) return false;
+    if (configSetProperties == null) return false;
     Object immutable = configSetProperties.get(IMMUTABLE_CONFIGSET_ARG);
-    return immutable != null ? Boolean.parseBoolean(immutable.toString()) : false;
+    return immutable != null && Boolean.parseBoolean(immutable.toString());
   }
 
+  public static String validateName(String s) {
+    for (int i = 0; i < s.length(); i++) {
+      char c = s.charAt(i);
+      if ((c >= 'A' && c <= 'Z') ||
+          (c >= 'a' && c <= 'z') ||
+          (c >= '0' && c <= '9') ||
+          c == '_' ||
+          c == '-' ||
+          c == '.'
+      ) continue;
+      else {
+        return formatString("''{0}'' name may only contain chars [a-zA-Z_-.0-9]", s);
+      }
+    }
+    return null;
+  }
+
+  /**
+   * Block up to a specified maximum time until we see agreement on the schema
+   * version in ZooKeeper across all replicas for a collection.
+   */
+  public static void waitForAllReplicasState(String collection,
+                                             ZkController zkController,
+                                             String prop,
+                                             int expectedVersion,
+                                             int maxWaitSecs) {
+    final RTimer timer = new RTimer();
+    // get a list of active replica cores to query for the schema zk version (skipping this core of course)
+    List<PerReplicaCallable> concurrentTasks = new ArrayList<>();
+
+    for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) {
+      PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs);
+      concurrentTasks.add(e);
+    }
+    if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
+
+    log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}",
+        maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection));
+
+    // use an executor service to invoke schema zk version requests in parallel with a max wait time
+    execInparallel(concurrentTasks, parallelExecutor -> {
+      try {
+        List<String> failedList = executeAll(expectedVersion, maxWaitSecs, concurrentTasks, parallelExecutor);
+        // if any tasks haven't completed within the specified timeout, it's an error
+        if (failedList != null)
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+              formatString("{0} out of {1} replicas failed to set the property {2} to version {3} within {4} seconds! Failed cores: {5}",
+                  failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
+      } catch (InterruptedException e) {
+        log.warn(formatString(
+            "Core was interrupted while trying to set the property {0} to version {1} to propagate to {2} replicas for collection {3}",
+            prop, expectedVersion, concurrentTasks.size(), collection));
+        Thread.currentThread().interrupt();
+      }
+    });
+
+    log.info("Took {}ms to set the property {} to be of version {} for collection {}",
+        timer.getTime(), prop, expectedVersion, collection);
+  }
+
+  public static void execInparallel(List<? extends PerReplicaCallable> concurrentTasks, Consumer<ExecutorService> fun) {
+    int poolSize = Math.min(concurrentTasks.size(), 10);
+    ExecutorService parallelExecutor =
+        ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor"));
+    try {
+
+      fun.accept(parallelExecutor);
+
+    } finally {
+      ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor);
+    }
+  }
+
+  @Override
+  public SolrRequestHandler getSubHandler(String path) {
+    if (subPaths.contains(path)) return this;
+    if (path.startsWith("/params/")) return this;
+    List<String> p = StrUtils.splitSmart(path, '/', true);
+    if (p.size() > 1) {
+      if (subPaths.contains("/" + p.get(0))) return this;
+    }
+    return null;
+  }
+
+
+  private static Set<String> subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler",
+      "/query", "/jmx", "/requestDispatcher", "/znodeVersion"));
+
+  static {
+    for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins)
+      subPaths.add("/" + solrPluginInfo.getCleanTag());
+
+  }
+
+  //////////////////////// SolrInfoMBeans methods //////////////////////
+
+
+  @Override
+  public String getDescription() {
+    return "Edit solrconfig.xml";
+  }
+
+  @Override
+  public Category getCategory() {
+    return Category.ADMIN;
+  }
+
+
+  public static final String SET_PROPERTY = "set-property";
+  public static final String UNSET_PROPERTY = "unset-property";
+  public static final String SET_USER_PROPERTY = "set-user-property";
+  public static final String UNSET_USER_PROPERTY = "unset-user-property";
+  public static final String SET = "set";
+  public static final String UPDATE = "update";
+  public static final String CREATE = "create";
+  private static Set<String> cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add");
+
+  public static List<String> executeAll(int expectedVersion, int maxWaitSecs, List<? extends PerReplicaCallable> concurrentTasks, ExecutorService parallelExecutor) throws InterruptedException {
+    List<Future<Boolean>> results =
+        parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS);
+
+    // determine whether all replicas have the update
+    List<String> failedList = null; // lazily init'd
+    for (int f = 0; f < results.size(); f++) {
+      Boolean success = false;
+      Future<Boolean> next = results.get(f);
+      if (next.isDone() && !next.isCancelled()) {
+        // looks to have finished, but need to check if it succeeded
+        try {
+          success = next.get();
+        } catch (ExecutionException e) {
+          // shouldn't happen since we checked isCancelled
+        }
+      }
+
+      if (!success) {
+        String coreUrl = concurrentTasks.get(f).coreUrl;
+        log.warn("Core " + coreUrl + " could not get the expected version " + expectedVersion);
+        if (failedList == null) failedList = new ArrayList<>();
+        failedList.add(coreUrl);
+      }
+    }
+    return failedList;
+  }
+
+  public static class PerReplicaCallable extends SolrRequest implements Callable<Boolean> {
+    protected String coreUrl;
+    String prop;
+    protected int expectedZkVersion;
+    protected Number remoteVersion = null;
+    protected int maxWait;
+
+    public PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) {
+      super(METHOD.GET, "/config/" + ZNODEVER);
+      this.coreUrl = coreUrl;
+      this.expectedZkVersion = expectedZkVersion;
+      this.prop = prop;
+      this.maxWait = maxWait;
+    }
+
+    @Override
+    public SolrParams getParams() {
+      return new ModifiableSolrParams()
+          .set(prop, expectedZkVersion)
+          .set(CommonParams.WT, CommonParams.JAVABIN);
+    }
+
+    @Override
+    public Boolean call() throws Exception {
+      final RTimer timer = new RTimer();
+      int attempts = 0;
+      try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
+        // eventually, this loop will get killed by the ExecutorService's timeout
+        while (true) {
+          try {
+            long timeElapsed = (long) timer.getTime() / 1000;
+            if (timeElapsed >= maxWait) {
+              return false;
+            }
+            log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
+            Thread.sleep(100);
+            MapWriter resp = solr.httpUriRequest(this).future.get();
+            if (verifyResponse(resp, attempts)) break;
+            attempts++;
+          } catch (Exception e) {
+            if (e instanceof InterruptedException) {
+              break; // stop looping
+            } else {
+              log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e);
+            }
+          }
+        }
+      }
+      return true;
+    }
+
+    protected boolean verifyResponse(MapWriter mw, int attempts) {
+      NamedList resp = (NamedList) mw;
+      if (resp != null) {
+        Map m = (Map) resp.get(ZNODEVER);
+        if (m != null) {
+          remoteVersion = (Number) m.get(prop);
+          if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) return true;
+          log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2} after {3} attempts", expectedZkVersion, coreUrl, prop, attempts));
+
+        }
+      }
+      return false;
+    }
+
+
+    @Override
+    protected SolrResponse createResponse(SolrClient client) {
+      return null;
+    }
+  }
+
+  public static List<String> getActiveReplicaCoreUrls(ZkController zkController,
+                                                      String collection) {
+    List<String> activeReplicaCoreUrls = new ArrayList<>();
+    ClusterState clusterState = zkController.getZkStateReader().getClusterState();
+    Set<String> liveNodes = clusterState.getLiveNodes();
+    final DocCollection docCollection = clusterState.getCollectionOrNull(collection);
+    if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) {
+      final Collection<Slice> activeSlices = docCollection.getActiveSlices();
+      for (Slice next : activeSlices) {
+        Map<String, Replica> replicasMap = next.getReplicasMap();
+        if (replicasMap != null) {
+          for (Map.Entry<String, Replica> entry : replicasMap.entrySet()) {
+            Replica replica = entry.getValue();
+            if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
+              activeReplicaCoreUrls.add(replica.getCoreUrl());
+            }
+          }
+        }
+      }
+    }
+    return activeReplicaCoreUrls;
+  }
+
+  @Override
+  public Name getPermissionName(AuthorizationContext ctx) {
+    switch (ctx.getHttpMethod()) {
+      case "GET":
+        return Name.CONFIG_READ_PERM;
+      case "POST":
+        return Name.CONFIG_EDIT_PERM;
+      default:
+        return null;
+    }
+  }
 
   private class Command {
     private final SolrQueryRequest req;
@@ -257,25 +511,54 @@
 
     private Map<String, Object> getConfigDetails(String componentType, SolrQueryRequest req) {
       String componentName = componentType == null ? null : req.getParams().get("componentName");
-      boolean showParams = req.getParams().getBool("expandParams", false);
-      Map<String, Object> map = this.req.getCore().getSolrConfig().toMap(new LinkedHashMap<>());
-      if (componentType != null && !SolrRequestHandler.TYPE.equals(componentType)) return map;
-      Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE);
-      if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>());
-      List<PluginInfo> plugins = this.req.getCore().getImplicitHandlers();
-      for (PluginInfo plugin : plugins) {
-        if (SolrRequestHandler.TYPE.equals(plugin.type)) {
-          if (!reqHandlers.containsKey(plugin.name)) {
-            reqHandlers.put(plugin.name, plugin);
-          }
+      if (componentName == null && parts.size() > 2) {
+        componentName = parts.get(2);
+        if (SolrRequestHandler.TYPE.equals(componentType)) {
+          componentName = "/" + componentName;
         }
       }
-      if (!showParams) return map;
-      for (Object o : reqHandlers.entrySet()) {
-        Map.Entry e = (Map.Entry) o;
-        if (componentName == null || e.getKey().equals(componentName)) {
-          Map<String, Object> m = expandUseParams(req, e.getValue());
-          e.setValue(m);
+
+      boolean showParams = req.getParams().getBool("expandParams", false);
+      Map<String, Object> map = this.req.getCore().getSolrConfig().toMap(new LinkedHashMap<>());
+      if (SolrRequestHandler.TYPE.equals(componentType) || componentType == null) {
+        Map reqHandlers = (Map) map.get(SolrRequestHandler.TYPE);
+        if (reqHandlers == null) map.put(SolrRequestHandler.TYPE, reqHandlers = new LinkedHashMap<>());
+        List<PluginInfo> plugins = this.req.getCore().getImplicitHandlers();
+        for (PluginInfo plugin : plugins) {
+          if (SolrRequestHandler.TYPE.equals(plugin.type)) {
+            if (!reqHandlers.containsKey(plugin.name)) {
+              reqHandlers.put(plugin.name, plugin);
+            }
+          }
+        }
+        if (showParams) {
+          for (Object o : reqHandlers.entrySet()) {
+            Map.Entry e = (Map.Entry) o;
+            if (componentName == null || e.getKey().equals(componentName)) {
+              Map<String, Object> m = expandUseParams(req, e.getValue());
+              e.setValue(m);
+            }
+          }
+        }
+
+      }
+
+      if (req.getParams().getBool("meta", false)) {
+        for (PackageListeners.Listener pkgListener : req.getCore().getListenerRegistry().getListeners()) {
+          PluginInfo meta = pkgListener.pluginInfo();
+          if (meta.pathInConfig != null) {
+            Object obj = Utils.getObjectByPath(map, false, meta.pathInConfig);
+            if (obj instanceof Map) {
+              Map m = (Map) obj;
+              m.put("_packageinfo_", pkgListener.packageInfo());
+            } else if (obj instanceof MapWriter) {
+              MapWriter mw = (MapWriter) obj;
+              Utils.setObjectByPath(map, meta.pathInConfig, (MapWriter) ew -> {
+                mw.writeMap(ew);
+                ew.put("_packageinfo_", pkgListener.packageInfo());
+              }, false);
+            }
+          }
         }
       }
 
@@ -351,6 +634,8 @@
           }
         }
       } catch (Exception e) {
+        log.error("Error executing commands: " + Utils.toJSONString(ops), e);
         resp.setException(e);
         resp.add(CommandOperation.ERR_MSGS, singletonList(SchemaManager.getErrorStr(e)));
       }
@@ -425,7 +710,7 @@
 
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
-        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing params", errs);
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing params", errs);
       }
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
@@ -488,7 +773,7 @@
       }
       List errs = CommandOperation.captureErrors(ops);
       if (!errs.isEmpty()) {
-        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST,"error processing commands", errs);
+        throw new ApiBag.ExceptionWithErrObject(SolrException.ErrorCode.BAD_REQUEST, "error processing commands", errs);
       }
 
       SolrResourceLoader loader = req.getCore().getResourceLoader();
@@ -526,20 +811,20 @@
       op.getMap(PluginInfo.INVARIANTS, null);
       op.getMap(PluginInfo.APPENDS, null);
       if (op.hasError()) return overlay;
-      if(info.clazz == PluginBag.RuntimeLib.class) {
-        if(!PluginBag.RuntimeLib.isEnabled()){
+      if (info.clazz == RuntimeLib.class) {
+        if (!RuntimeLib.isEnabled()) {
           op.addError("Solr not started with -Denable.runtime.lib=true");
           return overlay;
         }
         try {
-          new PluginBag.RuntimeLib(req.getCore()).init(new PluginInfo(info.tag, op.getDataMap()));
+          new RuntimeLib(req.getCore().getCoreContainer()).init(new PluginInfo(info.tag, op.getDataMap()));
         } catch (Exception e) {
           op.addError(e.getMessage());
           log.error("can't load this plugin ", e);
           return overlay;
         }
       }
-      if (!verifyClass(op, clz, info.clazz)) return overlay;
+      if (!verifyClass(op, clz, info)) return overlay;
       if (pluginExists(info, overlay, name)) {
         if (isCeate) {
           op.addError(formatString(" ''{0}'' already exists . Do an ''{1}'' , if you want to change it ", name, "update-" + info.getTagCleanLower()));
@@ -559,16 +844,23 @@
 
     private boolean pluginExists(SolrConfig.SolrPluginInfo info, ConfigOverlay overlay, String name) {
       List<PluginInfo> l = req.getCore().getSolrConfig().getPluginInfos(info.clazz.getName());
-      for (PluginInfo pluginInfo : l) if(name.equals( pluginInfo.name)) return true;
+      for (PluginInfo pluginInfo : l) if (name.equals(pluginInfo.name)) return true;
       return overlay.getNamedPlugins(info.getCleanTag()).containsKey(name);
     }
 
-    private boolean verifyClass(CommandOperation op, String clz, Class expected) {
+    private boolean verifyClass(CommandOperation op, String clz, SolrConfig.SolrPluginInfo pluginMeta) {
       if (clz == null) return true;
-      if (!"true".equals(String.valueOf(op.getStr("runtimeLib", null)))) {
+      PluginInfo info = new PluginInfo(pluginMeta.getCleanTag(), op.getDataMap());
+
+      if (info.isRuntimePlugin() && !RuntimeLib.isEnabled()) {
+        op.addError("Solr not started with -Denable.runtime.lib=true");
+        return false;
+      }
+
+      if (!"true".equals(String.valueOf(op.getStr(RuntimeLib.TYPE, null)))) {
         //this is not dynamically loaded so we can verify the class right away
         try {
-          req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), expected, clz, "");
+          req.getCore().createInitInstance(new PluginInfo(SolrRequestHandler.TYPE, op.getDataMap()), pluginMeta.clazz, clz, "");
         } catch (Exception e) {
           op.addError(e.getMessage());
           return false;
@@ -666,235 +958,6 @@
 
   }
 
-  public static String validateName(String s) {
-    for (int i = 0; i < s.length(); i++) {
-      char c = s.charAt(i);
-      if ((c >= 'A' && c <= 'Z') ||
-          (c >= 'a' && c <= 'z') ||
-          (c >= '0' && c <= '9') ||
-          c == '_' ||
-          c == '-' ||
-          c == '.'
-          ) continue;
-      else {
-        return formatString("''{0}'' name should only have chars [a-zA-Z_-.0-9] ", s);
-      }
-    }
-    return null;
-  }
-
-  @Override
-  public SolrRequestHandler getSubHandler(String path) {
-    if (subPaths.contains(path)) return this;
-    if (path.startsWith("/params/")) return this;
-    return null;
-  }
-
-
-  private static Set<String> subPaths = new HashSet<>(Arrays.asList("/overlay", "/params", "/updateHandler",
-      "/query", "/jmx", "/requestDispatcher", "/znodeVersion"));
-
-  static {
-    for (SolrConfig.SolrPluginInfo solrPluginInfo : SolrConfig.plugins)
-      subPaths.add("/" + solrPluginInfo.getCleanTag());
-
-  }
-
-  //////////////////////// SolrInfoMBeans methods //////////////////////
-
-
-  @Override
-  public String getDescription() {
-    return "Edit solrconfig.xml";
-  }
-
-  @Override
-  public Category getCategory() {
-    return Category.ADMIN;
-  }
-
-
-  public static final String SET_PROPERTY = "set-property";
-  public static final String UNSET_PROPERTY = "unset-property";
-  public static final String SET_USER_PROPERTY = "set-user-property";
-  public static final String UNSET_USER_PROPERTY = "unset-user-property";
-  public static final String SET = "set";
-  public static final String UPDATE = "update";
-  public static final String CREATE = "create";
-  private static Set<String> cmdPrefixes = ImmutableSet.of(CREATE, UPDATE, "delete", "add");
-
-  /**
-   * Block up to a specified maximum time until we see agreement on the schema
-   * version in ZooKeeper across all replicas for a collection.
-   */
-  private static void waitForAllReplicasState(String collection,
-                                              ZkController zkController,
-                                              String prop,
-                                              int expectedVersion,
-                                              int maxWaitSecs) {
-    final RTimer timer = new RTimer();
-    // get a list of active replica cores to query for the schema zk version (skipping this core of course)
-    List<PerReplicaCallable> concurrentTasks = new ArrayList<>();
-
-    for (String coreUrl : getActiveReplicaCoreUrls(zkController, collection)) {
-      PerReplicaCallable e = new PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs);
-      concurrentTasks.add(e);
-    }
-    if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
-
-    log.info(formatString("Waiting up to {0} secs for {1} replicas to set the property {2} to be of version {3} for collection {4}",
-        maxWaitSecs, concurrentTasks.size(), prop, expectedVersion, collection));
-
-    // use an executor service to invoke schema zk version requests in parallel with a max wait time
-    int poolSize = Math.min(concurrentTasks.size(), 10);
-    ExecutorService parallelExecutor =
-        ExecutorUtil.newMDCAwareFixedThreadPool(poolSize, new DefaultSolrThreadFactory("solrHandlerExecutor"));
-    try {
-      List<Future<Boolean>> results =
-          parallelExecutor.invokeAll(concurrentTasks, maxWaitSecs, TimeUnit.SECONDS);
-
-      // determine whether all replicas have the update
-      List<String> failedList = null; // lazily init'd
-      for (int f = 0; f < results.size(); f++) {
-        Boolean success = false;
-        Future<Boolean> next = results.get(f);
-        if (next.isDone() && !next.isCancelled()) {
-          // looks to have finished, but need to check if it succeeded
-          try {
-            success = next.get();
-          } catch (ExecutionException e) {
-            // shouldn't happen since we checked isCancelled
-          }
-        }
-
-        if (!success) {
-          String coreUrl = concurrentTasks.get(f).coreUrl;
-          log.warn("Core " + coreUrl + "could not get the expected version " + expectedVersion);
-          if (failedList == null) failedList = new ArrayList<>();
-          failedList.add(coreUrl);
-        }
-      }
-
-      // if any tasks haven't completed within the specified timeout, it's an error
-      if (failedList != null)
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-            formatString("{0} out of {1} the property {2} to be of version {3} within {4} seconds! Failed cores: {5}",
-                failedList.size(), concurrentTasks.size() + 1, prop, expectedVersion, maxWaitSecs, failedList));
-
-    } catch (InterruptedException ie) {
-      log.warn(formatString(
-          "Core  was interrupted . trying to set the property {1} to version {2} to propagate to {3} replicas for collection {4}",
-          prop, expectedVersion, concurrentTasks.size(), collection));
-      Thread.currentThread().interrupt();
-    } finally {
-      ExecutorUtil.shutdownAndAwaitTermination(parallelExecutor);
-    }
-
-    log.info("Took {}ms to set the property {} to be of version {} for collection {}",
-        timer.getTime(), prop, expectedVersion, collection);
-  }
-
-  public static List<String> getActiveReplicaCoreUrls(ZkController zkController,
-                                                      String collection) {
-    List<String> activeReplicaCoreUrls = new ArrayList<>();
-    ClusterState clusterState = zkController.getZkStateReader().getClusterState();
-    Set<String> liveNodes = clusterState.getLiveNodes();
-    final DocCollection docCollection = clusterState.getCollectionOrNull(collection);
-    if (docCollection != null && docCollection.getActiveSlices() != null && docCollection.getActiveSlices().size() > 0) {
-      final Collection<Slice> activeSlices = docCollection.getActiveSlices();
-      for (Slice next : activeSlices) {
-        Map<String, Replica> replicasMap = next.getReplicasMap();
-        if (replicasMap != null) {
-          for (Map.Entry<String, Replica> entry : replicasMap.entrySet()) {
-            Replica replica = entry.getValue();
-            if (replica.getState() == Replica.State.ACTIVE && liveNodes.contains(replica.getNodeName())) {
-              activeReplicaCoreUrls.add(replica.getCoreUrl());
-            }
-          }
-        }
-      }
-    }
-    return activeReplicaCoreUrls;
-  }
-
-  @Override
-  public Name getPermissionName(AuthorizationContext ctx) {
-    switch (ctx.getHttpMethod()) {
-      case "GET":
-        return Name.CONFIG_READ_PERM;
-      case "POST":
-        return Name.CONFIG_EDIT_PERM;
-      default:
-        return null;
-    }
-  }
-
-  private static class PerReplicaCallable extends SolrRequest implements Callable<Boolean> {
-    String coreUrl;
-    String prop;
-    int expectedZkVersion;
-    Number remoteVersion = null;
-    int maxWait;
-
-    PerReplicaCallable(String coreUrl, String prop, int expectedZkVersion, int maxWait) {
-      super(METHOD.GET, "/config/" + ZNODEVER);
-      this.coreUrl = coreUrl;
-      this.expectedZkVersion = expectedZkVersion;
-      this.prop = prop;
-      this.maxWait = maxWait;
-    }
-
-    @Override
-    public SolrParams getParams() {
-      return new ModifiableSolrParams()
-          .set(prop, expectedZkVersion)
-          .set(CommonParams.WT, CommonParams.JAVABIN);
-    }
-
-    @Override
-    public Boolean call() throws Exception {
-      final RTimer timer = new RTimer();
-      int attempts = 0;
-      try (HttpSolrClient solr = new HttpSolrClient.Builder(coreUrl).build()) {
-        // eventually, this loop will get killed by the ExecutorService's timeout
-        while (true) {
-          try {
-            long timeElapsed = (long) timer.getTime() / 1000;
-            if (timeElapsed >= maxWait) {
-              return false;
-            }
-            log.info("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
-            Thread.sleep(100);
-            NamedList<Object> resp = solr.httpUriRequest(this).future.get();
-            if (resp != null) {
-              Map m = (Map) resp.get(ZNODEVER);
-              if (m != null) {
-                remoteVersion = (Number) m.get(prop);
-                if (remoteVersion != null && remoteVersion.intValue() >= expectedZkVersion) break;
-              }
-            }
-
-            attempts++;
-            log.info(formatString("Could not get expectedVersion {0} from {1} for prop {2}   after {3} attempts", expectedZkVersion, coreUrl, prop, attempts));
-          } catch (Exception e) {
-            if (e instanceof InterruptedException) {
-              break; // stop looping
-            } else {
-              log.warn("Failed to get /schema/zkversion from " + coreUrl + " due to: " + e);
-            }
-          }
-        }
-      }
-      return true;
-    }
-
-
-    @Override
-    protected SolrResponse createResponse(SolrClient client) {
-      return null;
-    }
-  }
-
   @Override
   public Collection<Api> getApis() {
     return ApiBag.wrapRequestHandlers(this,
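
The SolrConfigHandler changes above split the old monolithic waitForAllReplicasState into two reusable public helpers: execInparallel owns the executor lifecycle, while executeAll invokes the callables and returns the cores that never reached the expected version. A minimal sketch of how a caller composes the two (this is essentially what waitForAllReplicasState now does, and what ClusterAPI.waitForStateSync further down reuses for cluster-property propagation); the method name waitForProp is illustrative and imports are omitted:

    // Sketch: wait until every active replica of `collection` reports `prop`
    // at `expectedVersion`, reusing the helpers added above.
    static void waitForProp(ZkController zkController, String collection,
                            String prop, int expectedVersion, int maxWaitSecs) {
      List<SolrConfigHandler.PerReplicaCallable> tasks = new ArrayList<>();
      for (String coreUrl : SolrConfigHandler.getActiveReplicaCoreUrls(zkController, collection)) {
        tasks.add(new SolrConfigHandler.PerReplicaCallable(coreUrl, prop, expectedVersion, maxWaitSecs));
      }
      if (tasks.isEmpty()) return; // nothing to wait for
      SolrConfigHandler.execInparallel(tasks, executor -> {
        try {
          List<String> failed = SolrConfigHandler.executeAll(expectedVersion, maxWaitSecs, tasks, executor);
          if (failed != null) {
            // these cores never reported expectedVersion within maxWaitSecs
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "replicas out of sync: " + failed);
          }
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      });
    }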
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java b/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java
index 85033f3..07928a9 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/BaseHandlerApiSupport.java
@@ -17,6 +17,7 @@
 
 package org.apache.solr.handler.admin;
 
+import java.lang.invoke.MethodHandles;
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.Collections;
@@ -38,6 +39,8 @@
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import static org.apache.solr.client.solrj.SolrRequest.METHOD.POST;
 import static org.apache.solr.common.SolrException.ErrorCode.BAD_REQUEST;
@@ -49,28 +52,36 @@
  * to actions and old parameter names to new parameter names
  */
 public abstract class BaseHandlerApiSupport implements ApiSupport {
-  protected final Map<SolrRequest.METHOD, Map<V2EndPoint, List<ApiCommand>>> commandsMapping;
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
-  protected BaseHandlerApiSupport() {
-    commandsMapping = new HashMap<>();
-    for (ApiCommand cmd : getCommands()) {
-      Map<V2EndPoint, List<ApiCommand>> m = commandsMapping.get(cmd.meta().getHttpMethod());
-      if (m == null) commandsMapping.put(cmd.meta().getHttpMethod(), m = new HashMap<>());
-      List<ApiCommand> list = m.get(cmd.meta().getEndPoint());
-      if (list == null) m.put(cmd.meta().getEndPoint(), list = new ArrayList<>());
-      list.add(cmd);
-    }
-  }
+  List<Api> apis;
 
   @Override
   public synchronized Collection<Api> getApis() {
-    ImmutableList.Builder<Api> l = ImmutableList.builder();
-    for (V2EndPoint op : getEndPoints()) l.add(getApi(op));
-    return l.build();
+    if (apis == null) {
+      Map<SolrRequest.METHOD, Map<V2EndPoint, List<ApiCommand>>> commandsMapping = new HashMap<>();
+      for (ApiCommand cmd : getCommands()) {
+        Map<V2EndPoint, List<ApiCommand>> m = commandsMapping.get(cmd.meta().getHttpMethod());
+        if (m == null) commandsMapping.put(cmd.meta().getHttpMethod(), m = new HashMap<>());
+        List<ApiCommand> list = m.get(cmd.meta().getEndPoint());
+        if (list == null) m.put(cmd.meta().getEndPoint(), list = new ArrayList<>());
+        list.add(cmd);
+      }
+      ImmutableList.Builder<Api> l = ImmutableList.builder();
+      for (V2EndPoint op : getEndPoints()) l.add(getApi(commandsMapping, op));
+      l.addAll(getV2OnlyApis());
+      apis = l.build();
+    }
+    return apis;
+
+  }
+
+  protected Collection<Api> getV2OnlyApis() {
+    return Collections.EMPTY_LIST;
   }
 
 
-  private Api getApi(final V2EndPoint op) {
+  private Api getApi(Map<SolrRequest.METHOD, Map<V2EndPoint, List<ApiCommand>>> commandsMapping, final V2EndPoint op) {
     final BaseHandlerApiSupport apiHandler = this;
     return new Api(Utils.getSpec(op.getSpecName())) {
       @Override
@@ -79,6 +90,10 @@
         SolrRequest.METHOD method = SolrRequest.METHOD.valueOf(req.getHttpMethod());
         List<ApiCommand> commands = commandsMapping.get(method).get(op);
         try {
+          if (commands != null && commands.size() == 1 && commands.get(0).isRaw()) {
+            commands.get(0).invoke(req, rsp, apiHandler);
+            return;
+          }
           if (method == POST) {
             List<CommandOperation> cmds = req.getCommands(true);
             if (cmds.size() > 1)
@@ -117,8 +132,10 @@
           }
 
         } catch (SolrException e) {
+          log.error("error running command", e);
           throw e;
         } catch (Exception e) {
+          log.error("error running command", e);
           throw new SolrException(BAD_REQUEST, e); //TODO BAD_REQUEST is a wild guess; should we flip the default?  fail here to investigate how this happens in tests
         } finally {
           req.setParams(params);
@@ -180,7 +197,7 @@
 
           @Override
           public Map toMap(Map<String, Object> suppliedMap) {
-            for(Iterator<String> it=getParameterNamesIterator(); it.hasNext(); ) {
+            for (Iterator<String> it = getParameterNamesIterator(); it.hasNext(); ) {
               final String param = it.next();
               String key = cmd.meta().getParamSubstitute(param);
               Object o = key.indexOf('.') > 0 ?
@@ -195,10 +212,10 @@
                   Number.class.isAssignableFrom(oClass) ||
                   Character.class.isAssignableFrom(oClass) ||
                   Boolean.class.isAssignableFrom(oClass)) {
-                suppliedMap.put(param,String.valueOf(o));
-              } else if (List.class.isAssignableFrom(oClass) && ((List)o).get(0) instanceof String ) {
+                suppliedMap.put(param, String.valueOf(o));
+              } else if (List.class.isAssignableFrom(oClass) && ((List) o).get(0) instanceof String) {
                 List<String> l = (List<String>) o;
-                suppliedMap.put( param, l.toArray(new String[0]));
+                suppliedMap.put(param, l.toArray(new String[0]));
               } else {
                 // Lists pass through but will require special handling downstream
                 // if they contain non-string elements.
@@ -216,9 +233,19 @@
   protected abstract Collection<V2EndPoint> getEndPoints();
 
 
-  public interface ApiCommand  {
+  public interface ApiCommand {
+
     CommandMeta meta();
 
+    /**
+     * If true, the framework does nothing with the payload; the command implementation handles it entirely.
+     */
+    default boolean isRaw() {
+      return false;
+    }
+
     void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception;
   }
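
The lazily built getApis() above also gains an isRaw() escape hatch: when an endpoint maps to exactly one raw command, the request is handed straight to invoke() without the usual CommandOperation parsing. A sketch of a command that opts in, assuming a CommandMeta instance is supplied by the caller (its construction depends on the concrete endpoint and is not shown in this patch):

    // Sketch: a command that consumes its own payload (for example a binary
    // upload), so the generic command parsing in getApi() is bypassed.
    static ApiCommand rawCommand(CommandMeta commandMeta) {
      return new ApiCommand() {
        @Override
        public CommandMeta meta() {
          return commandMeta;
        }

        @Override
        public boolean isRaw() {
          return true; // framework skips req.getCommands() and calls invoke() directly
        }

        @Override
        public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) {
          // read req.getContentStreams() here and populate rsp
        }
      };
    }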
 
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/ClusterAPI.java b/solr/core/src/java/org/apache/solr/handler/admin/ClusterAPI.java
new file mode 100644
index 0000000..ca31add
--- /dev/null
+++ b/solr/core/src/java/org/apache/solr/handler/admin/ClusterAPI.java
@@ -0,0 +1,420 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler.admin;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+
+import org.apache.http.client.HttpClient;
+import org.apache.solr.api.AnnotatedApi;
+import org.apache.solr.api.Api;
+import org.apache.solr.api.CallInfo;
+import org.apache.solr.api.Command;
+import org.apache.solr.api.EndPoint;
+import org.apache.solr.common.MapWriter;
+import org.apache.solr.common.SolrException;
+import org.apache.solr.common.cloud.ClusterProperties;
+import org.apache.solr.common.cloud.ZkStateReader;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.ContentStream;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.CoreContainer;
+import org.apache.solr.core.PackageBag;
+import org.apache.solr.handler.SolrConfigHandler;
+import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.util.RTimer;
+import org.apache.solr.util.SimplePostTool;
+import org.apache.zookeeper.data.Stat;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.util.Arrays.asList;
+import static org.apache.solr.common.params.CommonParams.PACKAGES;
+import static org.apache.solr.common.util.StrUtils.formatString;
+import static org.apache.solr.core.BlobRepository.sha256Digest;
+import static org.apache.solr.core.ConfigOverlay.ZNODEVER;
+import static org.apache.solr.security.PermissionNameProvider.Name.COLL_EDIT_PERM;
+import static org.apache.solr.security.PermissionNameProvider.Name.COLL_READ_PERM;
+import static org.apache.solr.security.PermissionNameProvider.Name.FILESTORE_WRITE;
+import static org.apache.solr.security.PermissionNameProvider.Name.PKG_EDIT;
+import static org.apache.solr.security.PermissionNameProvider.Name.PKG_READ;
+
+// Implements v2-only APIs at the /cluster/* end point
+public class ClusterAPI {
+  private final CoreContainer coreContainer;
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+  ClusterAPI(CoreContainer coreContainer) {
+    this.coreContainer = coreContainer;
+  }
+
+
+  //sync the cluster props in every node
+  void syncClusterProps(CallInfo info) throws IOException {
+    Stat stat = new Stat();
+    Map<String, Object> clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient()).getClusterProperties(stat);
+    try {
+      coreContainer.getPackageBag().onChange(clusterProperties);
+    } catch (SolrException e) {
+      log.error("Error executing command: " + info.command.jsonStr(), e);
+      throw e;
+    } catch (Exception e) {
+      log.error("Error executing command: " + info.command.jsonStr(), e);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error executing command", e);
+    }
+    log.info("Current version of clusterprops.json is {}, trying to get every node to update", stat.getVersion());
+    log.debug("The current clusterprops.json:  {}", clusterProperties);
+    waitForStateSync(stat.getVersion(), coreContainer);
+  }
+
+  void waitForStateSync(int expectedVersion, CoreContainer coreContainer) {
+    final RTimer timer = new RTimer();
+    int waitTimeSecs = 30;
+    // get a list of active replica cores to query for the schema zk version (skipping this core of course)
+    List<ClusterAPI.PerNodeCallable> concurrentTasks = new ArrayList<>();
+
+    ZkStateReader zkStateReader = coreContainer.getZkController().getZkStateReader();
+    for (String nodeName : zkStateReader.getClusterState().getLiveNodes()) {
+      PerNodeCallable e = new PerNodeCallable(coreContainer.getUpdateShardHandler().getDefaultHttpClient(), zkStateReader.getBaseUrlForNodeName(nodeName), expectedVersion, waitTimeSecs);
+      concurrentTasks.add(e);
+    }
+    if (concurrentTasks.isEmpty()) return; // nothing to wait for ...
+
+    log.info("Waiting up to {} secs for {} nodes to update clusterprops to be of version {} ",
+        waitTimeSecs, concurrentTasks.size(), expectedVersion);
+    SolrConfigHandler.execInparallel(concurrentTasks, parallelExecutor -> {
+      try {
+        List<String> failedList = SolrConfigHandler.executeAll(expectedVersion, waitTimeSecs, concurrentTasks, parallelExecutor);
+
+        // if any tasks haven't completed within the specified timeout, it's an error
+        if (failedList != null)
+          throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
+              formatString("{0} out of {1} nodes failed to update clusterprops to version {2} within {3} seconds! Failed nodes: {4}",
+                  failedList.size(), concurrentTasks.size() + 1, expectedVersion, waitTimeSecs, failedList));
+      } catch (InterruptedException e) {
+        log.warn(formatString(
+            "Request was interrupted while trying to set the clusterprops to version {0} to propagate to {1} nodes",
+            expectedVersion, concurrentTasks.size()));
+        Thread.currentThread().interrupt();
+
+      }
+    });
+
+    log.info("Took {}ms to update the clusterprops to be of version {} on {} nodes",
+        timer.getTime(), expectedVersion, concurrentTasks.size());
+
+  }
+
+  public List<Api> getAllApis() {
+    List<Api> result = new ArrayList<>();
+    result.add(new AnnotatedApi(new ClusterAPI.ListNodes()));
+    result.add(new AnnotatedApi(new FileStoreWrite()));
+    result.add(new AnnotatedApi(new ClusterAPI.PkgRead()));
+    result.add(new AnnotatedApi(new ClusterAPI.PkgEdit()));
+    result.add(new AnnotatedApi(new ClusterAPI.ClusterCommands()));
+    return result;
+  }
+
+  @EndPoint(
+      spec = "cluster.packages.Commands",
+      permission = PKG_EDIT
+  )
+  public class PkgEdit {
+
+    @Command(name = "add")
+    public void add(CallInfo callInfo) throws Exception {
+      if (addUpdatePackage(callInfo)) {
+        syncClusterProps(callInfo);
+      }
+
+    }
+
+    @Command(name = "update")
+    public void update(CallInfo callInfo) throws Exception {
+      if (addUpdatePackage(callInfo)) {
+        syncClusterProps(callInfo);
+      }
+    }
+
+    @Command(name = "delete")
+    public void delPkg(CallInfo info) throws Exception {
+      if (deletePackage(info)) {
+        syncClusterProps(info);
+      }
+    }
+
+
+    boolean deletePackage(CallInfo params) throws Exception {
+      if (checkEnabled(params)) return false;
+      String name = params.command.getStr(CommandOperation.ROOT_OBJ);
+      ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
+      Map<String, Object> props = clusterProperties.getClusterProperties();
+      List<String> pathToLib = asList(PACKAGES, name);
+      Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib);
+      if (existing == null) {
+        params.command.addError("No such package: " + name);
+        return false;
+      }
+      Map delta = new LinkedHashMap();
+      Utils.setObjectByPath(delta, pathToLib, null, true);
+      clusterProperties.setClusterProperties(delta);
+      return true;
+    }
+
+
+    boolean checkEnabled(CallInfo info) {
+      if (!PackageBag.enablePackage) {
+        info.command.addError("node not started with enable.package=true");
+        return true;
+      }
+      return false;
+    }
+
+    boolean addUpdatePackage(CallInfo params) throws Exception {
+      if (checkEnabled(params)) return false;
+      CommandOperation op = params.command;
+      String name = op.getStr("name");
+      ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
+      Map<String, Object> props = clusterProperties.getClusterProperties();
+      List<String> pathToLib = asList(PACKAGES, name);
+      Map existing = (Map) Utils.getObjectByPath(props, false, pathToLib);
+      Map<String, Object> dataMap = Utils.getDeepCopy(op.getDataMap(), 3);
+      PackageBag.PackageInfo packageInfo = new PackageBag.PackageInfo(dataMap, 0);
+
+      if ("add".equals(op.name)) {
+        if (existing != null) {
+          op.addError(StrUtils.formatString("A package with the name ''{0}'' already exists", name));
+          return false;
+        }
+      } else {// this is an update command
+        if (existing == null) {
+          op.addError(StrUtils.formatString("The package with the name ''{0}'' does not exist", name));
+          return false;
+        }
+        PackageBag.PackageInfo oldInfo = new PackageBag.PackageInfo(existing, 1);
+        if (Objects.equals(oldInfo, packageInfo)) {
+          op.addError("Trying to update a package with the same data");
+          return false;
+        }
+      }
+      try {
+        List<String> errs = packageInfo.validate(coreContainer);
+        if (!errs.isEmpty()) {
+          for (String err : errs) op.addError(err);
+          return false;
+        }
+      } catch (FileNotFoundException fnfe) {
+        op.addError(fnfe.getMessage());
+        return false;
+
+      } catch (SolrException e) {
+        log.error("Error loading package ", e);
+        op.addError(e.getMessage());
+        return false;
+      }
+
+      Map delta = new LinkedHashMap();
+      Utils.setObjectByPath(delta, pathToLib, packageInfo, true);
+      log.debug("addUpdate package: {}", Utils.toJSONString(delta));
+      clusterProperties.setClusterProperties(delta);
+      return true;
+
+    }
+
+  }
+
+  @EndPoint(
+      spec = "cluster.packages.GET",
+      permission = PKG_READ
+  )
+  public class PkgRead {
+    @Command
+    public void list(CallInfo info) throws IOException {
+      ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
+      info.rsp.add(PACKAGES, clusterProperties.getClusterProperty(PACKAGES, MapWriter.EMPTY));
+    }
+  }
+
+  @EndPoint(spec = "cluster.filestore",
+      permission = FILESTORE_WRITE)
+  public class FileStoreWrite {
+    @Command
+    public void add(CallInfo info) {
+      Iterable<ContentStream> streams = info.req.getContentStreams();
+      if (streams == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "no payload");
+      String sha256 = null;
+      ContentStream stream = streams.iterator().next();
+      try {
+        String name = info.req.getParams().get(CommonParams.NAME);
+        if (name != null) validateName(name);
+        ByteBuffer buf = SimplePostTool.inputStreamToByteArray(stream.getStream());
+        sha256 = sha256Digest(buf);
+        String fileId = name == null ? sha256 : sha256 + "-" + name;
+        coreContainer.getFileStore().distributeFile(buf, fileId);
+        info.rsp.add(CommonParams.ID, fileId);
+      } catch (IOException e) {
+        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, e);
+      }
+    }
+
+  }
+
+  @EndPoint(spec = "cluster.nodes",
+      permission = COLL_READ_PERM)
+  public class ListNodes {
+    @Command
+    public void list(CallInfo info) {
+      info.rsp.add("nodes", coreContainer.getZkController().getClusterState().getLiveNodes());
+    }
+  }
+
+  @EndPoint(spec = "cluster.Commands",
+      permission = COLL_EDIT_PERM)
+  public class ClusterCommands {
+    @Command(name = "add-requesthandler")
+    public void addHandler(CallInfo info) throws Exception {
+      if (addRequestHandler(info)) syncClusterProps(info);
+    }
+
+    @Command(name = "delete-requesthandler")
+    public void delHandler(CallInfo info) throws Exception {
+      if (deleteReqHandler(info)) syncClusterProps(info);
+    }
+
+    @Command(name = "set-obj-property")
+    public void setObj(CallInfo info) {
+      ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
+      try {
+        clusterProperties.setClusterProperties(info.command.getDataMap());
+      } catch (Exception e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in API", e);
+      }
+
+    }
+
+
+    boolean addRequestHandler(CallInfo info) throws Exception {
+      Map data = info.command.getDataMap();
+      String name = (String) data.get("name");
+      ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
+      Map<String, Object> map = clusterProperties.getClusterProperties();
+      if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) != null) {
+        info.command.addError("A requestHandler with that name already exists");
+        return false;
+      }
+      Map m = new LinkedHashMap();
+      Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), data, true);
+      clusterProperties.setClusterProperties(m);
+      return true;
+    }
+
+    boolean deleteReqHandler(CallInfo info) throws Exception {
+      String name = info.command.getStr("");
+      ClusterProperties clusterProperties = new ClusterProperties(coreContainer.getZkController().getZkClient());
+      Map<String, Object> map = clusterProperties.getClusterProperties();
+      if (Utils.getObjectByPath(map, false, asList(SolrRequestHandler.TYPE, name)) == null) {
+        info.command.addError("No such requestHandler with name: " + name);
+        return false;
+      }
+      Map m = new LinkedHashMap();
+      Utils.setObjectByPath(m, asList(SolrRequestHandler.TYPE, name), null, true);
+      clusterProperties.setClusterProperties(m);
+      return true;
+    }
+
+  }
+
+  static class PerNodeCallable extends SolrConfigHandler.PerReplicaCallable {
+    private final HttpClient httpClient;
+    final String v2Url;
+
+    static final List<String> path = Arrays.asList("metadata", CommonParams.VERSION);
+
+    PerNodeCallable(HttpClient httpClient, String baseUrl, int expectedversion, int waitTime) {
+      super(baseUrl, ZNODEVER, expectedversion, waitTime);
+      this.httpClient = httpClient;
+      v2Url = baseUrl.replace("/solr", "/api") + "/node/ext?wt=javabin&omitHeader=true";
+    }
+
+    @Override
+    protected boolean verifyResponse(MapWriter mw, int attempts) {
+      remoteVersion = (Number) mw._get(path, -1);
+      if (remoteVersion.intValue() >= expectedZkVersion) return true;
+      log.info(formatString("Could not get expectedVersion {0} from {1}, remote val = {2} after {3} attempts", expectedZkVersion, coreUrl, remoteVersion, attempts));
+
+      return false;
+    }
+
+
+    @Override
+    public Boolean call() throws Exception {
+      final RTimer timer = new RTimer();
+      int attempts = 0;
+
+      // eventually, this loop will get killed by the ExecutorService's timeout
+      while (true) {
+        try {
+          long timeElapsed = (long) timer.getTime() / 1000;
+          if (timeElapsed >= maxWait) {
+            return false;
+          }
+          log.debug("Time elapsed : {} secs, maxWait {}", timeElapsed, maxWait);
+          Thread.sleep(100);
+          MapWriter resp = (MapWriter) Utils.executeGET(httpClient, v2Url, Utils.JAVABINCONSUMER);
+          if (verifyResponse(resp, attempts)) {
+            break;
+          }
+          attempts++;
+        } catch (Exception e) {
+          if (e instanceof InterruptedException) {
+            break; // stop looping
+          } else {
+            log.warn("Failed to execute " + v2Url + " due to: " + e);
+          }
+        }
+      }
+      return true;
+    }
+
+  }
+
+  static final String INVALIDCHARS = " /\\#&*\n\t%@~`=+^$><?{}[]|:;!";
+
+  public static void validateName(String name) {
+    for (int i = 0; i < name.length(); i++) {
+      for (int j = 0; j < INVALIDCHARS.length(); j++) {
+        if (name.charAt(i) == INVALIDCHARS.charAt(j))
+          throw new IllegalArgumentException("Unsupported char in file name: " + name);
+      }
+    }
+  }
+
+
+}
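
ClusterAPI's FileStoreWrite derives the file id from the payload's SHA-256 digest, optionally suffixed with a client-supplied name that must pass validateName. A small sketch of that naming scheme applied to a local jar, reusing the same helpers the handler calls above (the jar path and name are illustrative; imports are omitted):

    // Sketch: compute the id the file store would assign to this payload.
    static String fileStoreId(Path jar, String name) throws IOException {
      try (InputStream in = Files.newInputStream(jar)) {
        ByteBuffer buf = SimplePostTool.inputStreamToByteArray(in);
        String sha256 = BlobRepository.sha256Digest(buf);
        if (name != null) ClusterAPI.validateName(name); // rejects ' ', '/', '\', '#', '&', '*', ...
        return name == null ? sha256 : sha256 + "-" + name; // same scheme as FileStoreWrite.add
      }
    }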
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
index d7d179a..2fb3a9f 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionHandlerApi.java
@@ -18,39 +18,43 @@
 package org.apache.solr.handler.admin;
 
 import java.lang.invoke.MethodHandles;
-import java.util.Arrays;
 import java.util.Collection;
-import java.util.EnumMap;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.solr.api.Api;
 import org.apache.solr.client.solrj.request.CollectionApiMapping;
 import org.apache.solr.client.solrj.request.CollectionApiMapping.CommandMeta;
 import org.apache.solr.client.solrj.request.CollectionApiMapping.Meta;
 import org.apache.solr.client.solrj.request.CollectionApiMapping.V2EndPoint;
-import org.apache.solr.common.Callable;
-import org.apache.solr.common.SolrException;
-import org.apache.solr.common.cloud.ClusterProperties;
-import org.apache.solr.common.util.CommandOperation;
 import org.apache.solr.handler.admin.CollectionsHandler.CollectionOperation;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static java.util.Arrays.asList;
+
 public class CollectionHandlerApi extends BaseHandlerApiSupport {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   final CollectionsHandler handler;
-  static Collection<ApiCommand> apiCommands = createCollMapping();
+  final Collection<ApiCommand> apiCommands;
+  public CollectionHandlerApi(CollectionsHandler handler) {
+    this.handler = handler;
+    apiCommands = createApiMapping();
+  }
 
-  private static Collection<ApiCommand> createCollMapping() {
-    Map<Meta, ApiCommand> result = new EnumMap<>(Meta.class);
+  private Collection<ApiCommand> createApiMapping() {
+
+    // map each v2 command meta to an ApiCommand backed by the matching v1 CollectionOperation
+    Map<CommandMeta, ApiCommand> apiMapping = new HashMap<>();
 
     for (Meta meta : Meta.values()) {
       for (CollectionOperation op : CollectionOperation.values()) {
         if (op.action == meta.action) {
-          result.put(meta, new ApiCommand() {
+          apiMapping.put(meta, new ApiCommand() {
             @Override
             public CommandMeta meta() {
               return meta;
@@ -64,57 +68,16 @@
         }
       }
     }
-    //The following APIs have only V2 implementations
-    addApi(result, Meta.GET_NODES, params -> params.rsp.add("nodes", ((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getClusterState().getLiveNodes()));
-    addApi(result, Meta.SET_CLUSTER_PROPERTY_OBJ, params -> {
-      List<CommandOperation> commands = params.req.getCommands(true);
-      if (commands == null || commands.isEmpty()) throw new RuntimeException("Empty commands");
-      ClusterProperties clusterProperties = new ClusterProperties(((CollectionHandlerApi) params.apiHandler).handler.coreContainer.getZkController().getZkClient());
 
-      try {
-        clusterProperties.setClusterProperties(commands.get(0).getDataMap());
-      } catch (Exception e) {
-        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error in API", e);
-      }
-    });
-
-    for (Meta meta : Meta.values()) {
-      if (result.get(meta) == null) {
-        log.error("ERROR_INIT. No corresponding API implementation for : " + meta.commandName);
-      }
-    }
-
-    return result.values();
+    return apiMapping.values();
   }
 
-  private static void addApi(Map<Meta, ApiCommand> result, Meta metaInfo, Callable<ApiParams> fun) {
-    result.put(metaInfo, new ApiCommand() {
-      @Override
-      public CommandMeta meta() {
-        return metaInfo;
-      }
 
-      @Override
-      public void invoke(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) throws Exception {
-        fun.call(new ApiParams(req, rsp, apiHandler));
-      }
-    });
-  }
 
-  static class ApiParams {
-    final SolrQueryRequest req;
-    final SolrQueryResponse rsp;
-    final BaseHandlerApiSupport apiHandler;
 
-    ApiParams(SolrQueryRequest req, SolrQueryResponse rsp, BaseHandlerApiSupport apiHandler) {
-      this.req = req;
-      this.rsp = rsp;
-      this.apiHandler = apiHandler;
-    }
-  }
-
-  public CollectionHandlerApi(CollectionsHandler handler) {
-    this.handler = handler;
+  @Override
+  protected List<V2EndPoint> getEndPoints() {
+    return asList(CollectionApiMapping.EndPoint.values());
   }
 
   @Override
@@ -123,8 +86,8 @@
   }
 
   @Override
-  protected List<V2EndPoint> getEndPoints() {
-    return Arrays.asList(CollectionApiMapping.EndPoint.values());
+  protected Collection<Api> getV2OnlyApis() {
+    return new ClusterAPI(handler.getCoreContainer()).getAllApis();
   }
 
 }
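
With the v2-only cluster APIs moved into ClusterAPI, CollectionHandlerApi only has to override getV2OnlyApis(); the same hook is available to any other BaseHandlerApiSupport subclass. A sketch of the pattern, where MyV2OnlyApis stands in for a hypothetical @EndPoint/@Command-annotated class that is not part of this patch:

    // Sketch: contribute extra v2-only endpoints as ready-made Api objects.
    @Override
    protected Collection<Api> getV2OnlyApis() {
      return Collections.singletonList(new AnnotatedApi(new MyV2OnlyApis()));
    }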
diff --git a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
index 5843a94..445c0c5 100644
--- a/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
+++ b/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
@@ -149,10 +149,10 @@
 import static org.apache.solr.common.params.CommonAdminParams.ASYNC;
 import static org.apache.solr.common.params.CommonAdminParams.IN_PLACE_MOVE;
 import static org.apache.solr.common.params.CommonAdminParams.NUM_SUB_SHARDS;
+import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX;
 import static org.apache.solr.common.params.CommonAdminParams.SPLIT_FUZZ;
 import static org.apache.solr.common.params.CommonAdminParams.SPLIT_METHOD;
 import static org.apache.solr.common.params.CommonAdminParams.WAIT_FOR_FINAL_STATE;
-import static org.apache.solr.common.params.CommonAdminParams.SPLIT_BY_PREFIX;
 import static org.apache.solr.common.params.CommonParams.NAME;
 import static org.apache.solr.common.params.CommonParams.TIMING;
 import static org.apache.solr.common.params.CommonParams.VALUE_LONG;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
index 4f4f232..203d8f7 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
@@ -79,8 +79,8 @@
 import org.apache.solr.common.util.NamedList;
 import org.apache.solr.common.util.SimpleOrderedMap;
 import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.core.XmlConfigFile;
 import org.apache.solr.core.SolrCore;
+import org.apache.solr.core.XmlConfigFile;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.transform.ElevatedMarkerFactory;
 import org.apache.solr.response.transform.ExcludedMarkerFactory;
diff --git a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
index 2d6fdb1..3ede10d 100644
--- a/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
+++ b/solr/core/src/java/org/apache/solr/handler/component/SuggestComponent.java
@@ -90,7 +90,7 @@
 
   protected SolrMetricManager metricManager;
   protected String registryName;
-  
+
   /**
    * Key is the dictionary name used in SolrConfig, value is the corresponding {@link SolrSuggester}
    */
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
index 7d2877d..f029e60 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricManager.java
@@ -690,13 +690,13 @@
     }
   }
 
   /**
    * This is a wrapper for {@link Gauge} metrics, which are usually implemented as
    * lambdas that often keep a reference to their parent instance. In order to make sure that
    * all such metrics are removed when their parent instance is removed / closed the
    * metric is associated with an instance tag, which can be used then to remove
    * wrappers with the matching tag using {@link #unregisterGauges(String, String)}.
    */
   public static class GaugeWrapper<T> implements Gauge<T> {
     private final Gauge<T> gauge;
     private final String tag;
@@ -736,7 +736,7 @@
         removed.incrementAndGet();
         return true;
       } else {
         return false;
       }
     });
     return removed.get();
@@ -774,6 +774,7 @@
       sb.append(name);
       return sb.toString();
     }
+
   }
 
   /**
diff --git a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
index d5c23b5..deb2b18 100644
--- a/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
+++ b/solr/core/src/java/org/apache/solr/metrics/SolrMetricProducer.java
@@ -23,13 +23,13 @@
 
   /**
    * Initializes metrics specific to this producer
-   * @param manager an instance of {@link SolrMetricManager}
+   * @param manager  an instance of {@link SolrMetricManager}
    * @param registry registry name where metrics are registered
-   * @param tag a symbolic tag that represents this instance of the producer,
-   * or a group of related instances that have the same life-cycle. This tag is
-   * used when managing life-cycle of some metrics and is set when
-   * {@link #initializeMetrics(SolrMetricManager, String, String, String)} is called.
-   * @param scope scope of the metrics (eg. handler name) to separate metrics of
+   * @param tag      a symbolic tag that represents this instance of the producer,
+   *                 or a group of related instances that have the same life-cycle. This tag is
+   *                 used when managing life-cycle of some metrics and is set when
+   *                 {@link #initializeMetrics(SolrMetricManager, String, String, String)} is called.
+   * @param scope    scope of the metrics (e.g. handler name) to separate metrics of different instances of the same component
    */
   void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope);
 }
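
A minimal sketch of a producer implementing the updated contract (the class, metric name, and gauge below are illustrative, not part of this patch). Solr's own producers register gauges through SolrMetricManager so that the tag-based cleanup described above in GaugeWrapper / unregisterGauges(registry, tag) can remove them when the parent instance is closed.

    import com.codahale.metrics.Gauge;
    import org.apache.solr.metrics.SolrMetricManager;
    import org.apache.solr.metrics.SolrMetricProducer;

    // Hypothetical producer: keeps a counter and exposes it as a gauge under the given scope.
    public class MyComponentMetrics implements SolrMetricProducer {
      private volatile long requestCount;

      @Override
      public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
        // Register a simple gauge in the named registry; the metric name is made up for the example.
        manager.registry(registry).register("QUERY." + scope + ".exampleRequestCount",
            (Gauge<Long>) () -> requestCount);
      }

      public void onRequest() {
        requestCount++;
      }
    }
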
diff --git a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
index 4abeedd..f437296 100644
--- a/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
+++ b/solr/core/src/java/org/apache/solr/rest/schema/FieldTypeXmlAdapter.java
@@ -16,12 +16,11 @@
  */
 package org.apache.solr.rest.schema;
 
-import java.util.List;
-import java.util.Map;
-
 import javax.xml.parsers.DocumentBuilder;
 import javax.xml.parsers.DocumentBuilderFactory;
 import javax.xml.parsers.ParserConfigurationException;
+import java.util.List;
+import java.util.Map;
 
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
diff --git a/solr/core/src/java/org/apache/solr/search/CacheConfig.java b/solr/core/src/java/org/apache/solr/search/CacheConfig.java
index 16a9d57..182033f 100644
--- a/solr/core/src/java/org/apache/solr/search/CacheConfig.java
+++ b/solr/core/src/java/org/apache/solr/search/CacheConfig.java
@@ -14,148 +14,148 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.solr.search;
 
 import javax.xml.xpath.XPathConstants;
-import java.lang.invoke.MethodHandles;
-import java.util.Collections;
+import java.io.IOException;
 import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.lucene.analysis.util.ResourceLoader;
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.util.StrUtils;
-import org.apache.solr.common.MapSerializable;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.ConfigOverlay;
+import org.apache.solr.core.PackageBag;
+import org.apache.solr.core.PluginInfo;
 import org.apache.solr.core.SolrConfig;
-import org.apache.solr.core.SolrResourceLoader;
+import org.apache.solr.core.SolrCore;
 import org.apache.solr.util.DOMUtil;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.w3c.dom.Node;
 import org.w3c.dom.NodeList;
 
 import static org.apache.solr.common.params.CommonParams.NAME;
 
-/**
- * Contains the knowledge of how cache config is
- * stored in the solrconfig.xml file, and implements a
- * factory to create caches.
- *
- *
- */
-public class CacheConfig implements MapSerializable{
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
-  private String nodeName;
+public class CacheConfig implements MapWriter {
+  final PluginInfo args;
+  private CacheRegenerator defRegen;
+  private final String name;
+  private String cacheImpl, regenImpl;
+  Object[] persistence = new Object[1];
 
-  private Class<? extends SolrCache> clazz;
-  private Map<String,String> args;
-  private CacheRegenerator regenerator;
 
-  private String cacheImpl;
-
-  private Object[] persistence = new Object[1];
-
-  private String regenImpl;
-
-  public CacheConfig() {}
-
-  public CacheConfig(Class<? extends SolrCache> clazz, Map<String,String> args, CacheRegenerator regenerator) {
-    this.clazz = clazz;
-    this.args = args;
-    this.regenerator = regenerator;
+  public CacheConfig(Map<String, String> args, String path) {
+    this.args = new PluginInfo(SolrCache.TYPE, (Map) copyValsAsString(args));
+    this.name = args.get(NAME);
+    this.cacheImpl = args.getOrDefault("class", "solr.LRUCache");
+    this.regenImpl = args.get("regenerator");
+    this.args.pathInConfig = StrUtils.splitSmart(path, '/', true);
   }
 
-  public CacheRegenerator getRegenerator() {
-    return regenerator;
+  static Map<String, String> copyValsAsString(Map m) {
+    Map<String, String> copy = new LinkedHashMap(m.size());
+    m.forEach((k, v) -> copy.put(String.valueOf(k), String.valueOf(v)));
+    return copy;
   }
 
-  public void setRegenerator(CacheRegenerator regenerator) {
-    this.regenerator = regenerator;
+  public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) {
+    Node node = solrConfig.getNode(xpath, false);
+    if (node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) {
+      Map<String, String> m = solrConfig.getOverlay().getEditableSubProperties(xpath);
+      if (m == null) return null;
+      List<String> pieces = StrUtils.splitSmart(xpath, '/');
+      String name = pieces.get(pieces.size() - 1);
+      m = Utils.getDeepCopy(m, 2);
+      m.put(NAME, name);
+      return new CacheConfig(m, xpath);
+    } else {
+      Map<String, String> attrs = DOMUtil.toMap(node.getAttributes());
+      attrs.put(NAME, node.getNodeName());
+      return new CacheConfig(applyOverlay(xpath, solrConfig.getOverlay(), attrs), xpath);
+    }
   }
 
-  public static Map<String, CacheConfig> getMultipleConfigs(SolrConfig solrConfig, String configPath) {
+  private static Map applyOverlay(String xpath, ConfigOverlay overlay, Map args) {
+    Map<String, String> map = xpath == null ? null : overlay.getEditableSubProperties(xpath);
+    if (map != null) {
+      HashMap<String, String> mapCopy = new HashMap<>(args);
+      for (Map.Entry<String, String> e : map.entrySet()) {
+        mapCopy.put(e.getKey(), String.valueOf(e.getValue()));
+      }
+      return mapCopy;
+    }
+    return args;
+  }
+
+  public static Map<String, CacheConfig> getConfigs(SolrConfig solrConfig, String configPath) {
     NodeList nodes = (NodeList) solrConfig.evaluate(configPath, XPathConstants.NODESET);
     if (nodes == null || nodes.getLength() == 0) return new LinkedHashMap<>();
     Map<String, CacheConfig> result = new HashMap<>(nodes.getLength());
     for (int i = 0; i < nodes.getLength(); i++) {
-      CacheConfig config = getConfig(solrConfig, nodes.item(i).getNodeName(), DOMUtil.toMap(nodes.item(i).getAttributes()), configPath);
-      result.put(config.args.get(NAME), config);
+      Map<String, String> args = DOMUtil.toMap(nodes.item(i).getAttributes());
+      result.put(args.get(NAME), new CacheConfig(args, configPath + "/" + args.get(NAME)));
     }
     return result;
   }
 
-
-  public static CacheConfig getConfig(SolrConfig solrConfig, String xpath) {
-    Node node = solrConfig.getNode(xpath, false);
-    if(node == null || !"true".equals(DOMUtil.getAttrOrDefault(node, "enabled", "true"))) {
-      Map<String, String> m = solrConfig.getOverlay().getEditableSubProperties(xpath);
-      if(m==null) return null;
-      List<String> parts = StrUtils.splitSmart(xpath, '/');
-      return getConfig(solrConfig,parts.get(parts.size()-1) , Collections.EMPTY_MAP,xpath);
-    }
-    return getConfig(solrConfig, node.getNodeName(),DOMUtil.toMap(node.getAttributes()), xpath);
+  public String getName() {
+    return name;
   }
 
 
-  public static CacheConfig getConfig(SolrConfig solrConfig, String nodeName, Map<String,String> attrs, String xpath) {
-    CacheConfig config = new CacheConfig();
-    config.nodeName = nodeName;
-    Map attrsCopy = new LinkedHashMap<>(attrs.size());
-    for (Map.Entry<String, String> e : attrs.entrySet()) {
-      attrsCopy.put(e.getKey(), String.valueOf(e.getValue()));
-    }
-    attrs = attrsCopy;
-    config.args = attrs;
+  public <K, V> SolrCacheHolder<K, V> newInstance(SolrCore core) {
+    return new SolrCacheHolder(new CacheInfo(this, core));
+  }
 
-    Map<String, String> map = xpath == null ? null : solrConfig.getOverlay().getEditableSubProperties(xpath);
-    if(map != null){
-      HashMap<String, String> mapCopy = new HashMap<>(config.args);
-      for (Map.Entry<String, String> e : map.entrySet()) {
-        mapCopy.put(e.getKey(),String.valueOf(e.getValue()));
+  static class CacheInfo {
+    final CacheConfig cfg;
+    SolrCore core;
+    SolrCache cache = null;
+    String pkg;
+    PackageBag.PackageInfo packageInfo;
+    CacheRegenerator regen = null;
+
+    CacheInfo(CacheConfig cfg, SolrCore core) {
+      this.core = core;
+      this.cfg = cfg;
+      pkg = cfg.args.attributes.get(CommonParams.PACKAGE);
+      ResourceLoader loader = pkg == null ? core.getResourceLoader() :
+          core.getCoreContainer().getPackageBag().getResourceLoader(pkg);
+
+      try {
+        cache = loader.findClass(cfg.cacheImpl, SolrCache.class).getConstructor().newInstance();
+        regen = null;
+        if (cfg.regenImpl != null) {
+          regen = loader.findClass(cfg.regenImpl, CacheRegenerator.class).getConstructor().newInstance();
+        }
+      } catch (Exception e) {
+        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Error loading cache " + cfg.jsonStr(), e);
       }
-      config.args = mapCopy;
-    }
-    String nameAttr = config.args.get(NAME);  // OPTIONAL
-    if (nameAttr==null) {
-      config.args.put(NAME, config.nodeName);
-    }
+      if (regen == null && cfg.defRegen != null) regen = cfg.defRegen;
+      cfg.persistence[0] = cache.init(cfg.args.attributes, cfg.persistence[0], regen);
+      if (pkg != null) {
+        packageInfo = core.getCoreContainer().getPackageBag().getPackageInfo(pkg);
+      }
 
-    SolrResourceLoader loader = solrConfig.getResourceLoader();
-    config.cacheImpl = config.args.get("class");
-    if(config.cacheImpl == null) config.cacheImpl = "solr.LRUCache";
-    config.regenImpl = config.args.get("regenerator");
-    config.clazz = loader.findClass(config.cacheImpl, SolrCache.class);
-    if (config.regenImpl != null) {
-      config.regenerator = loader.newInstance(config.regenImpl, CacheRegenerator.class);
     }
-    
-    return config;
   }
 
-  public SolrCache newInstance() {
-    try {
-      SolrCache cache = clazz.getConstructor().newInstance();
-      persistence[0] = cache.init(args, persistence[0], regenerator);
-      return cache;
-    } catch (Exception e) {
-      SolrException.log(log,"Error instantiating cache",e);
-      // we can carry on without a cache... but should we?
-      // in some cases (like an OOM) we probably should try to continue.
-      return null;
-    }
+
+  public void setDefaultRegenerator(CacheRegenerator regen) {
+    this.defRegen = regen;
   }
 
   @Override
-  public Map<String, Object> toMap(Map<String, Object> map) {
-    Map result = Collections.unmodifiableMap(args);
-    return result;
+  public void writeMap(EntryWriter ew) throws IOException {
+    args.attributes.forEach(ew.getBiConsumer());
   }
-
-  public String getNodeName() {
-    return nodeName;
-  }
-
-
 }
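
For orientation, a usage sketch of the refactored CacheConfig (the helper class and the "query/filterCache" xpath are assumptions made for the example): getConfig(...) parses the cache element, including an optional "package" attribute, and newInstance(core) builds a SolrCacheHolder whose delegate is loaded through that package's ResourceLoader when one is named.

    import org.apache.solr.core.SolrConfig;
    import org.apache.solr.core.SolrCore;
    import org.apache.solr.search.CacheConfig;
    import org.apache.solr.search.SolrCacheHolder;

    // Hypothetical wiring helper, for illustration only.
    class CacheWiringSketch {
      static SolrCacheHolder<Object, Object> buildFilterCache(SolrConfig solrConfig, SolrCore core) {
        CacheConfig cfg = CacheConfig.getConfig(solrConfig, "query/filterCache");
        // newInstance(core) resolves the cache class via the core's loader, or via the
        // named package's loader when the config carries a "package" attribute.
        return cfg == null ? null : cfg.newInstance(core);
      }
    }
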
diff --git a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
index 7d648fa..1cec0aa 100644
--- a/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/FastLRUCache.java
@@ -16,6 +16,14 @@
  */
 package org.apache.solr.search;
 
+import java.lang.invoke.MethodHandles;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.TimeUnit;
+
 import com.codahale.metrics.MetricRegistry;
 import org.apache.lucene.util.Accountable;
 import org.apache.lucene.util.RamUsageEstimator;
@@ -26,14 +34,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.lang.invoke.MethodHandles;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.concurrent.CopyOnWriteArrayList;
-import java.util.concurrent.TimeUnit;
-
 /**
  * SolrCache based on ConcurrentLRUCache implementation.
  * <p>
@@ -47,7 +47,7 @@
  * @see org.apache.solr.search.SolrCache
  * @since solr 1.4
  */
-public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K,V>, Accountable {
+public class FastLRUCache<K, V> extends SolrCacheBase implements SolrCache<K, V>, Accountable {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
   private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(FastLRUCache.class);
@@ -64,7 +64,7 @@
   private long warmupTime = 0;
 
   private String description = "Concurrent LRU Cache";
-  private ConcurrentLRUCache<K,V> cache;
+  private ConcurrentLRUCache<K, V> cache;
   private int showItems = 0;
 
   private long maxRamBytes;
@@ -105,7 +105,7 @@
     str = (String) args.get(INITIAL_SIZE_PARAM);
     initialSize = str == null ? maxSize : Integer.parseInt(str);
     str = (String) args.get(CLEANUP_THREAD_PARAM);
-    cleanupThread = str == null ? false : Boolean.parseBoolean(str);
+    cleanupThread = str != null && Boolean.parseBoolean(str);
 
     str = (String) args.get(SHOW_ITEMS_PARAM);
     showItems = str == null ? 0 : Integer.parseInt(str);
@@ -120,7 +120,7 @@
     str = (String) args.get(MAX_RAM_MB_PARAM);
     long maxRamMB = str == null ? -1 : (long) Double.parseDouble(str);
     this.maxRamBytes = maxRamMB < 0 ? Long.MAX_VALUE : maxRamMB * 1024L * 1024L;
-    if (maxRamBytes != Long.MAX_VALUE)  {
+    if (maxRamBytes != Long.MAX_VALUE) {
       ramLowerWatermark = Math.round(maxRamBytes * 0.8);
       description = generateDescription(maxRamBytes, ramLowerWatermark, cleanupThread);
       cache = new ConcurrentLRUCache<>(ramLowerWatermark, maxRamBytes, cleanupThread, null, maxIdleTimeSec);
@@ -159,7 +159,7 @@
    */
   protected String generateDescription(int limit, int initialSize, int minLimit, int acceptableLimit, boolean newThread) {
     String description = "Concurrent LRU Cache(maxSize=" + limit + ", initialSize=" + initialSize +
-        ", minSize="+minLimit + ", acceptableSize="+acceptableLimit+", cleanupThread="+newThread;
+        ", minSize=" + minLimit + ", acceptableSize=" + acceptableLimit + ", cleanupThread=" + newThread;
     if (isAutowarmingOn()) {
       description += ", " + getAutowarmDescription();
     }
@@ -220,10 +220,9 @@
       for (int i = itemsArr.length - 1; i >= 0; i--) {
         try {
           boolean continueRegen = regenerator.regenerateItem(searcher,
-                  this, old, itemsArr[i].getKey(), itemsArr[i].getValue());
+              this, old, itemsArr[i].getKey(), itemsArr[i].getValue());
           if (!continueRegen) break;
-        }
-        catch (Exception e) {
+        } catch (Exception e) {
           SolrException.log(log, "Error during auto-warming of key:" + itemsArr[i].getKey(), e);
         }
       }
@@ -303,14 +302,14 @@
         map.put("cumulative_idleEvictions", cIdleEvictions);
 
         if (detailed && showItems != 0) {
-          Map items = cache.getLatestAccessedItems( showItems == -1 ? Integer.MAX_VALUE : showItems );
-          for (Map.Entry e : (Set <Map.Entry>)items.entrySet()) {
+          Map items = cache.getLatestAccessedItems(showItems == -1 ? Integer.MAX_VALUE : showItems);
+          for (Map.Entry e : (Set<Map.Entry>) items.entrySet()) {
             Object k = e.getKey();
             Object v = e.getValue();
 
             String ks = "item_" + k;
             String vs = v.toString();
-            map.put(ks,vs);
+            map.put(ks, vs);
           }
 
         }
diff --git a/solr/core/src/java/org/apache/solr/search/LFUCache.java b/solr/core/src/java/org/apache/solr/search/LFUCache.java
index 20cf664..b9a4820 100644
--- a/solr/core/src/java/org/apache/solr/search/LFUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LFUCache.java
@@ -17,10 +17,10 @@
 package org.apache.solr.search;
 
 import java.lang.invoke.MethodHandles;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.CopyOnWriteArrayList;
 import java.util.concurrent.TimeUnit;
 
@@ -116,14 +116,14 @@
     str = (String) args.get(AUTOWARM_COUNT_PARAM);
     autowarmCount = str == null ? 0 : Integer.parseInt(str);
     str = (String) args.get(CLEANUP_THREAD_PARAM);
-    cleanupThread = str == null ? false : Boolean.parseBoolean(str);
+    cleanupThread = str != null && Boolean.parseBoolean(str);
 
     str = (String) args.get(SHOW_ITEMS_PARAM);
     showItems = str == null ? 0 : Integer.parseInt(str);
 
     // Don't make this "efficient" by removing the test, default is true and omitting the param will make it false.
     str = (String) args.get(TIME_DECAY_PARAM);
-    timeDecay = (str == null) ? true : Boolean.parseBoolean(str);
+    timeDecay = (str == null) || Boolean.parseBoolean(str);
 
     str = (String) args.get(MAX_IDLE_TIME_PARAM);
     if (str == null) {
diff --git a/solr/core/src/java/org/apache/solr/search/LRUCache.java b/solr/core/src/java/org/apache/solr/search/LRUCache.java
index c733c07..bcb56cf 100644
--- a/solr/core/src/java/org/apache/solr/search/LRUCache.java
+++ b/solr/core/src/java/org/apache/solr/search/LRUCache.java
@@ -18,11 +18,11 @@
 
 import java.lang.invoke.MethodHandles;
 import java.util.Collection;
-import java.util.concurrent.ConcurrentHashMap;
 import java.util.Iterator;
 import java.util.LinkedHashMap;
 import java.util.Map;
 import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.LongAdder;
 
@@ -234,8 +234,8 @@
   }
 
   /**
-   * 
-   * @return Returns the description of this cache. 
+   *
+   * @return Returns the description of this cache.
    */
   private String generateDescription() {
     String description = "LRU Cache(maxSize=" + getMaxSize() + ", initialSize=" + initialSize;
@@ -341,9 +341,9 @@
 
       // Don't do the autowarming in the synchronized block, just pull out the keys and values.
       synchronized (other.map) {
-        
+
         int sz = autowarm.getWarmCount(other.map.size());
-        
+
         keys = new Object[sz];
         vals = new Object[sz];
 
@@ -383,7 +383,6 @@
 
   }
 
-
   //////////////////////// SolrInfoMBeans methods //////////////////////
 
 
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCache.java b/solr/core/src/java/org/apache/solr/search/SolrCache.java
index 5988ad5..9fe186a 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCache.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCache.java
@@ -16,16 +16,17 @@
  */
 package org.apache.solr.search;
 
+import java.util.Map;
+
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricProducer;
 
-import java.util.Map;
-
 
 /**
  * Primary API for dealing with Solr's internal caches.
  */
 public interface SolrCache<K,V> extends SolrInfoBean, SolrMetricProducer {
+  String TYPE = "cache";
 
   String HIT_RATIO_PARAM = "hitratio";
   String HITS_PARAM = "hits";
@@ -60,7 +61,7 @@
    * regenerate an item in the new cache from an entry in the old cache.
    *
    */
-  public Object init(Map args, Object persistence, CacheRegenerator regenerator);
+  Object init(Map args, Object persistence, CacheRegenerator regenerator);
   // I don't think we need a factory for faster creation given that these
   // will be associated with slow-to-create SolrIndexSearchers.
   // change to NamedList when other plugins do?
@@ -76,29 +77,29 @@
    *
    * :TODO: verify this.
    */
-  public String name();
+  String name();
 
 
   // Should SolrCache just extend the java.util.Map interface?
   // Following the conventions of the java.util.Map interface in any case.
 
   /** :TODO: copy from Map */
-  public int size();
+  int size();
 
   /** :TODO: copy from Map */
-  public V put(K key, V value);
+  V put(K key, V value);
 
   /** :TODO: copy from Map */
-  public V get(K key);
+  V get(K key);
 
   /** :TODO: copy from Map */
-  public void clear();
+  void clear();
 
   /** 
    * Enumeration of possible States for cache instances.
    * :TODO: only state that seems to ever be set is LIVE ?
   */
-  public enum State { 
+  enum State {
     /** :TODO */
     CREATED, 
     /** :TODO */
@@ -115,14 +116,14 @@
    * The cache user (SolrIndexSearcher) will take care of switching
    * cache states.
    */
-  public void setState(State state);
+  void setState(State state);
 
   /**
    * Returns the last State set on this instance
    *
    * @see #setState
    */
-  public State getState();
+  State getState();
 
   /**
    * Warm this cache associated with <code>searcher</code> using the <code>old</code>
@@ -134,7 +135,7 @@
 
 
   /** Frees any non-memory resources */
-  public void close();
+  void close();
 
   /** Returns maximum size limit (number of items) if set and supported, -1 otherwise. */
   int getMaxSize();
diff --git a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
index 66b8ab1..10bf769 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrCacheHolder.java
@@ -22,8 +22,11 @@
 import java.util.Set;
 
 import com.codahale.metrics.MetricRegistry;
-import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.PackageBag.PackageInfo;
+import org.apache.solr.core.PackageListeners;
+import org.apache.solr.core.PluginInfo;
 import org.apache.solr.metrics.SolrMetricManager;
+import org.apache.solr.metrics.SolrMetricProducer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -31,12 +34,53 @@
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
 
 
-  private final CacheConfig factory;
+  private CacheConfig.CacheInfo info;
   protected volatile SolrCache<K, V> delegate;
 
-  public SolrCacheHolder(SolrCache<K, V> delegate, CacheConfig factory) {
-    this.delegate = delegate;
-    this.factory = factory;
+
+  public SolrCacheHolder(CacheConfig.CacheInfo cacheInfo) {
+    this.info = cacheInfo;
+    this.delegate = cacheInfo.cache;
+
+    if (info.pkg != null) {
+      info.core.getListenerRegistry().addListener(new PackageListeners.Listener() {
+        @Override
+        public String packageName() {
+          return info.pkg;
+        }
+
+        @Override
+        public PluginInfo pluginInfo() {
+          return info.cfg.args;
+        }
+
+        @Override
+        public PackageInfo packageInfo() {
+          return info.packageInfo;
+        }
+
+        @Override
+        public void changed(PackageInfo pkgInfo) {
+          reloadCache(pkgInfo);
+        }
+      });
+    }
+  }
+
+  private void reloadCache(PackageInfo packageInfo) {
+    int znodeVersion = info.packageInfo == null ? -1 : info.packageInfo.znodeVersion;
+    if (packageInfo.znodeVersion > znodeVersion) {
+      log.info("Cache {} being reloaded, package: {} version: {} ", delegate.getClass().getSimpleName(), packageInfo.name, packageInfo.version);
+      info = new CacheConfig.CacheInfo(info.cfg, info.core);
+      info.packageInfo = packageInfo;
+      delegate.close();
+      delegate = info.cache;
+      if (metricsInfo != null) {
+        metricsInfo.init(delegate);
+      }
+    }
   }
 
   public int size() {
@@ -141,12 +185,31 @@
     return delegate.getCategory();
   }
 
-  @Override
-  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
-    log.debug("Going to register cachemetrics " + Utils.toJSONString(factory));
 
-    delegate.initializeMetrics(manager, registry, tag,scope);
+  private MetricsInfo metricsInfo;
 
+  public static class MetricsInfo {
+    final SolrMetricManager manager;
+    final String registry;
+    final String tag;
+    final String scope;
+
+    MetricsInfo(SolrMetricManager manager, String registry, String tag, String scope) {
+      this.manager = manager;
+      this.registry = registry;
+      this.tag = tag;
+      this.scope = scope;
+    }
+
+    public void init(SolrMetricProducer metricProducer) {
+      metricProducer.initializeMetrics(manager, registry, tag, scope);
+    }
   }
 
+  @Override
+  public void initializeMetrics(SolrMetricManager manager, String registry, String tag, String scope) {
+    this.metricsInfo = new MetricsInfo(manager, registry, tag, scope);
+    delegate.initializeMetrics(manager, registry, tag, scope);
+
+  }
 }
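
The same listener pattern is available to any package-loaded plugin. A sketch follows (MyPackagedPlugin and its wiring are hypothetical, and it assumes PackageListeners.Listener exposes exactly the four methods overridden above): the plugin registers a listener so it can rebuild itself when a newer package version is deployed, just as the cache holder does.

    import org.apache.solr.core.PackageBag.PackageInfo;
    import org.apache.solr.core.PackageListeners;
    import org.apache.solr.core.PluginInfo;
    import org.apache.solr.core.SolrCore;

    // Hypothetical plugin that tracks its source package.
    class MyPackagedPlugin {
      void watchPackage(SolrCore core, String pkg, PluginInfo info) {
        core.getListenerRegistry().addListener(new PackageListeners.Listener() {
          @Override public String packageName() { return pkg; }
          @Override public PluginInfo pluginInfo() { return info; }
          @Override public PackageInfo packageInfo() { return null; } // no version tracked yet
          @Override public void changed(PackageInfo newVersion) {
            // re-create this plugin's delegate from the updated package here
          }
        });
      }
    }
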
diff --git a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java
index 313d91b..f7bc263 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrDocumentFetcher.java
@@ -111,7 +111,8 @@
     this.searcher = searcher;
     this.enableLazyFieldLoading = solrConfig.enableLazyFieldLoading;
     if (cachingEnabled) {
-      documentCache = solrConfig.documentCacheConfig == null ? null : solrConfig.documentCacheConfig.newInstance();
+      documentCache = solrConfig.documentCacheConfig == null ? null :
+          solrConfig.documentCacheConfig.newInstance(searcher.getCore());
     } else {
       documentCache = null;
     }
diff --git a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
index 9b78431..192adb1 100644
--- a/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
+++ b/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
@@ -267,12 +267,12 @@
     if (cachingEnabled) {
       final ArrayList<SolrCache> clist = new ArrayList<>();
       fieldValueCache = solrConfig.fieldValueCacheConfig == null ? null
-          : solrConfig.fieldValueCacheConfig.newInstance();
-      if (fieldValueCache != null) clist.add(fieldValueCache);
-      filterCache = solrConfig.filterCacheConfig == null ? null : solrConfig.filterCacheConfig.newInstance();
+          : solrConfig.fieldValueCacheConfig.newInstance(core);
+      if (fieldValueCache != null) clist.add(fieldValueCache);
+      filterCache = solrConfig.filterCacheConfig == null ? null : solrConfig.filterCacheConfig.newInstance(core);
       if (filterCache != null) clist.add(filterCache);
       queryResultCache = solrConfig.queryResultCacheConfig == null ? null
-          : solrConfig.queryResultCacheConfig.newInstance();
+          : solrConfig.queryResultCacheConfig.newInstance(core);
       if (queryResultCache != null) clist.add(queryResultCache);
       SolrCache<Integer, Document> documentCache = docFetcher.getDocumentCache();
       if (documentCache != null) clist.add(documentCache);
@@ -281,8 +281,8 @@
         cacheMap = NO_GENERIC_CACHES;
       } else {
         cacheMap = new HashMap<>(solrConfig.userCacheConfigs.size());
-        for (Map.Entry<String,CacheConfig> e : solrConfig.userCacheConfigs.entrySet()) {
-          SolrCache cache = e.getValue().newInstance();
+        for (Map.Entry<String, CacheConfig> e : solrConfig.userCacheConfigs.entrySet()) {
+          SolrCache cache = e.getValue().newInstance(core);
           if (cache != null) {
             cacheMap.put(cache.name(), cache);
             clist.add(cache);
@@ -502,8 +502,8 @@
   // Set default regenerators on filter and query caches if they don't have any
   //
   public static void initRegenerators(SolrConfig solrConfig) {
-    if (solrConfig.fieldValueCacheConfig != null && solrConfig.fieldValueCacheConfig.getRegenerator() == null) {
-      solrConfig.fieldValueCacheConfig.setRegenerator(new CacheRegenerator() {
+    if (solrConfig.fieldValueCacheConfig != null) {
+      solrConfig.fieldValueCacheConfig.setDefaultRegenerator(new CacheRegenerator() {
         @Override
         public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache,
             Object oldKey, Object oldVal) throws IOException {
@@ -515,8 +515,8 @@
       });
     }
 
-    if (solrConfig.filterCacheConfig != null && solrConfig.filterCacheConfig.getRegenerator() == null) {
-      solrConfig.filterCacheConfig.setRegenerator(new CacheRegenerator() {
+    if (solrConfig.filterCacheConfig != null ) {
+      solrConfig.filterCacheConfig.setDefaultRegenerator(new CacheRegenerator() {
         @Override
         public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache,
             Object oldKey, Object oldVal) throws IOException {
@@ -526,9 +526,9 @@
       });
     }
 
-    if (solrConfig.queryResultCacheConfig != null && solrConfig.queryResultCacheConfig.getRegenerator() == null) {
+    if (solrConfig.queryResultCacheConfig != null) {
       final int queryResultWindowSize = solrConfig.queryResultWindowSize;
-      solrConfig.queryResultCacheConfig.setRegenerator(new CacheRegenerator() {
+      solrConfig.queryResultCacheConfig.setDefaultRegenerator(new CacheRegenerator() {
         @Override
         public boolean regenerateItem(SolrIndexSearcher newSearcher, SolrCache newCache, SolrCache oldCache,
             Object oldKey, Object oldVal) throws IOException {
@@ -623,7 +623,7 @@
 
   /** expert: internal API, subject to change */
   public SolrCache<String,UnInvertedField> getFieldValueCache() {
-    return fieldValueCache;
+    return fieldValueCache;
   }
 
   /** Returns a weighted sort according to this searcher */
@@ -2468,7 +2468,7 @@
 
     @Override
     public int hashCode() {
-      return classHash() 
+      return classHash()
           + 31 * Objects.hashCode(topFilter)
           + 31 * Objects.hashCode(weights);
     }
diff --git a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
index 5fd18a1..31f5a74 100644
--- a/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
+++ b/solr/core/src/java/org/apache/solr/security/AuthenticationPlugin.java
@@ -29,12 +29,11 @@
 import com.codahale.metrics.Meter;
 import com.codahale.metrics.MetricRegistry;
 import com.codahale.metrics.Timer;
+import org.apache.http.HttpRequest;
+import org.apache.http.protocol.HttpContext;
 import org.apache.solr.core.SolrInfoBean;
 import org.apache.solr.metrics.SolrMetricManager;
 import org.apache.solr.metrics.SolrMetricProducer;
-
-import org.apache.http.HttpRequest;
-import org.apache.http.protocol.HttpContext;
 import org.eclipse.jetty.client.api.Request;
 
 /**
@@ -66,7 +65,7 @@
    * @param pluginConfig Config parameters, possibly from a ZK source
    */
   public abstract void init(Map<String, Object> pluginConfig);
- 
+
   /**
    * This method attempts to authenticate the request. Upon a successful authentication, this
    * must call the next filter in the filter chain and set the user principal of the request,
@@ -107,10 +106,10 @@
    * delegate to {@link PKIAuthenticationPlugin}. Return true to indicate that your plugin
    * did handle the request, or false to signal that PKI plugin should handle it. This method
    * will be called by {@link PKIAuthenticationPlugin}'s interceptor.
-   * 
+   *
    * <p>
    *   If not overridden, this method will return true for plugins implementing {@link HttpClientBuilderPlugin}.
-   *   This method can be overridden by subclasses e.g. to set HTTP headers, even if you don't use a clientBuilder. 
+   *   This method can be overridden by subclasses e.g. to set HTTP headers, even if you don't use a clientBuilder.
    * </p>
    * @param httpRequest the httpRequest that is about to be sent to another internal Solr node
    * @param httpContext the context of that request.
@@ -137,7 +136,7 @@
   protected boolean interceptInternodeRequest(Request request) {
     return this instanceof HttpClientBuilderPlugin;
   }
-  
+
   /**
    * Cleanup any per request  data
    */
@@ -161,7 +160,7 @@
     metricNames.addAll(Arrays.asList("errors", "requests", "authenticated", "passThrough",
         "failWrongCredentials", "failMissingCredentials", "requestTimes", "totalTime"));
   }
-  
+
   @Override
   public String getName() {
     return this.getClass().getName();
@@ -186,5 +185,5 @@
   public MetricRegistry getMetricRegistry() {
     return registry;
   }
-  
+
 }
diff --git a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
index 79b4d29..aae627a 100644
--- a/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
+++ b/solr/core/src/java/org/apache/solr/security/PermissionNameProvider.java
@@ -39,6 +39,7 @@
     CORE_READ_PERM("core-admin-read", null),
     CORE_EDIT_PERM("core-admin-edit", null),
     READ_PERM("read", "*"),
+    CUSTOM_PERM("custom-op", null), // custom operation, user-defined
     UPDATE_PERM("update", "*"),
     CONFIG_EDIT_PERM("config-edit", unmodifiableSet(new HashSet<>(asList("*", null)))),
     CONFIG_READ_PERM("config-read", "*"),
@@ -51,6 +52,10 @@
     AUTOSCALING_WRITE_PERM("autoscaling-write", null),
     AUTOSCALING_HISTORY_READ_PERM("autoscaling-history-read", null),
     METRICS_HISTORY_READ_PERM("metrics-history-read", null),
+    FILESTORE_READ("filestore-read", null),
+    FILESTORE_WRITE("filestore-write", null),
+    PKG_EDIT("package-write", null),
+    PKG_READ("package-read", null),
     ALL("all", unmodifiableSet(new HashSet<>(asList("*", null))))
     ;
     final String name;
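
A hedged example of how a handler could opt into one of the new permission names (the handler class here is hypothetical, and it assumes the usual getPermissionName(AuthorizationContext) contract; only the PKG_READ constant comes from this patch):

    import org.apache.solr.security.AuthorizationContext;
    import org.apache.solr.security.PermissionNameProvider;

    // Hypothetical read-only handler mapping its requests to the new package-read permission.
    class PackageInfoHandlerSketch implements PermissionNameProvider {
      @Override
      public PermissionNameProvider.Name getPermissionName(AuthorizationContext request) {
        return PermissionNameProvider.Name.PKG_READ;
      }
    }
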
diff --git a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
index 90d6b17..86a1d85 100644
--- a/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
+++ b/solr/core/src/java/org/apache/solr/servlet/SolrDispatchFilter.java
@@ -82,9 +82,9 @@
 import org.apache.solr.security.PKIAuthenticationPlugin;
 import org.apache.solr.security.PublicKeyHandler;
 import org.apache.solr.util.SolrFileCleaningTracker;
-import org.apache.solr.util.tracing.GlobalTracer;
 import org.apache.solr.util.StartupLoggingUtils;
 import org.apache.solr.util.configuration.SSLConfigurationsFactory;
+import org.apache.solr.util.tracing.GlobalTracer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -180,6 +180,7 @@
       final Path solrHomePath = solrHome == null ? SolrResourceLoader.locateSolrHome() : Paths.get(solrHome);
       coresInit = createCoreContainer(solrHomePath, extraProperties);
       SolrResourceLoader.ensureUserFilesDataDir(solrHomePath);
+      SolrResourceLoader.ensureFileStoreDir(solrHomePath);
       this.httpClient = coresInit.getUpdateShardHandler().getDefaultHttpClient();
       setupJvmMetrics(coresInit);
       log.debug("user.dir=" + System.getProperty("user.dir"));
@@ -626,8 +627,8 @@
             public void close() {
               // even though we skip closes, we let local tests know not to close so that a full understanding can take
               // place
-              assert Thread.currentThread().getStackTrace()[2].getClassName().matches(
-                  "org\\.apache\\.(?:solr|lucene).*") ? false : true : CLOSE_STREAM_MSG;
+              assert !Thread.currentThread().getStackTrace()[2].getClassName().matches(
+                  "org\\.apache\\.(?:solr|lucene).*") : CLOSE_STREAM_MSG;
               this.stream = ClosedServletInputStream.CLOSED_SERVLET_INPUT_STREAM;
             }
           };
@@ -661,9 +662,8 @@
             public void close() {
               // even though we skip closes, we let local tests know not to close so that a full understanding can take
               // place
-              assert Thread.currentThread().getStackTrace()[2].getClassName().matches(
-                  "org\\.apache\\.(?:solr|lucene).*") ? false
-                      : true : CLOSE_STREAM_MSG;
+              assert !Thread.currentThread().getStackTrace()[2].getClassName().matches(
+                  "org\\.apache\\.(?:solr|lucene).*") : CLOSE_STREAM_MSG;
               stream = ClosedServletOutputStream.CLOSED_SERVLET_OUTPUT_STREAM;
             }
           };
diff --git a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
index eb3c08b..c45f0c6 100644
--- a/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
+++ b/solr/core/src/java/org/apache/solr/update/processor/UpdateRequestProcessorChain.java
@@ -328,7 +328,7 @@
   public static class LazyUpdateProcessorFactoryHolder extends PluginBag.PluginHolder<UpdateRequestProcessorFactory> {
     private volatile UpdateRequestProcessorFactory lazyFactory;
 
-    public LazyUpdateProcessorFactoryHolder(final PluginBag.LazyPluginHolder holder) {
+    public LazyUpdateProcessorFactoryHolder(final PluginBag.PluginHolder<UpdateRequestProcessorFactory> holder) {
       super(holder.getPluginInfo());
       lazyFactory = new LazyUpdateRequestProcessorFactory(holder);
     }
@@ -340,26 +340,20 @@
     }
 
     public class LazyUpdateRequestProcessorFactory extends UpdateRequestProcessorFactory {
-      private final PluginBag.LazyPluginHolder holder;
-      UpdateRequestProcessorFactory delegate;
+      private final PluginBag.PluginHolder<UpdateRequestProcessorFactory> holder;
 
-      public LazyUpdateRequestProcessorFactory(PluginBag.LazyPluginHolder holder) {
+      public LazyUpdateRequestProcessorFactory(PluginBag.PluginHolder<UpdateRequestProcessorFactory> holder) {
         this.holder = holder;
       }
 
       public UpdateRequestProcessorFactory getDelegate() {
-        return delegate;
+        return holder.get();
       }
 
       @Override
       public UpdateRequestProcessor getInstance(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor next) {
-        if (delegate != null) return delegate.getInstance(req, rsp, next);
+        return holder.get().getInstance(req, rsp, next);
 
-        synchronized (this) {
-          if (delegate == null)
-            delegate = (UpdateRequestProcessorFactory) holder.get();
-        }
-        return delegate.getInstance(req, rsp, next);
       }
     }
   }
diff --git a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
index 0835b59..4574aa2 100644
--- a/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
+++ b/solr/core/src/java/org/apache/solr/util/CryptoKeys.java
@@ -21,7 +21,8 @@
 import javax.crypto.IllegalBlockSizeException;
 import javax.crypto.spec.IvParameterSpec;
 import javax.crypto.spec.SecretKeySpec;
-
+import java.io.IOException;
+import java.io.InputStream;
 import java.lang.invoke.MethodHandles;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
@@ -42,6 +43,7 @@
 import java.util.HashMap;
 import java.util.Map;
 
+import com.google.common.collect.ImmutableMap;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.util.Base64;
 import org.slf4j.Logger;
@@ -52,7 +54,7 @@
  */
 public final class CryptoKeys implements CLIO {
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  private final Map<String, PublicKey> keys;
+  public final Map<String, PublicKey> keys;
   private Exception exception;
 
   public CryptoKeys(Map<String, byte[]> trustedKeys) throws Exception {
@@ -61,7 +63,7 @@
       m.put(e.getKey(), getX509PublicKey(e.getValue()));
 
     }
-    this.keys = m;
+    this.keys = ImmutableMap.copyOf(m);
   }
 
   /**
@@ -73,11 +75,11 @@
       boolean verified;
       try {
         verified = CryptoKeys.verify(entry.getValue(), Base64.base64ToByteArray(sig), data);
-        log.info("verified {} ", verified);
+        log.debug("verified {} ", verified);
         if (verified) return entry.getKey();
       } catch (Exception e) {
         exception = e;
-        log.info("NOT verified  ");
+        log.debug("NOT verified  ");
       }
 
     }
@@ -104,24 +106,43 @@
    * @param data      The data tha is signed
    */
   public static boolean verify(PublicKey publicKey, byte[] sig, ByteBuffer data) throws InvalidKeyException, SignatureException {
-    int oldPos = data.position();
-    Signature signature = null;
+    data = ByteBuffer.wrap(data.array(), data.arrayOffset(), data.limit());
     try {
-      signature = Signature.getInstance("SHA1withRSA");
+      Signature signature = Signature.getInstance("SHA1withRSA");
       signature.initVerify(publicKey);
       signature.update(data);
-      boolean verify = signature.verify(sig);
-      return verify;
+      return signature.verify(sig);
+    } catch (NoSuchAlgorithmException e) {
+      // will not happen
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
+    }
 
+  }
+
+  public static boolean verify(PublicKey publicKey, byte[] sig, InputStream is)
+      throws InvalidKeyException, SignatureException, IOException {
+    try {
+      Signature signature = Signature.getInstance("SHA1withRSA");
+      signature.initVerify(publicKey);
+      byte[] buf = new byte[1024];
+      while (true) {
+        int sz = is.read(buf);
+        if (sz == -1) break;
+        signature.update(buf, 0, sz);
+      }
+      try {
+        return signature.verify(sig);
+      } catch (SignatureException e) {
+        return false;
+      }
     } catch (NoSuchAlgorithmException e) {
       //will not happen
-    } finally {
-      //Signature.update resets the position. set it back to old
-      data.position(oldPos);
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
     }
-    return false;
+
   }
 
+
   private static byte[][] evpBytesTokey(int key_len, int iv_len, MessageDigest md,
                                         byte[] salt, byte[] data, int count) {
     byte[][] both = new byte[2][];
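
A short usage sketch of the new stream-based verify (it mirrors the added TestCryptoKeys assertions further down; the key name, jar path, and helper class are placeholders):

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.security.PublicKey;

    import org.apache.solr.common.util.Base64;
    import org.apache.solr.util.CryptoKeys;

    // Illustrative helper: verify a jar against a named public key without buffering it in memory.
    class VerifySketch {
      static boolean isSignedBy(CryptoKeys keys, String base64Sig, String jarPath) throws Exception {
        PublicKey key = keys.keys.get("pub_key2048.der"); // the keys map is public as of this change
        try (InputStream in = new FileInputStream(jarPath)) {
          return CryptoKeys.verify(key, Base64.base64ToByteArray(base64Sig), in);
        }
      }
    }
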
diff --git a/solr/core/src/test-files/cryptokeys/priv_key2048.pem b/solr/core/src/test-files/cryptokeys/priv_key2048.pem
new file mode 100644
index 0000000..4d2c8c2
--- /dev/null
+++ b/solr/core/src/test-files/cryptokeys/priv_key2048.pem
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpQIBAAKCAQEA1fSq/8iz1sIppHhSKrC0g2uzfFKZzWZAbcvVQbyS/pwxC7VB
+hR93DVINyGGT3XHnpemt/h0wrifCIEMyqSLTIhiu5bRJpfE7UO9vGgTcP5+i2wTe
+cKHqrxDvbQ4D7co96Gvu2cShySbOHsFjZXL4eaqU2W2x8S7U+OjRBwtwMxB4vstX
+5u75WtwVXwNRj+uXIfTTisplE/nA/slqByW4Q9QAg+du+Ejh4W7nF+Z9GRMR7MZe
+c1TeGOYZd8YzYku7WyUZ1SRQ6JjaZrdphlLtysMgqP0MMajEoFs/ajeNHef0iCz0
+TnB05PQd+GPO5+JrLPZ399mucl/jM+cbixn9pwIDAQABAoIBAQCpfA51XryvU9F+
++t1D+gSU0p00z44MeoJxN3WwhDwBOyNS/ftwA/Pf9m76m+lxEwkIkesFkIP+gXoy
+6mhYOUD9EoaBaeapcEWnoHPnLZB3SfLNArdei7UHhyfSwLZ2CK4vzkg/85ahbH79
+N/6P35pbbrhI4K+DubB1mJ/0r6fqmh6/04L47/liAPvsSM9ZJIMwbuZbYY21ggI9
+ZGk+kO0C/CyzxplaVLJ8P86KnRloEfjSmMhP72z7bja/BE2NX42G12YbjY7tVMn7
+duTWU2F4JWYriWAHr+4GwODDdtvn/R5jPirDIJeHCd6Bg1t7KibHRTcgYgtwDBqG
+F65g4zqRAoGBAP2fry+6uXe3rAJDJrCSKPQVTv5QhOvG1466xsOaWRSe/rx1Mvnd
+Z4pe+T8bdvsvqFnNMAkZKzzPjJ+oCSVKHhcerzMm2Cw6Gpv2yywA/1VykIoZmdNM
+/vHjC7w35q7xwEUHxB/rt2vvijrAYnhaq86uIXzoiqTGaKJ/z34QsCppAoGBANf1
+1wsISnZPjIipMIYtC7Co3GCUhsQ+ksVBhtsOHaKfon3Q69Qbz93l7dbCwgFbL6td
+HW/ppnABZLVFHnoLJ5YrriVZ1Wizx90+RFGdNj74UTV8bfqr/C32UKTjqoYjPAZO
+vEOzHkmpc9I1mrxm1Mcff5EHDFmXGXoZ2GLCpEWPAoGAOXroVFPoVtacuEKJ0Ti+
+6Vqu9XpANcNx9RollA02JTNHnmSdcf2YysZtjLznwVPyvq9/NICsyPJs93443Geo
+3CqLIHesRJHCmBhdwZJUTART98iHkVkA6sc/UKAGux11Ku/wph9hCahXVqtlZct+
+5q+WTV3SljeVXUbEOtkDZAkCgYEArnd0R/xls5jmbs1IX01q4Ug56Wh0S3xFtEgQ
+u013EZcnfb9Xld72Gk0TzOlANDpHk4hBLNU02c22X188lNoIHCCjqpcdel2rPIh+
+RvTcCxku+ifQ7a8dpsAUPHGUpJM4fdwD6il9cYMNB6i4njXw9gDzXOW1y3bvZR4W
+GwsmDO8CgYEA5vG0TdwkvdDcsJYimm3WQJ/VnYidE6JfjnAxnPwFFPjQoDRIS32f
+TMMJFTHSSH4xgQLEhEfaAbrkptpPORM9QAjjRx2RXoa5yu2GMpDWua4MxpHdqiSY
+v/rOw+6fZbe8YC9bZ8AE+GPuHdJDQFoSU7ieCGiF/iwWB2jhwCm7OyY=
+-----END RSA PRIVATE KEY-----
diff --git a/solr/core/src/test-files/cryptokeys/priv_key512.pem b/solr/core/src/test-files/cryptokeys/priv_key512.pem
new file mode 100644
index 0000000..53c032c
--- /dev/null
+++ b/solr/core/src/test-files/cryptokeys/priv_key512.pem
@@ -0,0 +1,9 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIBOQIBAAJBAMgmSVfl+E2Nj/oKAP2TceWh17pk6Ugsw5A5nLH+OeMB/WeWJZg/
+NEDda8SXfQDEVRmw5P+2IZypPASzfCrc6yECAwEAAQJAbZFwEztky+fUSrhRRIAE
+GQaZV4PIpWdEA99WJaabv+YsWN5UUd7y+Evu50mhH3RQIxQd+R6SYs1ke9OlHlV2
+cQIhAP8367gybVEu2A+Cg1fE9vbHfnHrurpDQrh9r0ZKooTtAiEAyMMxvlHlSh6Q
+2cUTSxuyUEaQfN+W4efehgfIWBVlzIUCIEHBMZ0qeNnCvO36DUbuu0ZHjb9iIaDd
+tXH9B8yPbCHdAiAaV3o0ZZx3MDGDUVdpuHWaENguekva0kihP24rGIul3QIgNqZS
+EzA2aoQdNPl5oDfkhqAGjs5pb7qLgtmXJvVhi/Q=
+-----END RSA PRIVATE KEY-----
diff --git a/solr/core/src/test-files/cryptokeys/pub_key2048.der b/solr/core/src/test-files/cryptokeys/pub_key2048.der
new file mode 100644
index 0000000..0e0e36b
--- /dev/null
+++ b/solr/core/src/test-files/cryptokeys/pub_key2048.der
Binary files differ
diff --git a/solr/core/src/test-files/cryptokeys/pub_key512.der b/solr/core/src/test-files/cryptokeys/pub_key512.der
new file mode 100644
index 0000000..4c926dd
--- /dev/null
+++ b/solr/core/src/test-files/cryptokeys/pub_key512.der
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/MyDocCache.java b/solr/core/src/test-files/runtimecode/MyDocCache.java
new file mode 100644
index 0000000..406b950
--- /dev/null
+++ b/solr/core/src/test-files/runtimecode/MyDocCache.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package runtimecode;
+
+import org.apache.lucene.document.Document;
+import org.apache.lucene.document.StoredField;
+import org.apache.solr.search.LRUCache;
+
+public  class MyDocCache<K,V> extends LRUCache<K,V> {
+
+  static String fld_name= "my_synthetic_fld_s";
+  @Override
+  public V put(K key, V value) {
+    if(value instanceof Document){
+      Document d = (Document) value;
+      d.add(new StoredField(fld_name, "version_2"));
+    }
+    return super.put(key, value);
+  }
+}
diff --git a/solr/core/src/test-files/runtimecode/cache.jar.bin b/solr/core/src/test-files/runtimecode/cache.jar.bin
new file mode 100644
index 0000000..0729896
--- /dev/null
+++ b/solr/core/src/test-files/runtimecode/cache.jar.bin
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/cache_v2.jar.bin b/solr/core/src/test-files/runtimecode/cache_v2.jar.bin
new file mode 100644
index 0000000..6105993
--- /dev/null
+++ b/solr/core/src/test-files/runtimecode/cache_v2.jar.bin
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/runtimelibs_v3.jar.bin b/solr/core/src/test-files/runtimecode/runtimelibs_v3.jar.bin
new file mode 100644
index 0000000..6b5bad6
--- /dev/null
+++ b/solr/core/src/test-files/runtimecode/runtimelibs_v3.jar.bin
Binary files differ
diff --git a/solr/core/src/test-files/runtimecode/sig.txt b/solr/core/src/test-files/runtimecode/sig.txt
new file mode 100644
index 0000000..50d3892
--- /dev/null
+++ b/solr/core/src/test-files/runtimecode/sig.txt
@@ -0,0 +1,106 @@
+================priv_key2048.pem===================
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs.jar.bin | openssl enc -base64
+
+NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCc
+fNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37
+w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvd
+RxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQ
+KJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C
+6uRCKCeFMrzQ/k5inasXLw==
+
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs_v2.jar.bin | openssl enc -base64
+
+jsPpNMs74ogRbx9M4n/OH3j3s85KOq9dOtgGJkUf6O5D8T9d9zU2lDwxnTYjQCaW
+cRTLGH3Z8vpc0wyT3g4aXepgLUTSnrepbPffSFhQtFrCNxurPOLzbp6ERhwjZ0RL
+GvZrlbbjR2SxqZ3BpHiGxslj0tPCkdevNCEy1glLhl8RWG5xsLCrRL1mrEtLg97A
+53oCCrfGAHLEvW+olGeB1r7jqUaSrbfAUfDMSIvZfOIV+xdlvabkNiuzvsAc+B6Q
+pXWm+Em2f5TO/bkOh2m/UInGXcNHCa0oqRMGKP1H252Cv9eXm/d0h3Dqxv+f80Gz
+LfyA6/OKQ9FfskY4pltCsQ==
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key2048.pem runtimelibs_v3.jar.bin | openssl enc -base64
+
+
+YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j
+0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUr
+npzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt3
+1oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YX
+FuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB
+0TbQtJG0XGfdSmx0VChvcA==
+
+YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUrnpzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt31oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YXFuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB0TbQtJG0XGfdSmx0VChvcA==
+
+=====================priv_key512.pem=====================
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs.jar.bin | openssl enc -base64
+
+L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1
+f/U3bOlMPINlSOM6LK3JpQ==
+L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v2.jar.bin | openssl enc -base64
+
+j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJu
+XbHkVLj638yZ0Lp1ssnoYA==
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem runtimelibs_v3.jar.bin | openssl enc -base64
+
+a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEq
+DVLhQoL3WqYtQmLPti0G4Q==
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem cache.jar.bin | openssl enc -base64
+
+A2CDnReirpII005KRN1C3pvt4NM4kItsagQPNaa3ljj/5R3LKVgiPuNvqBsffU8n
+81LOAfr5VMyGFcb4QMHpyg==
+
+openssl dgst -sha1 -sign ../cryptokeys/priv_key512.pem cache_v2.jar.bin | openssl enc -base64
+
+SOrekHt+uup+z2z+nZU5indk2huRRfmbM+W+vQ0variHrcZEG9EXt5LuPFl8Ki9A
+hr6klMHdVP8nj4wuQhu/Hg==
+
+====================sha512====================
+
+openssl dgst -sha512 runtimelibs.jar.bin
+
+d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420
+
+
+openssl dgst -sha512 runtimelibs_v2.jar.bin
+
+bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417
+
+openssl dgst -sha512 runtimelibs_v3.jar.bin
+
+60ec88c2a2e9b409f7afc309273383810a0d07a078b482434eda9674f7e25b8adafa8a67c9913c996cbfb78a7f6ad2b9db26dbd4fe0ca4068f248d5db563f922
+
+openssl dgst -sha512 cache.jar.bin
+
+8946650ba88919cea2f81e4771c418411f61837b2a276088c2f2c86ef2d730f152ccf5975fa8a2c7035a1f00de1994a7788676d95dc7ccea6aaf28c7fff1f46b
+
+openssl dgst -sha512 cache_v2.jar.bin
+
+873337e67b90b7ff99df012b2e9093c63079c37a564643d34861a88c4cbaf0698ebb096905929d69cdbde3b4d29d55e31db24ee05c01b39c0b75a16e54eb4335
+
+=============sha256============================
+
+openssl dgst -sha256 runtimelibs.jar.bin
+
+e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc
+
+openssl dgst -sha256 runtimelibs_v2.jar.bin
+
+79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4
+
+openssl dgst -sha256 runtimelibs_v3.jar.bin
+
+20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3
+
+openssl dgst -sha256 cache.jar.bin
+
+32e8b5b2a95ea306538b52017f0954aa1b0f8a8b2d0acbc498fd0e66a223f7bd
+
+openssl dgst -sha256 cache_v2.jar.bin
+
+0f670f6dcc2b00f9a448a7ebd457d4ff985ab702c85cdb3608dcae9889e8d702
+
+
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
index c082e37..1bc54f2 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestClusterProperties.java
@@ -27,7 +27,7 @@
 public class TestClusterProperties extends SolrCloudTestCase {
 
   private ClusterProperties props;
-  
+
   @BeforeClass
   public static void setupCluster() throws Exception {
     configureCluster(1).configure();
@@ -49,7 +49,7 @@
     CollectionAdminRequest.setClusterProperty(ZkStateReader.LEGACY_CLOUD, "false").process(cluster.getSolrClient());
     assertEquals("false", props.getClusterProperty(ZkStateReader.LEGACY_CLOUD, "true"));
   }
-  
+
   @Test
   public void testSetPluginClusterProperty() throws Exception {
     String propertyName = ClusterProperties.EXT_PROPRTTY_PREFIX + "pluginA.propertyA";
@@ -57,7 +57,7 @@
         .process(cluster.getSolrClient());
     assertEquals("valueA", props.getClusterProperty(propertyName, null));
   }
-  
+
   @Test(expected = SolrException.class)
   public void testSetInvalidPluginClusterProperty() throws Exception {
     String propertyName = "pluginA.propertyA";
diff --git a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java
index 146ad82..253d5cd 100644
--- a/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java
+++ b/solr/core/src/test/org/apache/solr/cloud/TestCryptoKeys.java
@@ -25,6 +25,7 @@
 import org.apache.solr.client.solrj.impl.HttpSolrClient;
 import org.apache.solr.common.LinkedHashMapWriter;
 import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.util.Base64;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.core.MemClassLoader;
 import org.apache.solr.core.TestDynamicLoading;
@@ -71,6 +72,19 @@
     result = cryptoKeys.verify(pk2sig, samplefile);
     assertEquals("pubk2.der", result);
 
+    try (FileInputStream fis = new FileInputStream(getFile("cryptokeys/samplefile.bin"))) {
+      assertTrue(CryptoKeys.verify(cryptoKeys.keys.get("pubk2.der"), Base64.base64ToByteArray(pk2sig) , fis));
+    }
+    try (FileInputStream fis = new FileInputStream(getFile("cryptokeys/samplefile.bin"))) {
+      assertFalse(CryptoKeys.verify(cryptoKeys.keys.get("pubk1.der"), Base64.base64ToByteArray(pk2sig) , fis));
+    }
+
+    try (FileInputStream fis = new FileInputStream(getFile("cryptokeys/samplefile.bin"))) {
+      assertTrue(CryptoKeys.verify(cryptoKeys.keys.get("pubk1.der"), Base64.base64ToByteArray(pk1sig) , fis));
+    }
+    try (FileInputStream fis = new FileInputStream(getFile("cryptokeys/samplefile.bin"))) {
+      assertFalse(CryptoKeys.verify(cryptoKeys.keys.get("pubk2.der"), Base64.base64ToByteArray(pk1sig) , fis));
+    }
 
     result = cryptoKeys.verify(pk1sig, samplefile);
     assertEquals("pubk1.der", result);
@@ -195,7 +209,7 @@
   }
 
 
-  private byte[] readFile(String fname) throws IOException {
+  public static byte[] readFile(String fname) throws IOException {
     byte[] buf = null;
     try (FileInputStream fis = new FileInputStream(getFile(fname))) {
       buf = new byte[fis.available()];
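The assertions added above exercise what appears to be a static CryptoKeys.verify(publicKey, signatureBytes, inputStream) overload that checks a signature directly against streamed content. Conceptually such a check just feeds the stream into a java.security.Signature initialised with the public key; a minimal sketch under that assumption (StreamSignatureCheck is a hypothetical stand-in, not the actual CryptoKeys implementation):

import java.io.IOException;
import java.io.InputStream;
import java.security.GeneralSecurityException;
import java.security.PublicKey;
import java.security.Signature;

final class StreamSignatureCheck {
  // "SHA1withRSA" matches signatures produced by `openssl dgst -sha1 -sign ...`
  static boolean verify(PublicKey key, byte[] sigBytes, InputStream data)
      throws GeneralSecurityException, IOException {
    Signature signature = Signature.getInstance("SHA1withRSA");
    signature.initVerify(key);
    byte[] buf = new byte[8192];
    int n;
    while ((n = data.read(buf)) != -1) {
      signature.update(buf, 0, n);     // feed the file incrementally instead of loading it whole
    }
    return signature.verify(sigBytes); // true only if this key produced the signature
  }
}

With the existing cryptokeys fixtures, verifying pk2sig against pubk2.der succeeds and against pubk1.der fails (and vice versa for pk1sig), which is exactly what the four new try-with-resources assertions check.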
diff --git a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
index 4a0f1ba..bb986d9 100644
--- a/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
+++ b/solr/core/src/test/org/apache/solr/core/BlobRepositoryMockingTest.java
@@ -22,7 +22,6 @@
 import java.io.StringWriter;
 import java.nio.ByteBuffer;
 import java.nio.charset.Charset;
-import java.util.Objects;
 import java.util.concurrent.ConcurrentHashMap;
 
 import org.apache.commons.io.IOUtils;
@@ -36,7 +35,6 @@
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.eq;
 import static org.mockito.Mockito.mock;
@@ -62,9 +60,7 @@
   ByteBuffer blobData = ByteBuffer.wrap(BLOBSTR.getBytes(UTF8));
   boolean blobFetched = false;
   String blobKey = "";
-  String url = null;
-  ByteBuffer filecontent = null;
-  
+
   @BeforeClass
   public static void beforeClass() {
     SolrTestCaseJ4.assumeWorkingMockito();
@@ -84,14 +80,6 @@
       }
 
       @Override
-      ByteBuffer fetchFromUrl(String key, String url) {
-        if(!Objects.equals(url, BlobRepositoryMockingTest.this.url)) return null;
-        blobKey = key;
-        blobFetched = true;
-        return filecontent;
-      }
-
-      @Override
       ConcurrentHashMap<String, BlobContent> createMap() {
         return mapMock;
       }
@@ -124,30 +112,6 @@
     verify(mapMock).put(eq("foo!"), any(BlobRepository.BlobContent.class));
   }
 
-  @SuppressWarnings("unchecked")
-  @Test
-  public void testGetBlobIncrRefByUrl() throws Exception{
-    when(mockContainer.isZooKeeperAware()).thenReturn(true);
-    filecontent = TestDynamicLoading.getFileContent("runtimecode/runtimelibs_v2.jar.bin");
-    url = "http://localhost:8080/myjar/location.jar";
-    BlobRepository.BlobContentRef ref = repository.getBlobIncRef( "filefoo",null,url,
-        "bc5ce45ad281b6a08fb7e529b1eb475040076834816570902acb6ebdd809410e31006efdeaa7f78a6c35574f3504963f5f7e4d92247d0eb4db3fc9abdda5d417");
-    assertTrue("filefoo".equals(blobKey));
-    assertTrue(blobFetched);
-    assertNotNull(ref.blob);
-    assertEquals(filecontent, ref.blob.get());
-    verify(mockContainer).isZooKeeperAware();
-    try {
-      repository.getBlobIncRef( "filefoo",null,url,
-          "WRONG-SHA512-KEY");
-      fail("expected exception");
-    } catch (Exception e) {
-      assertTrue(e.getMessage().contains(" expected sha512 hash : WRONG-SHA512-KEY , actual :"));
-    }
-
-    url = null;
-    filecontent = null;
-  }
 
   @SuppressWarnings("unchecked")
   @Test
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
index 22ee299..3a8f2e6 100644
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
+++ b/solr/core/src/test/org/apache/solr/core/TestDynamicLoading.java
@@ -48,6 +48,7 @@
   // 12-Jun-2018 @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028")
   //17-Aug-2018 commented @BadApple(bugUrl="https://issues.apache.org/jira/browse/SOLR-12028") // added 20-Jul-2018
   public void testDynamicLoading() throws Exception {
+
     System.setProperty("enable.runtime.lib", "true");
     setupRestTestHarnesses();
 
@@ -97,7 +98,7 @@
 
 
     assertNotNull(map = (Map) map.get("error"));
-    assertTrue("full output " + map, map.get("msg").toString().contains("no such blob or version available: colltest/1" ));
+    assertTrue("full output " + map, map.get("msg").toString().contains("no such resource available: colltest/1" ));
     payload = " {\n" +
         "  'set' : {'watched': {" +
         "                    'x':'X val',\n" +
@@ -128,9 +129,6 @@
     }
     ByteBuffer jar = null;
 
-//     jar = persistZip("/tmp/runtimelibs.jar.bin", TestDynamicLoading.class, RuntimeLibReqHandler.class, RuntimeLibResponseWriter.class, RuntimeLibSearchComponent.class);
-//    if(true) return;
-
     jar = getFileContent("runtimecode/runtimelibs.jar.bin");
     TestBlobHandler.postAndCheck(cloudClient, baseURL, blobName, jar, 1);
 
@@ -284,4 +282,8 @@
     return bos.getByteBuffer();
   }
 
+/*  public static void main(String[] args) throws Exception {
+    persistZip("/tmp/runtimelibs_v3.jar.bin", TestDynamicLoading.class, RuntimeLibReqHandler.class, RuntimeLibResponseWriter.class, RuntimeLibSearchComponent.class);
+    if(true) return;
+  }*/
 }
diff --git a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java b/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java
deleted file mode 100644
index 575cf9e..0000000
--- a/solr/core/src/test/org/apache/solr/core/TestDynamicLoadingUrl.java
+++ /dev/null
@@ -1,127 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.core;
-
-import javax.servlet.http.HttpServletRequest;
-import javax.servlet.http.HttpServletResponse;
-import java.io.IOException;
-import java.nio.ByteBuffer;
-import java.util.Arrays;
-import java.util.Map;
-
-import com.google.common.collect.ImmutableMap;
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.cloud.AbstractFullDistribZkTestBase;
-import org.apache.solr.common.util.Pair;
-import org.apache.solr.util.RestTestHarness;
-import org.eclipse.jetty.server.Connector;
-import org.eclipse.jetty.server.Request;
-import org.eclipse.jetty.server.Server;
-import org.eclipse.jetty.server.ServerConnector;
-import org.eclipse.jetty.server.handler.AbstractHandler;
-import org.junit.BeforeClass;
-
-import static java.util.Arrays.asList;
-import static org.apache.solr.core.TestDynamicLoading.getFileContent;
-import static org.apache.solr.handler.TestSolrConfigHandlerCloud.compareValues;
-
-@SolrTestCaseJ4.SuppressSSL
-public class TestDynamicLoadingUrl extends AbstractFullDistribZkTestBase {
-
-  @BeforeClass
-  public static void enableRuntimeLib() throws Exception {
-    System.setProperty("enable.runtime.lib", "true");
-  }
-
-  public static Pair<Server, Integer> runHttpServer(Map<String, Object> jars) throws Exception {
-    final Server server = new Server();
-    final ServerConnector connector = new ServerConnector(server);
-    server.setConnectors(new Connector[] { connector });
-    server.setHandler(new AbstractHandler() {
-      @Override
-      public void handle(String s, Request request, HttpServletRequest req, HttpServletResponse rsp)
-        throws IOException {
-        ByteBuffer b = (ByteBuffer) jars.get(s);
-        if (b != null) {
-          rsp.getOutputStream().write(b.array(), 0, b.limit());
-          rsp.setContentType("application/octet-stream");
-          rsp.setStatus(HttpServletResponse.SC_OK);
-          request.setHandled(true);
-        }
-      }
-    });
-    server.start();
-    return new Pair<>(server, connector.getLocalPort());
-  }
-
-  public void testDynamicLoadingUrl() throws Exception {
-    setupRestTestHarnesses();
-    Pair<Server, Integer> pair = runHttpServer(ImmutableMap.of("/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin")));
-    Integer port = pair.second();
-
-    try {
-      String payload = "{\n" +
-          "'add-runtimelib' : { 'name' : 'urljar', url : 'http://localhost:" + port + "/jar1.jar'" +
-          "  'sha512':'e01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" +
-          "}";
-      RestTestHarness client = randomRestTestHarness();
-      TestSolrConfigHandler.runConfigCommandExpectFailure(client, "/config", payload, "Invalid jar");
-
-
-      payload = "{\n" +
-          "'add-runtimelib' : { 'name' : 'urljar', url : 'http://localhost:" + port + "/jar1.jar'" +
-          "  'sha512':'d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}" +
-          "}";
-      client = randomRestTestHarness();
-      TestSolrConfigHandler.runConfigCommand(client, "/config", payload);
-      TestSolrConfigHandler.testForResponseElement(client,
-          null,
-          "/config/overlay",
-          null,
-          Arrays.asList("overlay", "runtimeLib", "urljar", "sha512"),
-          "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420", 120);
-
-      payload = "{\n" +
-          "'create-requesthandler' : { 'name' : '/runtime', 'class': 'org.apache.solr.core.RuntimeLibReqHandler', 'runtimeLib' : true}" +
-          "}";
-      client = randomRestTestHarness();
-      TestSolrConfigHandler.runConfigCommand(client, "/config", payload);
-
-      TestSolrConfigHandler.testForResponseElement(client,
-          null,
-          "/config/overlay",
-          null,
-          Arrays.asList("overlay", "requestHandler", "/runtime", "class"),
-          "org.apache.solr.core.RuntimeLibReqHandler", 120);
-
-      Map result = TestSolrConfigHandler.testForResponseElement(client,
-          null,
-          "/runtime",
-          null,
-          Arrays.asList("class"),
-          "org.apache.solr.core.RuntimeLibReqHandler", 120);
-      compareValues(result, MemClassLoader.class.getName(), asList("loader"));
-    } finally {
-      pair.first().stop();
-
-    }
-
-
-  }
-}
-
diff --git a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
index 17494e0..5f6a1c2 100644
--- a/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
+++ b/solr/core/src/test/org/apache/solr/core/TestSolrConfigHandler.java
@@ -41,7 +41,7 @@
 import org.apache.solr.handler.TestSolrConfigHandlerConcurrent;
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.search.SolrCache;
+import org.apache.solr.search.SolrCacheHolder;
 import org.apache.solr.util.RESTfulServerProvider;
 import org.apache.solr.util.RestTestBase;
 import org.apache.solr.util.RestTestHarness;
@@ -543,8 +543,8 @@
         HashMap m = new HashMap();
         rsp.add("caches", m);
         for (String c : caches) {
-          SolrCache cache = req.getSearcher().getCache(c);
-          if(cache != null) m.put(c, cache.getClass().getName());
+          SolrCacheHolder cache = (SolrCacheHolder) req.getSearcher().getCache(c);
+          if(cache != null) m.put(c, cache.get().getClass().getName());
         }
       }
     }
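The hunk above now has to unwrap the cache: with the CacheConfig refactoring in this commit, getSearcher().getCache(name) returns a SolrCacheHolder, and the concrete implementation class is only reachable through get(). Conceptually the holder is a delegating wrapper, so the implementation can be created lazily and swapped when its package is updated without touching callers. A self-contained sketch of that idea (SimpleCache, MapCache and CacheHolder are hypothetical names, not the actual SolrCacheHolder source):

import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

interface SimpleCache<K, V> {
  V get(K key);
  void put(K key, V value);
}

final class MapCache<K, V> implements SimpleCache<K, V> {
  private final Map<K, V> map = new ConcurrentHashMap<>();
  public V get(K key) { return map.get(key); }
  public void put(K key, V value) { map.put(key, value); }
}

final class CacheHolder<K, V> implements SimpleCache<K, V> {
  private volatile SimpleCache<K, V> delegate;

  CacheHolder(SimpleCache<K, V> delegate) { this.delegate = delegate; }

  // callers that need the real implementation class unwrap it, as the test above does
  SimpleCache<K, V> get() { return delegate; }

  // regular cache traffic passes straight through to the current implementation
  public V get(K key) { return delegate.get(key); }
  public void put(K key, V value) { delegate.put(key, value); }

  // swapped when the backing implementation class (e.g. from a package) changes
  void swap(SimpleCache<K, V> newImpl) { this.delegate = newImpl; }
}

In the patched test this shows up as cache.get().getClass().getName(): the holder's own class is uninteresting, only the wrapped implementation class identifies which cache was actually loaded.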
diff --git a/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java b/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java
deleted file mode 100644
index 2af16c5..0000000
--- a/solr/core/src/test/org/apache/solr/handler/TestContainerReqHandler.java
+++ /dev/null
@@ -1,433 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.solr.handler;
-
-import java.io.IOException;
-import java.lang.invoke.MethodHandles;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.function.Predicate;
-
-import org.apache.solr.SolrTestCaseJ4;
-import org.apache.solr.client.solrj.SolrClient;
-import org.apache.solr.client.solrj.SolrRequest;
-import org.apache.solr.client.solrj.SolrResponse;
-import org.apache.solr.client.solrj.SolrServerException;
-import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
-import org.apache.solr.client.solrj.request.V2Request;
-import org.apache.solr.client.solrj.response.V2Response;
-import org.apache.solr.cloud.MiniSolrCloudCluster;
-import org.apache.solr.cloud.SolrCloudTestCase;
-import org.apache.solr.common.cloud.ClusterProperties;
-import org.apache.solr.common.cloud.SolrZkClient;
-import org.apache.solr.common.params.MapSolrParams;
-import org.apache.solr.common.util.Pair;
-import org.apache.solr.common.util.Utils;
-import org.apache.solr.core.MemClassLoader;
-import org.apache.solr.core.RuntimeLib;
-import org.apache.solr.request.SolrRequestHandler;
-import org.apache.solr.util.LogLevel;
-import org.apache.zookeeper.CreateMode;
-import org.apache.zookeeper.data.Stat;
-import org.eclipse.jetty.server.Server;
-import org.junit.BeforeClass;
-import org.junit.Test;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import com.google.common.collect.ImmutableMap;
-
-import static org.apache.solr.cloud.TestCryptoKeys.readFile;
-import static org.apache.solr.common.util.Utils.getObjectByPath;
-import static org.apache.solr.core.TestDynamicLoading.getFileContent;
-import static org.apache.solr.core.TestDynamicLoadingUrl.runHttpServer;
-
-@SolrTestCaseJ4.SuppressSSL
-@LogLevel("org.apache.solr.common.cloud.ZkStateReader=DEBUG;org.apache.solr.handler.admin.CollectionHandlerApi=DEBUG;org.apache.solr.core.LibListener=DEBUG;org.apache.solr.common.cloud.ClusterProperties=DEBUG")
-public class TestContainerReqHandler extends SolrCloudTestCase {
-  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-
-
-  @BeforeClass
-  public static void setupCluster() throws Exception {
-    System.setProperty("enable.runtime.lib", "true");
-
-  }
-
-  static void assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception {
-    for (int i = 0; i < repeats; i++) {
-      if (i > 0) {
-        Thread.sleep(100);
-      }
-      try {
-        SolrResponse rsp = req.process(client);
-        try {
-          for (Object e : vals.entrySet()) {
-            Map.Entry entry = (Map.Entry) e;
-            String key = (String) entry.getKey();
-            Object val = entry.getValue();
-            Predicate p = val instanceof Predicate ? (Predicate) val : o -> {
-              String v = o == null ? null : String.valueOf(o);
-              return Objects.equals(val, o);
-            };
-            assertTrue("attempt: " + i + " Mismatch for value : '" + key + "' in response " + Utils.toJSONString(rsp),
-                p.test(rsp.getResponse()._get(key, null)));
-
-          }
-          return;
-        } catch (Exception e) {
-          if (i >= repeats - 1) throw e;
-          continue;
-        }
-
-      } catch (Exception e) {
-        if (i >= repeats - 1) throw e;
-        log.error("exception in request", e);
-        continue;
-      }
-    }
-
-
-  }
-
-  private static Map<String, Object> assertVersionInSync(SolrZkClient zkClient, SolrClient solrClient) throws SolrServerException, IOException {
-    Stat stat = new Stat();
-    Map<String, Object> map = new ClusterProperties(zkClient).getClusterProperties(stat);
-    assertEquals(String.valueOf(stat.getVersion()), getExtResponse(solrClient)._getStr("metadata/version", null));
-    return map;
-  }
-
-  private static V2Response getExtResponse(SolrClient solrClient) throws SolrServerException, IOException {
-    return new V2Request.Builder("/node/ext")
-        .withMethod(SolrRequest.METHOD.GET)
-        .build().process(solrClient);
-  }
-
-  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13781")
-  @Test
-  public void testRuntimeLib() throws Exception {
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    MiniSolrCloudCluster cluster = configureCluster(4).configure();
-    try {
-      String payload = null;
-      try {
-        payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
-            "sha512 : 'wrong-sha512'}}";
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-        fail("Expected error");
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("expected sha512 hash :"));
-      }
-
-      try {
-        payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar0.jar', " +
-            "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-        fail("Expected error");
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        assertTrue("Actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("no such resource available: foo"));
-      }
-
-      payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-runtimelib/sha512"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
-
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler'}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
-
-
-      V2Request request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", null));
-
-
-      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
-
-
-      payload = "{update-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar3.jar', " +
-          "sha512 : 'f67a7735a89b4348e273ca29e4651359d6d976ba966cb871c4b468ea1dbd452e42fcde9d188b7788e5a1ef668283c690606032922364759d19588666d5862653'}}";
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-runtimelib/sha512"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
-
-
-      request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", "3")
-      );
-
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{delete-requesthandler: 'bar'}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      request = new V2Request.Builder("/node/ext")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(SolrRequestHandler.TYPE,
-          (Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
-      new V2Request.Builder("/cluster")
-          .withPayload("{delete-runtimelib : 'foo'}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(RuntimeLib.TYPE,
-          (Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
-
-
-    } finally {
-      server.first().stop();
-      cluster.shutdown();
-    }
-  }
-
-  @Test
-  public void testRuntimeLibWithSig2048() throws Exception {
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    MiniSolrCloudCluster cluster =  configureCluster(4).configure();
-
-    try {
-
-      byte[] derFile = readFile("cryptokeys/pub_key2048.der");
-      cluster.getZkClient().makePath("/keys/exe", true);
-      cluster.getZkClient().create("/keys/exe/pub_key2048.der", derFile, CreateMode.PERSISTENT, true);
-
-      String signature = "NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCcfNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvdRxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQKJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C6uRCKCeFMrzQ/k5inasXLw==";
-
-      String payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sig : 'EdYkvRpMZbvElN93/xUmyKXcj6xHP16AVk71TlTascEwCb5cFQ2AeKhPIlwYpkLWXEOcLZKfeXoWwOLaV5ZNhg==' ," +
-          "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
-      try {
-        new V2Request.Builder("/cluster")
-            .withPayload(payload)
-            .withMethod(SolrRequest.METHOD.POST)
-            .build().process(cluster.getSolrClient());
-      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
-        //No key matched signature for jar
-        assertTrue(e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("No key matched signature for jar"));
-      }
-
-
-      payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sig : '" + signature + "'," +
-          "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-runtimelib/sha512"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler'}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
-
-
-      V2Request request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", null));
-
-
-      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
-
-      payload = "{update-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar3.jar', " +
-          "sig : 'BSx/v0eKWX+LzkWF+iIAzwGL9rezWMePsyRzi4TvV6boATZ9cSfeUAqUgRW50f/hAHX4/hrHr2Piy8za9tIUoXbLqn3xJNNroOqpcVEgwh1Zii4c7zPwUSB9gtd9zlAK4LAPLdjxILS8NXpTD2zLycc8kSpcyTpSTITqz6HA3HsPGC81WIq2k3IRqYAkacn46viW+nnEjA7OxDCOqoL//evjxDWQ6R1YggTGh4u5MSWZJCiCPJNQnTlPRzUZOAJjtX7PblDrKeiunKGbjtiOhFLYkupe1lSlIRLiJV/qqopO4TQGO1bhbxeCKAX2vEz5Ch5bGOa+VZLJJGaDo318UQ==' ," +
-          "sha512 : 'f67a7735a89b4348e273ca29e4651359d6d976ba966cb871c4b468ea1dbd452e42fcde9d188b7788e5a1ef668283c690606032922364759d19588666d5862653'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-runtimelib/sha512"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
-
-
-      request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", "3"));
-
-
-    } finally {
-      server.first().stop();
-      cluster.shutdown();
-    }
-
-  }
-
-  @Test
-  public void testRuntimeLibWithSig512() throws Exception {
-    Map<String, Object> jars = Utils.makeMap(
-        "/jar1.jar", getFileContent("runtimecode/runtimelibs.jar.bin"),
-        "/jar2.jar", getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
-        "/jar3.jar", getFileContent("runtimecode/runtimelibs_v3.jar.bin"));
-
-    Pair<Server, Integer> server = runHttpServer(jars);
-    int port = server.second();
-    MiniSolrCloudCluster cluster =  configureCluster(4).configure();
-
-    try {
-
-      byte[] derFile = readFile("cryptokeys/pub_key512.der");
-      cluster.getZkClient().makePath("/keys/exe", true);
-      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
-
-      String signature = "L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==";
-
-      String payload = "{add-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar1.jar', " +
-          "sig : '" + signature + "'," +
-          "sha512 : 'd01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add-runtimelib/sha512"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler'}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
-
-
-      V2Request request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", null));
-
-
-      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "bar", "class")));
-
-      payload = "{update-runtimelib:{name : 'foo', url: 'http://localhost:" + port + "/jar3.jar', " +
-          "sig : 'pnH8uDHsTF0HWyQqABqVWmvo3rM/Mp2qpuo6S9YXZA9Ifg8NjHX8WzPe6EzlaqBcYcusrEV0b+5NCBx4AS0TGA==' ," +
-          "sha512 : 'f67a7735a89b4348e273ca29e4651359d6d976ba966cb871c4b468ea1dbd452e42fcde9d188b7788e5a1ef668283c690606032922364759d19588666d5862653'}}";
-
-      new V2Request.Builder("/cluster")
-          .withPayload(payload)
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update-runtimelib/sha512"),
-          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "runtimeLib/foo/sha512"));
-
-
-      request = new V2Request.Builder("/node/ext/bar")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build();
-      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
-          "class", "org.apache.solr.core.RuntimeLibReqHandler",
-          "loader", MemClassLoader.class.getName(),
-          "version", "3"));
-
-    } finally {
-      server.first().stop();
-      cluster.shutdown();
-    }
-
-  }
-
-  @Test
-  public void testSetClusterReqHandler() throws Exception {
-    MiniSolrCloudCluster cluster = configureCluster(4).configure();
-    try {
-      SolrZkClient zkClient = cluster.getZkClient();
-      new V2Request.Builder("/cluster")
-          .withPayload("{add-requesthandler:{name : 'foo', class : 'org.apache.solr.handler.DumpRequestHandler'}}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-
-      Map<String, Object> map = assertVersionInSync(zkClient, cluster.getSolrClient());
-
-      assertEquals("org.apache.solr.handler.DumpRequestHandler",
-          getObjectByPath(map, true, Arrays.asList("requestHandler", "foo", "class")));
-
-      assertVersionInSync(zkClient, cluster.getSolrClient());
-      V2Response rsp = new V2Request.Builder("/node/ext/foo")
-          .withMethod(SolrRequest.METHOD.GET)
-          .withParams(new MapSolrParams((Map) Utils.makeMap("testkey", "testval")))
-          .build().process(cluster.getSolrClient());
-      assertEquals("testval", rsp._getStr("params/testkey", null));
-
-      new V2Request.Builder("/cluster")
-          .withPayload("{delete-requesthandler: 'foo'}")
-          .withMethod(SolrRequest.METHOD.POST)
-          .build().process(cluster.getSolrClient());
-
-      assertNull(getObjectByPath(map, true, Arrays.asList("requestHandler", "foo")));
-    } finally {
-      cluster.shutdown();
-    }
-
-  }
-}
diff --git a/solr/core/src/test/org/apache/solr/handler/TestPackages.java b/solr/core/src/test/org/apache/solr/handler/TestPackages.java
new file mode 100644
index 0000000..d5fc5c0
--- /dev/null
+++ b/solr/core/src/test/org/apache/solr/handler/TestPackages.java
@@ -0,0 +1,832 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.solr.handler;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.Reader;
+import java.lang.invoke.MethodHandles;
+import java.nio.ByteBuffer;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.concurrent.Callable;
+import java.util.function.Predicate;
+
+import com.google.common.collect.ImmutableMap;
+import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.client.solrj.ResponseParser;
+import org.apache.solr.client.solrj.SolrClient;
+import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrRequest;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.embedded.JettySolrRunner;
+import org.apache.solr.client.solrj.impl.BaseHttpSolrClient;
+import org.apache.solr.client.solrj.impl.HttpSolrClient;
+import org.apache.solr.client.solrj.request.AbstractUpdateRequest;
+import org.apache.solr.client.solrj.request.CollectionAdminRequest;
+import org.apache.solr.client.solrj.request.GenericSolrRequest;
+import org.apache.solr.client.solrj.request.QueryRequest;
+import org.apache.solr.client.solrj.request.UpdateRequest;
+import org.apache.solr.client.solrj.request.V2Request;
+import org.apache.solr.client.solrj.response.V2Response;
+import org.apache.solr.cloud.ConfigRequest;
+import org.apache.solr.cloud.MiniSolrCloudCluster;
+import org.apache.solr.cloud.SolrCloudTestCase;
+import org.apache.solr.common.NavigableObject;
+import org.apache.solr.common.cloud.ClusterProperties;
+import org.apache.solr.common.cloud.SolrZkClient;
+import org.apache.solr.common.params.CommonParams;
+import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.params.ModifiableSolrParams;
+import org.apache.solr.common.params.SolrParams;
+import org.apache.solr.common.util.NamedList;
+import org.apache.solr.common.util.StrUtils;
+import org.apache.solr.common.util.Utils;
+import org.apache.solr.core.ConfigOverlay;
+import org.apache.solr.core.MemClassLoader;
+import org.apache.solr.core.PackageBag;
+import org.apache.solr.request.SolrRequestHandler;
+import org.apache.solr.util.LogLevel;
+import org.apache.zookeeper.CreateMode;
+import org.apache.zookeeper.data.Stat;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import static java.nio.charset.StandardCharsets.UTF_8;
+import static java.util.Arrays.asList;
+import static org.apache.solr.cloud.SolrCloudAuthTestCase.NOT_NULL_PREDICATE;
+import static org.apache.solr.cloud.TestCryptoKeys.readFile;
+import static org.apache.solr.common.params.CommonParams.JAVABIN;
+import static org.apache.solr.common.params.CommonParams.WT;
+import static org.apache.solr.common.util.Utils.JAVABINCONSUMER;
+import static org.apache.solr.common.util.Utils.getObjectByPath;
+import static org.apache.solr.common.util.Utils.newBytesConsumer;
+import static org.apache.solr.core.BlobRepository.sha256Digest;
+import static org.apache.solr.core.TestDynamicLoading.getFileContent;
+
+@SolrTestCaseJ4.SuppressSSL
+@LogLevel("org.apache.solr.common.cloud.ZkStateReader=DEBUG;org.apache.solr.handler.admin.ClusterAPI=DEBUG;org.apache.solr.core.PackageBag=DEBUG;org.apache.solr.common.cloud.ClusterProperties=DEBUG")
+public class TestPackages extends SolrCloudTestCase {
+  private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
+
+
+  @BeforeClass
+  public static void setupCluster() throws Exception {
+    System.setProperty("enable.package", "true");
+
+  }
+
+  static NavigableObject assertResponseValues(int repeats, SolrClient client, SolrRequest req, Map vals) throws Exception {
+    Callable<NavigableObject> callable = () -> req.process(client);
+
+    return assertResponseValues(repeats, callable,vals);
+  }
+
+  static NavigableObject assertResponseValues(int repeats,  Callable<NavigableObject> callable,Map vals) throws Exception {
+    NavigableObject rsp = null;
+
+    for (int i = 0; i < repeats; i++) {
+      if (i > 0) {
+        Thread.sleep(100);
+      }
+      try {
+        rsp = callable.call();
+      } catch (Exception e) {
+        if (i >= repeats - 1) throw e;
+        continue;
+      }
+      for (Object e : vals.entrySet()) {
+        Map.Entry entry = (Map.Entry) e;
+        String k = (String) entry.getKey();
+        List<String> key = StrUtils.split(k, '/');
+
+        Object val = entry.getValue();
+        Predicate p = val instanceof Predicate ? (Predicate) val : o -> {
+          // compare the string form of the response value so non-String values match String expectations
+          String v = o == null ? null : String.valueOf(o);
+          return Objects.equals(val, v);
+        };
+        boolean isPass = p.test(rsp._get(key, null));
+        if (isPass) return rsp;
+        else if (i >= repeats - 1) {
+          fail("req: " + callable.toString() +" . attempt: " + i + " Mismatch for value : '" + key + "' in response , " + Utils.toJSONString(rsp));
+        }
+
+      }
+
+    }
+    return rsp;
+  }
+
+  private static Map<String, Object> assertVersionInSync(SolrZkClient zkClient, SolrClient solrClient) throws SolrServerException, IOException {
+    Stat stat = new Stat();
+    Map<String, Object> map = new ClusterProperties(zkClient).getClusterProperties(stat);
+    assertEquals(String.valueOf(stat.getVersion()), getExtResponse(solrClient)._getStr("metadata/version", null));
+    return map;
+  }
+
+  private static V2Response getExtResponse(SolrClient solrClient) throws SolrServerException, IOException {
+    return new V2Request.Builder("/node/ext")
+        .withMethod(SolrRequest.METHOD.GET)
+        .build().process(solrClient);
+  }
+
+//  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13781")
+  @Test
+  public void testPackageAPI() throws Exception {
+    System.setProperty("enable.package", "true");
+    MiniSolrCloudCluster cluster = configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .configure();
+    byte[] derFile = readFile("cryptokeys/pub_key512.der");
+    cluster.getZkClient().makePath("/keys/exe", true);
+    cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+    try {
+
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+          "e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc", "runtimelibs.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
+          "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4", "runtimelibs_v2.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v3.jar.bin"),
+          "20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3","runtimelibs_v3.jar");
+
+      String payload = null;
+      try {
+        payload = "{add:{name : 'global' , version:'0.1', file: {id : 'wrong-id' , sig:'wrong-sig'}}}";
+        new V2Request.Builder("/cluster/package")
+            .withPayload(payload)
+            .withMethod(SolrRequest.METHOD.POST)
+            .build().process(cluster.getSolrClient());
+        fail("Error expected");
+      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
+        assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()),
+            e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("No such file: "));
+      }
+
+
+      payload = "{add:{name : 'global', version  :'1' , file : {" +
+          " id : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar' , " +
+          "sig : 'L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ==' }}}";
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add/file/id"),
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true,
+              "packages/global/file/id"));
+
+
+      new V2Request.Builder("/cluster")
+          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler', package : global}}")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
+
+
+      V2Request request = new V2Request.Builder("/node/ext/bar")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build();
+      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
+          "class", "org.apache.solr.core.RuntimeLibReqHandler",
+          "loader", PackageBag.PackageResourceLoader.class.getName(),
+          "version", null));
+
+
+      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
+          getObjectByPath(map, true, asList("requestHandler", "bar", "class")));
+
+
+      payload = "{update:{name : 'global' , version: '3'," +
+          " file: {id : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3-runtimelibs_v3.jar', " +
+          "sig: 'a400n4T7FT+2gM0SC6+MfSOExjud8MkhTSFylhvwNjtWwUgKdPFn434Wv7Qc4QEqDVLhQoL3WqYtQmLPti0G4Q==' }}}";
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      Map<String, Object> clusterProperties = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
+      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update/file/id"),
+          getObjectByPath(clusterProperties, true, "packages/global/file/id"));
+
+
+      request = new V2Request.Builder("/node/ext/bar")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build();
+      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
+          "class", "org.apache.solr.core.RuntimeLibReqHandler",
+          "loader", MemClassLoader.class.getName(),
+          "version", "3")
+      );
+
+
+      new V2Request.Builder("/cluster")
+          .withPayload("{delete-requesthandler: 'bar'}")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      request = new V2Request.Builder("/node/ext")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build();
+      assertResponseValues(10, cluster.getSolrClient(), request, ImmutableMap.of(SolrRequestHandler.TYPE,
+          (Predicate<Object>) o -> o instanceof List && ((List) o).isEmpty()));
+      new V2Request.Builder("/cluster/package")
+          .withPayload("{delete : 'global'}")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+
+      assertResponseValues(10, cluster.getSolrClient(),
+          new V2Request.Builder("/cluster/package")
+              .forceV2(true)
+              .withMethod(SolrRequest.METHOD.GET) .build(),
+          ImmutableMap.of(CommonParams.PACKAGES,
+          (Predicate<Object>) o -> o instanceof Map && ((Map) o).isEmpty()));
+
+
+      String baseUrl = cluster.getRandomJetty(random()).getBaseUrl().toString();
+      try (HttpSolrClient client = new HttpSolrClient.Builder(baseUrl).build()) {
+        V2Response rsp = new V2Request.Builder("/node/filestore/package")
+            .withMethod(SolrRequest.METHOD.GET)
+            .forceV2(true)
+            .build()
+            .process(client);
+        assertNotNull(rsp._get(asList("files", "package", "e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar"), null));
+        assertNotNull(rsp._get(asList("files", "package", "20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3-runtimelibs_v3.jar"), null));
+
+        ByteBuffer buf = Utils.executeGET(client.getHttpClient(),
+            baseUrl.replace("/solr", "/api") +
+                "/node/filestore/package/e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar",
+            newBytesConsumer(Integer.MAX_VALUE));
+        assertEquals("e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc", sha256Digest(buf));
+
+
+        buf = Utils.executeGET(client.getHttpClient(), baseUrl.replace("/solr", "/api")
+                + "/node/filestore/package/20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3-runtimelibs_v3.jar",
+            newBytesConsumer(Integer.MAX_VALUE));
+        assertEquals("20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3", sha256Digest(buf));
+      }
+
+
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
+  @Test
+  public void testRuntimeLibWithSig2048() throws Exception {
+    MiniSolrCloudCluster cluster = configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .configure();
+
+    try {
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+          "e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc", "runtimelibs.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
+          "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4", "runtimelibs_v2.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v3.jar.bin"),
+          "20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3",
+          "runtimelibs_v3.jar");
+
+      byte[] derFile = readFile("cryptokeys/pub_key2048.der");
+      cluster.getZkClient().makePath("/keys/exe", true);
+      cluster.getZkClient().create("/keys/exe/pub_key2048.der", derFile, CreateMode.PERSISTENT, true);
+
+      String signature = "NaTm3+i99/ZhS8YRsLc3NLz2Y6VuwEbu7DihY8GAWwWIGm+jpXgn1JiuaenfxFCcfNKCC9WgZmEgbTZTzmV/OZMVn90u642YJbF3vTnzelW1pHB43ZRAJ1iesH0anM37w03n3es+vFWQtuxc+2Go888fJoMkUX2C6Zk6Jn116KE45DWjeyPM4mp3vvGzwGvdRxP5K9Q3suA+iuI/ULXM7m9mV4ruvs/MZvL+ELm5Jnmk1bBtixVJhQwJP2z++8tQKJghhyBxPIC/2fkAHobQpkhZrXu56JjP+v33ul3Ku4bbvfVMY/LVwCAEnxlvhk+C6uRCKCeFMrzQ/k5inasXLw==";
+
+      String payload = "{add:{name : 'global', version: '1', file: {" +
+          "  sig : 'EdYkvRpMZbvElN93/xUmyKXcj6xHP16AVk71TlTascEwCb5cFQ2AeKhPIlwYpkLWXEOcLZKfeXoWwOLaV5ZNhg==' ," +
+          "id : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar'}}}";
+      try {
+        new V2Request.Builder("/cluster/package")
+            .withPayload(payload)
+            .withMethod(SolrRequest.METHOD.POST)
+            .build().process(cluster.getSolrClient());
+        fail("should have failed");
+      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
+        //No key matched signature for jar
+        assertTrue(e.getMetaData()._getStr("/error/details[0]/errorMessages[0]", "")
+            .contains("Invalid signature for file"));
+      }
+
+
+      payload = "{add:{name : 'global', version : '1', file:{  sig : '" + signature +
+          "', id : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar'}}}";
+
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add/file/id"),
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "packages/global/file/id"));
+
+      new V2Request.Builder("/cluster")
+          .withPayload("{add-requesthandler:{name : 'bar', class : 'org.apache.solr.core.RuntimeLibReqHandler' package : global}}")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      Map<String, Object> map = new ClusterProperties(cluster.getZkClient()).getClusterProperties();
+
+
+      V2Request request = new V2Request.Builder("/node/ext/bar")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build();
+      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
+          "class", "org.apache.solr.core.RuntimeLibReqHandler",
+          "loader", MemClassLoader.class.getName(),
+          "version", null));
+
+
+      assertEquals("org.apache.solr.core.RuntimeLibReqHandler",
+          getObjectByPath(map, true, asList("requestHandler", "bar", "class")));
+
+      payload = "{update:{name : 'global', version : '3', file:{" +
+          " sig : 'YxFr6SpYrDwG85miDfRWHTjU9UltjtIWQZEhcV55C2rczRUVowCYBxmsDv5mAM8j0CTv854xpI1DtBT86wpoTdbF95LQuP9FJId4TS1j8bZ9cxHP5Cqyz1uBHFfUUNUrnpzTHQkVTp02O9NAjh3c2W41bL4U7j6jQ32+4CW2M+x00TDG0y0H75rQDR8zbLt31oWCz+sBOdZ3rGKJgAvdoGm/wVCTmsabZN+xoz4JaDeBXF16O9Uk9SSq4G0dz5YXFuLxHK7ciB5t0+q6pXlF/tdlDqF76Abze0R3d2/0MhXBzyNp3UxJmj6DiprgysfB0TbQtJG0XGfdSmx0VChvcA==' ," +
+          "id : '20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3-runtimelibs_v3.jar'}}}";
+
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update/file/id"),
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "packages/global/file/id"));
+
+
+      request = new V2Request.Builder("/node/ext/bar")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build();
+      assertResponseValues(10, cluster.getSolrClient(), request, Utils.makeMap(
+          "class", "org.apache.solr.core.RuntimeLibReqHandler",
+          "loader", MemClassLoader.class.getName(),
+          "version", "3"));
+      assertResponseValues(5, cluster.getSolrClient(),
+          new V2Request.Builder("/cluster/package")
+          .forceV2(true)
+          .withMethod(SolrRequest.METHOD.GET)
+          .build(),
+          Utils.makeMap("/packages/global/file/id",
+              "20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3-runtimelibs_v3.jar"));
+
+    } finally {
+      cluster.shutdown();
+    }
+
+  }
+
+
+  @Test
+  public void testSetClusterReqHandler() throws Exception {
+    MiniSolrCloudCluster cluster = configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .configure();
+    try {
+      SolrZkClient zkClient = cluster.getZkClient();
+      new V2Request.Builder("/cluster")
+          .withPayload("{add-requesthandler:{name : 'foo', class : 'org.apache.solr.handler.DumpRequestHandler'}}")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+
+      Map<String, Object> map = assertVersionInSync(zkClient, cluster.getSolrClient());
+
+      assertEquals("org.apache.solr.handler.DumpRequestHandler",
+          getObjectByPath(map, true, asList("requestHandler", "foo", "class")));
+
+      assertVersionInSync(zkClient, cluster.getSolrClient());
+      V2Response rsp = new V2Request.Builder("/node/ext/foo")
+          .withMethod(SolrRequest.METHOD.GET)
+          .withParams(new MapSolrParams((Map) Utils.makeMap("testkey", "testval")))
+          .build().process(cluster.getSolrClient());
+      assertEquals("testval", rsp._getStr("params/testkey", null));
+
+      new V2Request.Builder("/cluster")
+          .withPayload("{delete-requesthandler: 'foo'}")
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+
+      assertNull(getObjectByPath(map, true, asList("requestHandler", "foo")));
+    } finally {
+      cluster.shutdown();
+    }
+
+  }
+
+  @Test
+  public void testPluginFrompackage() throws Exception {
+    String COLLECTION_NAME = "globalLoaderColl";
+
+    MiniSolrCloudCluster cluster = configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .addConfig("conf", configset("cloud-minimal"))
+        .configure();
+
+    byte[] derFile = readFile("cryptokeys/pub_key512.der");
+    cluster.getZkClient().makePath("/keys/exe", true);
+    cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+
+    try {
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+          "e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc","runtimelibs.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v2.jar.bin"),
+          "79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4",
+          "runtimelibs_v2.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs_v3.jar.bin"),
+          "20e0bfaec71b2e93c4da9f2ed3745dda04dc3fc915b66cc0275863982e73b2a3",
+          "runtimelibs_v3.jar");
+
+      CollectionAdminRequest
+          .createCollection(COLLECTION_NAME, "conf", 2, 1)
+          .setMaxShardsPerNode(100)
+          .process(cluster.getSolrClient());
+
+
+      cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2);
+      String payload = "{add:{name : 'global', version : '1'," +
+          " file:{ id : 'e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar' ," +
+          "sig : 'L3q/qIGs4NaF6JiO0ZkMUFa88j0OmYc+I6O7BOdNuMct/xoZ4h73aZHZGc0+nmI1f/U3bOlMPINlSOM6LK3JpQ=='}}}";
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      String fileId = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add/file/id");
+
+      assertEquals(fileId,
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true,
+              "packages/global/file/id"));
+
+
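+      // Create a request handler, search component and response writer whose classes are loaded
+      // from the 'global' package.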
+      payload = "{\n" +
+          "'create-requesthandler' : { 'name' : '/runtime', 'class': 'org.apache.solr.core.RuntimeLibReqHandler' , 'package':global }," +
+          "'create-searchcomponent' : { 'name' : 'get', 'class': 'org.apache.solr.core.RuntimeLibSearchComponent' , 'package':global }," +
+          "'create-queryResponseWriter' : { 'name' : 'json1', 'class': 'org.apache.solr.core.RuntimeLibResponseWriter' , 'package':global }" +
+          "}";
+      cluster.getSolrClient().request(new ConfigRequest(payload) {
+        @Override
+        public String getCollection() {
+          return COLLECTION_NAME;
+        }
+      });
+
+      SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      GenericSolrRequest req1 = new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/queryResponseWriter/json1", params);
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          req1,
+          Utils.makeMap(
+              "/config/queryResponseWriter/json1/_packageinfo_/file/id", fileId
+          ));
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/searchComponent/get", params),
+          Utils.makeMap(
+              "config/searchComponent/get/_packageinfo_/file/id", fileId
+          ));
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/requestHandler/runtime", params),
+          Utils.makeMap(
+              ":config:requestHandler:/runtime:_packageinfo_:file:id", fileId
+          ));
+
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN));
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/overlay", params),
+          Utils.makeMap(
+              "overlay/queryResponseWriter/json1/class", "org.apache.solr.core.RuntimeLibResponseWriter",
+              "overlay/searchComponent/get/class", "org.apache.solr.core.RuntimeLibSearchComponent"
+          ));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/runtime", params),
+          Utils.makeMap("class", "org.apache.solr.core.RuntimeLibReqHandler",
+              "loader", PackageBag.PackageResourceLoader.class.getName()));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/get?abc=xyz", params),
+          Utils.makeMap("get", "org.apache.solr.core.RuntimeLibSearchComponent",
+              "loader", PackageBag.PackageResourceLoader.class.getName()));
+
+      GenericSolrRequest req = new GenericSolrRequest(SolrRequest.METHOD.GET, "/runtime",
+          new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, "json1")));
+      req.setResponseParser(new ResponseParser() {
+        @Override
+        public String getWriterType() {
+          return "json1";
+        }
+
+        @Override
+        public NamedList<Object> processResponse(InputStream body, String encoding) {
+          return new NamedList<>((Map) Utils.fromJSON(body));
+        }
+
+        @Override
+        public NamedList<Object> processResponse(Reader reader) {
+          return new NamedList<>((Map) Utils.fromJSON(reader));
+
+        }
+
+      });
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          req,
+          Utils.makeMap("wt", "org.apache.solr.core.RuntimeLibResponseWriter",
+              "loader", PackageBag.PackageResourceLoader.class.getName()));
+
+
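+      // Update the package to version 2; plugins that declared package:global should now report
+      // the new jar's file id in their _packageinfo_.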
+      payload = "{update:{name : 'global', version : '2'" +
+          "file : { id : '79298d7d5c3e60d91154efe7d72f4536eac46698edfa22ab894b85492d562ed4-runtimelibs_v2.jar'," +
+          " sig : 'j+Rflxi64tXdqosIhbusqi6GTwZq8znunC/dzwcWW0/dHlFGKDurOaE1Nz9FSPJuXbHkVLj638yZ0Lp1ssnoYA=='}}}";
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      fileId = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update/file/id");
+
+      assertEquals(fileId,
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true,
+              "packages/global/file/id"));
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/queryResponseWriter/json1", params),
+          Utils.makeMap(
+              "/config/queryResponseWriter/json1/_packageinfo_/file/id", fileId
+          ));
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/searchComponent/get", params),
+          Utils.makeMap(
+              "/config/searchComponent/get/_packageinfo_/file/id", fileId
+          ));
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/requestHandler/runtime", params),
+          Utils.makeMap(
+              ":config:requestHandler:/runtime:_packageinfo_:file:id", fileId
+          ));
+
+
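+      // Re-posting the same update should be rejected because the package data has not changed.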
+      try {
+        new V2Request.Builder("/cluster/package")
+            .withPayload(payload)
+            .withMethod(SolrRequest.METHOD.POST)
+            .build().process(cluster.getSolrClient());
+        fail("should have failed");
+      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
+        assertTrue("actual output : " + Utils.toJSONString(e.getMetaData()), e.getMetaData()._getStr("error/details[0]/errorMessages[0]", "").contains("Trying to update a package with the same data"));
+      }
+
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/get?abc=xyz", params),
+          Utils.makeMap("get", "org.apache.solr.core.RuntimeLibSearchComponent",
+              "loader", MemClassLoader.class.getName(),
+              "Version", "2"));
+    } finally {
+      cluster.deleteAllCollections();
+      cluster.shutdown();
+    }
+
+  }
+
+  @AwaitsFix(bugUrl = "https://issues.apache.org/jira/browse/SOLR-13650")
+  public void testCacheLoadFromPackage() throws Exception {
+    String COLLECTION_NAME = "globalCacheColl";
+
+    String overlay = "{" +
+        "    \"props\":{\"query\":{\"documentCache\":{\n" +
+        "          \"class\":\"org.apache.solr.core.MyDocCache\",\n" +
+        "          \"size\":\"512\",\n" +
+        "          \"initialSize\":\"512\" , \"package\":\"cache_pkg\"}}}}";
+    MiniSolrCloudCluster cluster = configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .addConfig("conf", configset("cloud-minimal"),
+            Collections.singletonMap(ConfigOverlay.RESOURCE_NAME, overlay.getBytes(UTF_8)))
+        .configure();
+    try {
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/cache.jar.bin"),
+          "32e8b5b2a95ea306538b52017f0954aa1b0f8a8b2d0acbc498fd0e66a223f7bd","cache.jar");
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/cache_v2.jar.bin"),
+          "0f670f6dcc2b00f9a448a7ebd457d4ff985ab702c85cdb3608dcae9889e8d702",
+          "cache_v2.jar");
+      String payload = "{add:{name : 'cache_pkg', version : '1', " +
+           " file: { id : '32e8b5b2a95ea306538b52017f0954aa1b0f8a8b2d0acbc498fd0e66a223f7bd', " +
+          "sig : 'A2CDnReirpII005KRN1C3pvt4NM4kItsagQPNaa3ljj/5R3LKVgiPuNvqBsffU8n81LOAfr5VMyGFcb4QMHpyg==' }}}";
+
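+      // Adding the package before any public key exists in ZK should fail signature verification.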
+      try {
+        new V2Request.Builder("/cluster/package")
+            .withPayload(payload)
+            .withMethod(SolrRequest.METHOD.POST)
+            .build().process(cluster.getSolrClient());
+        fail("should have failed");
+      } catch (BaseHttpSolrClient.RemoteExecutionException e) {
+        //No key matched signature for jar
+        assertTrue(e.getMetaData()._getStr("/error/details[0]/errorMessages[0]", "")
+            .contains("No public keys in ZK : /keys/exe"));
+      }
+
+      byte[] derFile = readFile("cryptokeys/pub_key512.der");
+      cluster.getZkClient().makePath("/keys/exe", true);
+      cluster.getZkClient().create("/keys/exe/pub_key512.der", derFile, CreateMode.PERSISTENT, true);
+
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+
+      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "add/file/id"),
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(), true, "packages/cache_pkg/file/id"));
+
+      CollectionAdminRequest
+          .createCollection(COLLECTION_NAME, "conf", 2, 1)
+          .setMaxShardsPerNode(100)
+          .process(cluster.getSolrClient());
+
+
+      cluster.waitForActiveCollection(COLLECTION_NAME, 2, 2);
+      SolrParams params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME, WT, JAVABIN));
+
+      NamedList<Object> rsp = cluster.getSolrClient().request(new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/overlay", params));
+      assertEquals("org.apache.solr.core.MyDocCache", rsp._getStr("overlay/props/query/documentCache/class", null));
+
+      String fileId = (String) getObjectByPath(Utils.fromJSONString(payload), true, "add/file/id");
+
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/query/documentCache", params),
+          Utils.makeMap(
+              "/config/query/documentCache/_packageinfo_/file/id", fileId
+          ));
+
+
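+      // Index a document and check that the custom document cache tags results with its synthetic
+      // version field.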
+      UpdateRequest req = new UpdateRequest();
+
+      req.add("id", "1", "desc_s", "document 1")
+          .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true)
+          .setWaitSearcher(true);
+      cluster.getSolrClient().request(req, COLLECTION_NAME);
+
+      SolrQuery solrQuery = new SolrQuery("q", "id:1", "collection", COLLECTION_NAME);
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new QueryRequest(solrQuery),
+          Utils.makeMap("/response[0]/my_synthetic_fld_s", "version_1"));
+
+
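+      // Update cache_pkg to version 2; documents cached after the update should be tagged
+      // "version_2" by the new cache implementation.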
+      payload = "{update:{name : 'cache_pkg', version : '2', " +
+          "file: { id : '0f670f6dcc2b00f9a448a7ebd457d4ff985ab702c85cdb3608dcae9889e8d702-cache_v2.jar' ," +
+          " sig : 'SOrekHt+uup+z2z+nZU5indk2huRRfmbM+W+vQ0variHrcZEG9EXt5LuPFl8Ki9Ahr6klMHdVP8nj4wuQhu/Hg==' }}}";
+
+      new V2Request.Builder("/cluster/package")
+          .withPayload(payload)
+          .withMethod(SolrRequest.METHOD.POST)
+          .build().process(cluster.getSolrClient());
+      fileId = (String) getObjectByPath(Utils.fromJSONString(payload), true, "update/file/id");
+      assertEquals(getObjectByPath(Utils.fromJSONString(payload), true, "update/file/id"),
+          getObjectByPath(new ClusterProperties(cluster.getZkClient()).getClusterProperties(),
+              true, "packages/cache_pkg/file/id"));
+
+      params = new MapSolrParams((Map) Utils.makeMap("collection", COLLECTION_NAME,
+          WT, JAVABIN,
+          "meta", "true"));
+
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new GenericSolrRequest(SolrRequest.METHOD.GET, "/config/query/documentCache", params),
+          Utils.makeMap(
+              "/config/query/documentCache/_packageinfo_/file/id", fileId
+          ));
+      req = new UpdateRequest();
+      req.add("id", "2", "desc_s", "document 1")
+          .setAction(AbstractUpdateRequest.ACTION.COMMIT, true, true)
+          .setWaitSearcher(true);
+      cluster.getSolrClient().request(req, COLLECTION_NAME);
+
+
+      solrQuery = new SolrQuery("q", "id:2", "collection", COLLECTION_NAME);
+      NavigableObject result = assertResponseValues(10,
+          cluster.getSolrClient(),
+          new QueryRequest(solrQuery),
+          Utils.makeMap("response[0]/my_synthetic_fld_s", "version_2"));
+
+    } finally {
+      cluster.deleteAllCollections();
+      cluster.shutdown();
+    }
+  }
+
+  public void testFileStoreManagement() throws Exception {
+    MiniSolrCloudCluster cluster = configureCluster(4)
+        .withJettyConfig(jetty -> jetty.enableV2(true))
+        .addConfig("conf", configset("cloud-minimal"))
+        .configure();
+    try {
+      postFile(cluster.getSolrClient(), getFileContent("runtimecode/runtimelibs.jar.bin"),
+          "e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc", "runtimelibs.jar" );
+
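+      // Every node in the cluster should list the uploaded file via its file store API.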
+      assertResponseValues(10,
+          cluster.getSolrClient(),
+          new V2Request.Builder("/node/filestore/package")
+              .withMethod(SolrRequest.METHOD.GET)
+              .build(),
+          Utils.makeMap("/files/package/e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar", NOT_NULL_PREDICATE )
+          );
+
+      Map expected = Utils.makeMap("/files/package/e1f9e23988c19619402f1040c9251556dcd6e02b9d3e3b966a129ea1be5c70fc-runtimelibs.jar",
+          (Predicate<?>) o -> o != null);
+      for (JettySolrRunner jettySolrRunner : cluster.getJettySolrRunners()) {
+        String url =  jettySolrRunner.getBaseUrl().toString().replace("/solr", "/api") + "/node/filestore/package?wt=javabin";
+        assertResponseValues(20, new Callable() {
+          @Override
+          public NavigableObject call() throws Exception {
+            try (HttpSolrClient solrClient = (HttpSolrClient) jettySolrRunner.newClient()) {
+              return (NavigableObject) Utils.executeGET(solrClient.getHttpClient(), url, JAVABINCONSUMER);
+            }
+          }
+
+          @Override
+          public String toString() {
+            return url;
+          }
+        }, expected);
+
+      }
+
+
+    } finally {
+      cluster.shutdown();
+    }
+
+
+  }
+
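+  /**
+   * Uploads a file to the cluster file store and asserts that the id reported back by the API is
+   * {@code <sha256>-<name>}.
+   */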
+  private void postFile(SolrClient client, ByteBuffer buffer, String sha256, String name) throws SolrServerException, IOException {
+    ModifiableSolrParams params = new ModifiableSolrParams();
+    if (name != null) {
+      params.add("name", name);
+    }
+    V2Response rsp = new V2Request.Builder("/cluster/filestore/package")
+        .withMethod(SolrRequest.METHOD.POST)
+        .withPayload(buffer)
+        .forceV2(true)
+        .withParams(params)
+        .withMimeType("application/octet-stream")
+        .build()
+        .process(client);
+    assertEquals(sha256 + "-" + name, rsp.getResponse().get(CommonParams.ID));
+  }
+
+}
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
index a6dbd9e..356e865 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/MetricsHandlerTest.java
@@ -53,12 +53,12 @@
   @AfterClass
   public static void cleanupMetrics() throws Exception {
     if (null != h) {
-      h.getCoreContainer().getMetricManager().registry("solr.jvm"  ).remove("solrtest_foo");
+      h.getCoreContainer().getMetricManager().registry("solr.jvm").remove("solrtest_foo");
       h.getCoreContainer().getMetricManager().registry("solr.jetty").remove("solrtest_foo");
       h.getCoreContainer().getMetricManager().registry("solr.jetty").remove("solrtest_foo:bar");
     }
   }
-  
+
   @Test
   public void test() throws Exception {
     MetricsHandler handler = new MetricsHandler(h.getCoreContainer());
@@ -145,7 +145,7 @@
     assertNotNull(values.get("metrics"));
     values = (NamedList) values.get("metrics");
     assertEquals(1, values.size());
-    assertEquals(13, ((NamedList)values.get("solr.node")).size());
+    assertEquals(13, ((NamedList) values.get("solr.node")).size());
     assertNotNull(values.get("solr.node"));
     values = (NamedList) values.get("solr.node");
     assertNotNull(values.get("CONTAINER.cores.lazy")); // this is a gauge node
@@ -171,7 +171,7 @@
     assertNotNull(values.get("solr.core.collection1"));
     values = (NamedList) values.get("solr.core.collection1");
     assertEquals(1, values.size());
-    Map m = (Map)values.get("CACHE.core.fieldCache");
+    Map m = (Map) values.get("CACHE.core.fieldCache");
     assertNotNull(m);
     assertNotNull(m.get("entries_count"));
 
@@ -223,7 +223,7 @@
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
       assertTrue(v instanceof Map);
-      Map map = (Map)v;
+      Map map = (Map) v;
       assertTrue(map.size() > 2);
     });
 
@@ -238,7 +238,7 @@
     assertTrue(nl.size() > 0);
     nl.forEach((k, v) -> {
       assertTrue(v instanceof Map);
-      Map map = (Map)v;
+      Map map = (Map) v;
       assertEquals(2, map.size());
       assertNotNull(map.get("inserts"));
       assertNotNull(map.get("size"));
@@ -257,7 +257,7 @@
     Object val = values.findRecursive("metrics", key1);
     assertNotNull(val);
     assertTrue(val instanceof Map);
-    assertTrue(((Map)val).size() >= 2);
+    assertTrue(((Map) val).size() >= 2);
 
     String key2 = "solr.core.collection1:CACHE.core.fieldCache:entries_count";
     resp = new SolrQueryResponse();
@@ -276,7 +276,7 @@
     val = values.findRecursive("metrics", key3);
     assertNotNull(val);
     assertTrue(val instanceof Number);
-    assertEquals(3, ((Number)val).intValue());
+    assertEquals(3, ((Number) val).intValue());
 
     // test multiple keys
     resp = new SolrQueryResponse();
@@ -306,7 +306,7 @@
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "foo", MetricsHandler.KEY_PARAM, "foo:bar:baz:xyz"), resp);
     values = resp.getValues();
-    NamedList metrics = (NamedList)values.get("metrics");
+    NamedList metrics = (NamedList) values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "foo"));
     assertNotNull(values.findRecursive("errors", "foo:bar:baz:xyz"));
@@ -316,7 +316,7 @@
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "foo:bar:baz"), resp);
     values = resp.getValues();
-    metrics = (NamedList)values.get("metrics");
+    metrics = (NamedList) values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "foo:bar:baz"));
 
@@ -325,7 +325,7 @@
     handler.handleRequestBody(req(CommonParams.QT, "/admin/metrics", CommonParams.WT, "json",
         MetricsHandler.KEY_PARAM, "solr.jetty:unknown:baz"), resp);
     values = resp.getValues();
-    metrics = (NamedList)values.get("metrics");
+    metrics = (NamedList) values.get("metrics");
     assertEquals(0, metrics.size());
     assertNotNull(values.findRecursive("errors", "solr.jetty:unknown:baz"));
   }
diff --git a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
index 01a5bca..1762ec6 100644
--- a/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
+++ b/solr/core/src/test/org/apache/solr/handler/admin/TestApiFramework.java
@@ -19,14 +19,20 @@
 
 import java.util.Collections;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.solr.SolrTestCaseJ4;
+import org.apache.solr.api.Api;
 import org.apache.solr.api.ApiBag;
+import org.apache.solr.api.V2HttpCall;
 import org.apache.solr.api.V2HttpCall.CompositeApi;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.common.params.MapSolrParams;
+import org.apache.solr.common.util.CommandOperation;
+import org.apache.solr.common.util.PathTrie;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.common.util.Utils;
 import org.apache.solr.common.util.ValidatingJsonMap;
@@ -39,10 +45,6 @@
 import org.apache.solr.request.SolrQueryRequest;
 import org.apache.solr.request.SolrRequestHandler;
 import org.apache.solr.response.SolrQueryResponse;
-import org.apache.solr.api.Api;
-import org.apache.solr.api.V2HttpCall;
-import org.apache.solr.common.util.CommandOperation;
-import org.apache.solr.common.util.PathTrie;
 
 import static org.apache.solr.api.ApiBag.EMPTY_SPEC;
 import static org.apache.solr.client.solrj.SolrRequest.METHOD.GET;
@@ -71,7 +73,7 @@
     Map<String, String> parts = new HashMap<>();
     String fullPath = "/collections/hello/shards";
     Api api = V2HttpCall.getApiInfo(containerHandlers, fullPath, "POST",
-       fullPath, parts);
+        fullPath, parts);
     assertNotNull(api);
     assertConditions(api.getSpec(), Utils.makeMap(
         "/methods[0]", "POST",
@@ -81,7 +83,7 @@
 
     parts = new HashMap<>();
     api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello/shards", "POST",
-      null, parts);
+        null, parts);
     assertConditions(api.getSpec(), Utils.makeMap(
         "/methods[0]", "POST",
         "/commands/split", NOT_NULL,
@@ -102,7 +104,7 @@
 
     parts = new HashMap<>();
     api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello", "POST",
-       null, parts);
+        null, parts);
     assertConditions(api.getSpec(), Utils.makeMap(
         "/methods[0]", "POST",
         "/commands/add-replica-property", NOT_NULL,
@@ -111,7 +113,7 @@
     assertEquals("hello", parts.get("collection"));
 
     api = V2HttpCall.getApiInfo(containerHandlers, "/collections/hello/shards/shard1/replica1", "DELETE",
-       null, parts);
+        null, parts);
     assertConditions(api.getSpec(), Utils.makeMap(
         "/methods[0]", "DELETE",
         "/url/params/onlyIfDown/type", "boolean"
@@ -122,18 +124,21 @@
 
     SolrQueryResponse rsp = invoke(containerHandlers, null, "/collections/_introspect", GET, mockCC);
 
-    assertConditions(rsp.getValues().asMap(2), Utils.makeMap(
-        "/spec[0]/methods[0]", "DELETE",
-        "/spec[1]/methods[0]", "POST",
-        "/spec[2]/methods[0]", "GET"
+    Set<String> methodNames = new HashSet<>();
+    methodNames.add(rsp.getValues()._getStr("/spec[0]/methods[0]", null));
+    methodNames.add(rsp.getValues()._getStr("/spec[1]/methods[0]", null));
+    methodNames.add(rsp.getValues()._getStr("/spec[2]/methods[0]", null));
+    assertTrue(methodNames.contains("DELETE"));
+    assertTrue(methodNames.contains("POST"));
+    assertTrue(methodNames.contains("GET"));
 
-    ));
+    methodNames = new HashSet<>();
 
     rsp = invoke(coreHandlers, "/schema/_introspect", "/collections/hello/schema/_introspect", GET, mockCC);
-    assertConditions(rsp.getValues().asMap(2), Utils.makeMap(
-        "/spec[0]/methods[0]", "POST",
-        "/spec[0]/commands", NOT_NULL,
-        "/spec[1]/methods[0]", "GET"));
+    methodNames.add(rsp.getValues()._getStr("/spec[0]/methods[0]", null));
+    methodNames.add(rsp.getValues()._getStr("/spec[1]/methods[0]", null));
+    assertTrue(methodNames.contains("POST"));
+    assertTrue(methodNames.contains("GET"));
 
     rsp = invoke(coreHandlers, "/", "/collections/hello/_introspect", GET, mockCC);
     assertConditions(rsp.getValues().asMap(2), Utils.makeMap(
@@ -146,16 +151,17 @@
     ));
 
   }
-  public void testTrailingTemplatePaths(){
-    PathTrie<Api> registry =  new PathTrie<>();
+
+  public void testTrailingTemplatePaths() {
+    PathTrie<Api> registry = new PathTrie<>();
     Api api = new Api(EMPTY_SPEC) {
       @Override
       public void call(SolrQueryRequest req, SolrQueryResponse rsp) {
 
       }
     };
-    Api intropsect = new ApiBag.IntrospectApi(api,false);
-    ApiBag.registerIntrospect(Collections.emptyMap(),registry,"/c/.system/blob/{name}",intropsect);
+    Api intropsect = new ApiBag.IntrospectApi(api, false);
+    ApiBag.registerIntrospect(Collections.emptyMap(), registry, "/c/.system/blob/{name}", intropsect);
     ApiBag.registerIntrospect(Collections.emptyMap(), registry, "/c/.system/{x}/{name}", intropsect);
     assertEquals(intropsect, registry.lookup("/c/.system/blob/random_string/_introspect", new HashMap<>()));
     assertEquals(intropsect, registry.lookup("/c/.system/blob/_introspect", new HashMap<>()));
@@ -163,6 +169,7 @@
     assertEquals(intropsect, registry.lookup("/c/.system/v1/_introspect", new HashMap<>()));
     assertEquals(intropsect, registry.lookup("/c/.system/v1/v2/_introspect", new HashMap<>()));
   }
+
   private SolrQueryResponse invoke(PluginBag<SolrRequestHandler> reqHandlers, String path,
                                    String fullPath, SolrRequest.METHOD method,
                                    CoreContainer mockCC) {
@@ -184,14 +191,14 @@
     }
 
     SolrQueryResponse rsp = new SolrQueryResponse();
-    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new MapSolrParams(new HashMap<>())){
+    LocalSolrQueryRequest req = new LocalSolrQueryRequest(null, new MapSolrParams(new HashMap<>())) {
       @Override
       public List<CommandOperation> getCommands(boolean validateInput) {
         return Collections.emptyList();
       }
     };
 
-    api.call(req,rsp);
+    api.call(req, rsp);
     return rsp;
 
   }
@@ -201,12 +208,12 @@
     for (Object o : conditions.entrySet()) {
       Map.Entry e = (Map.Entry) o;
       String path = (String) e.getKey();
-      List<String> parts = StrUtils.splitSmart(path, path.charAt(0) == '/' ?  '/':' ', true);
+      List<String> parts = StrUtils.splitSmart(path, path.charAt(0) == '/' ? '/' : ' ', true);
       Object val = Utils.getObjectByPath(root, false, parts);
       if (e.getValue() instanceof ValidatingJsonMap.PredicateWithErrMsg) {
         ValidatingJsonMap.PredicateWithErrMsg value = (ValidatingJsonMap.PredicateWithErrMsg) e.getValue();
         String err = value.test(val);
-        if(err != null){
+        if (err != null) {
           assertEquals(err + " for " + e.getKey() + " in :" + Utils.toJSONString(root), e.getValue(), val);
         }
 
diff --git a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
index 7989d8e..05e7557 100644
--- a/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
+++ b/solr/core/src/test/org/apache/solr/search/TestLFUCache.java
@@ -63,7 +63,7 @@
   @Test
   public void testTimeDecayParams() throws IOException {
     h.getCore().withSearcher(searcher -> {
-      LFUCache cacheDecayTrue = (LFUCache) searcher.getCache("lfuCacheDecayTrue");
+      LFUCache cacheDecayTrue = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayTrue")).get();
       assertNotNull(cacheDecayTrue);
       Map<String,Object> stats = cacheDecayTrue.getMetricsMap().getValue();
       assertTrue((Boolean) stats.get("timeDecay"));
@@ -74,7 +74,7 @@
       addCache(cacheDecayTrue, 11, 12, 13, 14, 15);
       assertCache(cacheDecayTrue, 1, 2, 3, 4, 5, 12, 13, 14, 15);
 
-      LFUCache cacheDecayDefault = (LFUCache) searcher.getCache("lfuCacheDecayDefault");
+      LFUCache cacheDecayDefault = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayDefault")).get();
       assertNotNull(cacheDecayDefault);
       stats = cacheDecayDefault.getMetricsMap().getValue();
       assertTrue((Boolean) stats.get("timeDecay"));
@@ -88,7 +88,7 @@
       addCache(cacheDecayDefault, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21);
       assertCache(cacheDecayDefault, 1, 2, 3, 4, 5, 17, 18, 19, 20, 21);
 
-      LFUCache cacheDecayFalse = (LFUCache) searcher.getCache("lfuCacheDecayFalse");
+      LFUCache cacheDecayFalse = (LFUCache) ((SolrCacheHolder) searcher.getCache("lfuCacheDecayFalse")).get();
       assertNotNull(cacheDecayFalse);
       stats = cacheDecayFalse.getMetricsMap().getValue();
       assertFalse((Boolean) stats.get("timeDecay"));
diff --git a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
index 39c5e1c..96c82ab 100644
--- a/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
+++ b/solr/core/src/test/org/apache/solr/security/BasicAuthIntegrationTest.java
@@ -138,7 +138,9 @@
 
       final SolrRequest genericReq;
       if (isUseV2Api) {
-        genericReq = new V2Request.Builder("/cluster/security/authentication").withMethod(SolrRequest.METHOD.POST).build();
+        genericReq = new V2Request.Builder("/cluster/security/authentication")
+            .withPayload(command)
+            .withMethod(SolrRequest.METHOD.POST).build();
       } else {
         genericReq = new GenericSolrRequest(SolrRequest.METHOD.POST, authcPrefix, new ModifiableSolrParams());
         ((GenericSolrRequest)genericReq).setContentWriter(new StringPayloadContentWriter(command, CommonParams.JSON_MIME));
diff --git a/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java b/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java
index 889b0bf..6cee3d9 100644
--- a/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java
+++ b/solr/core/src/test/org/apache/solr/update/processor/RuntimeUrp.java
@@ -31,7 +31,7 @@
     List<String>  names = new ArrayList<>();
     for (UpdateRequestProcessorFactory p : processorChain.getProcessors()) {
       if (p instanceof UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) {
-        p = ((UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) p).delegate;
+        p = ((UpdateRequestProcessorChain.LazyUpdateProcessorFactoryHolder.LazyUpdateRequestProcessorFactory) p).getDelegate();
       }
       names.add(p.getClass().getSimpleName());
     }
diff --git a/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc b/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc
index 2f5ed58..5738c6f 100644
--- a/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc
+++ b/solr/solr-ref-guide/src/adding-custom-plugins-in-solrcloud-mode.adoc
@@ -128,11 +128,11 @@
 ----
  curl -o runtimelibs.jar   -LO https://github.com/apache/lucene-solr/blob/master/solr/core/src/test-files/runtimecode/runtimelibs.jar.bin?raw=true
 ----
-Step 2: Get the `sha512` hash of the jar
+Step 2: Get the `sha256` hash of the jar
 
 [source,bash]
 ----
- openssl dgst -sha512 runtimelibs.jar
+ openssl dgst -sha256 runtimelibs.jar
 ----
 
 Step 3 :  Start solr with runtime lib enabled
@@ -154,9 +154,9 @@
 [source,bash]
 ----
  curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{
-    "add-runtimelib": { "name" : "testjar",
+    "add-package": { "name" : "my-pkg",
     "url":"http://localhost:8000/runtimelibs.jar" ,
-    "sha512" : "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"}
+    "sha256" : "d01b51de67ae1680a84a813983b1de3b592fc32f1a22b662fc9057da5953abd1b72476388ba342cad21671cd0b805503c78ab9075ff2f3951fdf75fa16981420"}
     }'
 ----
 
@@ -166,7 +166,7 @@
 ----
 curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{
     "create-requesthandler": { "name" : "/test",
-    'class': 'org.apache.solr.core.RuntimeLibReqHandler', 'runtimeLib' : true}
+    "class": "org.apache.solr.core.RuntimeLibReqHandler", "package" : "my-pkg" }
     }'
 ----
 
@@ -198,15 +198,15 @@
 Example:
 
 * Host the new jar to a new url. eg:  http://localhost:8000/runtimelibs_v2.jar
-* get the `sha512` hash of the new jar
+* get the `sha256` hash of the new jar
-* run the update-runtime lib command
+* run the `update-package` command
 
 [source,bash]
 ----
  curl http://localhost:8983/solr/gettingstarted/config -H 'Content-type:application/json' -d '{
-    "update-runtimelib": { "name" : "testjar",
+    "update-package": { "name" : "my-pkg",
     "url":"http://localhost:8000/runtimelibs_v2.jar" ,
-    "sha512" : "<replace-the-new-sha512-digest-here>"}
+    "sha256" : "<replace-the-new-sha256-digest-here>"}
     }'
 ----
 NOTE: Always upload your jar to a new url as the Solr cluster is still referring to the old jar. If the existing jar is modified it can cause errors as the hash may not match
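+
+To check which package jar a plugin is currently bound to, ask the config API for the plugin's metadata with `meta=true`. A request along these lines (assuming the `gettingstarted` collection and the `/test` handler created above) should include a `_packageinfo_` section with the file id of the jar the plugin was loaded from:
+
+[source,bash]
+----
+ curl "http://localhost:8983/solr/gettingstarted/config/requestHandler?componentName=/test&meta=true"
+----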
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java
index ef52eb2..73eb863 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/SolrResponse.java
@@ -18,10 +18,12 @@
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
+import java.io.IOException;
 import java.io.ObjectInputStream;
 import java.io.ObjectOutputStream;
 import java.io.Serializable;
 
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.util.NamedList;
@@ -32,7 +34,7 @@
  * 
  * @since solr 1.3
  */
-public abstract class SolrResponse implements Serializable {
+public abstract class SolrResponse implements Serializable, MapWriter {
 
   /** Elapsed time in milliseconds for the request as seen from the client. */
   public abstract long getElapsedTime();
@@ -43,6 +45,11 @@
   
   public abstract NamedList<Object> getResponse();
 
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    getResponse().writeMap(ew);
+  }
+
   public Exception getException() {
     NamedList exp = (NamedList) getResponse().get("exception");
     if (exp == null) {
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java
index 74d0bbc..4f0fa2d 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/CollectionApiMapping.java
@@ -46,7 +46,6 @@
 import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_CMD;
 import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_CMD_STATUS;
 import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_CMD_STATUS_DELETE;
-import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.CLUSTER_NODES;
 import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.COLLECTIONS;
 import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.COLLECTIONS_COMMANDS;
 import static org.apache.solr.client.solrj.request.CollectionApiMapping.EndPoint.COLLECTION_STATE;
@@ -203,10 +202,7 @@
         POST,
         CLUSTERPROP,
         "set-property",null),
-    SET_CLUSTER_PROPERTY_OBJ(CLUSTER_CMD,
-        POST,
-        null,
-        "set-obj-property", null),
+
     UTILIZE_NODE(CLUSTER_CMD,
         POST,
         UTILIZENODE,
@@ -223,10 +219,8 @@
         "restore-collection",
         null
     ),
-    GET_NODES(CLUSTER_NODES, GET, null),
     FORCE_LEADER(PER_COLLECTION_PER_SHARD_COMMANDS, POST, CollectionAction.FORCELEADER, "force-leader", null),
-    BALANCE_SHARD_UNIQUE(PER_COLLECTION, POST, BALANCESHARDUNIQUE,"balance-shard-unique" , null)
-    ;
+    BALANCE_SHARD_UNIQUE(PER_COLLECTION, POST, BALANCESHARDUNIQUE,"balance-shard-unique" , null);
 
     public final String commandName;
     public final EndPoint endPoint;
@@ -454,7 +448,6 @@
   }
   public interface CommandMeta {
     String getName();
-
     /**
      * the http method supported by this command
      */
diff --git a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
index 4236177..0d19e1f 100644
--- a/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
+++ b/solr/solrj/src/java/org/apache/solr/client/solrj/request/V2Request.java
@@ -18,11 +18,15 @@
 package org.apache.solr.client.solrj.request;
 
 import java.io.IOException;
+import java.io.InputStream;
 import java.io.OutputStream;
+import java.nio.ByteBuffer;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.io.IOUtils;
+import org.apache.solr.client.solrj.ResponseParser;
 import org.apache.solr.client.solrj.SolrClient;
 import org.apache.solr.client.solrj.SolrRequest;
 import org.apache.solr.client.solrj.response.V2Response;
@@ -42,8 +46,10 @@
   private SolrParams solrParams;
   public final boolean useBinary;
   private String collection;
+  private String mimeType;
   private boolean forceV2 = false;
   private boolean isPerCollectionRequest = false;
+  private ResponseParser parser;
 
   private V2Request(METHOD m, String resource, boolean useBinary) {
     super(m, resource);
@@ -75,6 +81,15 @@
     return new RequestWriter.ContentWriter() {
       @Override
       public void write(OutputStream os) throws IOException {
+        if (payload instanceof ByteBuffer) {
+          ByteBuffer b = (ByteBuffer) payload;
+          os.write(b.array(), b.arrayOffset(), b.limit());
+          return;
+        }
+        if (payload instanceof InputStream) {
+          IOUtils.copy((InputStream) payload, os);
+          return;
+        }
         if (useBinary) {
           new JavaBinCodec().marshal(payload, os);
         } else {
@@ -84,6 +99,7 @@
 
       @Override
       public String getContentType() {
+        if (mimeType != null) return mimeType;
         return useBinary ? JAVABIN_MIME : JSON_MIME;
       }
     };
@@ -111,6 +127,12 @@
     ew.putIfNotNull("command", payload);
   }
 
+  @Override
+  public ResponseParser getResponseParser() {
+    if (parser != null) return parser;
+    return super.getResponseParser();
+  }
+
   public static class Builder {
     private String resource;
     private METHOD method = METHOD.GET;
@@ -119,6 +141,8 @@
     private boolean useBinary = false;
 
     private boolean forceV2EndPoint = false;
+    private ResponseParser parser;
+    private String mimeType;
 
     /**
      * Create a Builder object based on the provided resource.
@@ -173,11 +197,24 @@
       return this;
     }
 
+    public Builder withResponseParser(ResponseParser parser) {
+      this.parser = parser;
+      return this;
+    }
+
+    public Builder withMimeType(String mimeType) {
+      this.mimeType = mimeType;
+      return this;
+    }
+
     public V2Request build() {
       V2Request v2Request = new V2Request(method, resource, useBinary);
       v2Request.solrParams = params;
       v2Request.payload = payload;
       v2Request.forceV2 = forceV2EndPoint;
+      v2Request.mimeType = mimeType;
+      v2Request.parser = parser;
       return v2Request;
     }
   }
diff --git a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
index 926cf4c..75a38d2 100644
--- a/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
+++ b/solr/solrj/src/java/org/apache/solr/common/MapWriter.java
@@ -160,4 +160,5 @@
       return (k, v) -> putNoEx(k,v);
     }
   }
+  MapWriter EMPTY = ew -> { };
 }
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
index 96e5371..fa35e88 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ClusterProperties.java
@@ -89,14 +89,18 @@
     return value;
   }
 
+  public Map<String, Object> getClusterProperties() throws IOException {
+    return getClusterProperties(new Stat());
+  }
+
   /**
    * Return the cluster properties
    * @throws IOException if there is an error reading properties from the cluster
    */
   @SuppressWarnings("unchecked")
-  public Map<String, Object> getClusterProperties() throws IOException {
+  public Map<String, Object> getClusterProperties(Stat stat) throws IOException {
     try {
-      Map<String, Object> properties = (Map<String, Object>) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, new Stat(), true));
+      Map<String, Object> properties = (Map<String, Object>) Utils.fromJSON(client.getData(ZkStateReader.CLUSTER_PROPS, null, stat, true));
       return convertCollectionDefaultsToNestedFormat(properties);
     } catch (KeeperException.NoNodeException e) {
       return Collections.emptyMap();
@@ -105,6 +109,12 @@
     }
   }
 
+  /**
+   * Applies the given map on top of the existing cluster properties. This is a merge operation,
+   * not an overwrite, and the change is applied atomically over the existing object tree even if
+   * multiple nodes try to update it simultaneously.
+   *
+   * @param properties the partial object tree to be applied
+   */
   public void setClusterProperties(Map<String, Object> properties) throws IOException, KeeperException, InterruptedException {
     client.atomicUpdate(ZkStateReader.CLUSTER_PROPS, zkData -> {
       if (zkData == null) return Utils.toJSON(convertCollectionDefaultsToNestedFormat(properties));
diff --git a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
index dcf7d9e..31ff3f5 100644
--- a/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
+++ b/solr/solrj/src/java/org/apache/solr/common/cloud/ZkStateReader.java
@@ -110,6 +110,7 @@
   public static final String ALIASES = "/aliases.json";
   public static final String CLUSTER_STATE = "/clusterstate.json";
   public static final String CLUSTER_PROPS = "/clusterprops.json";
+  public static final String PACKAGE_REPO = "/package_repo.json";
   public static final String COLLECTION_PROPS_ZKNODE = "collectionprops.json";
   public static final String REJOIN_AT_HEAD_PROP = "rejoinAtHead";
   public static final String SOLR_SECURITY_CONF_PATH = "/security.json";
@@ -156,7 +157,6 @@
 
   private static final int GET_LEADER_RETRY_INTERVAL_MS = 50;
   private static final int GET_LEADER_RETRY_DEFAULT_TIMEOUT = Integer.parseInt(System.getProperty("zkReaderGetLeaderRetryTimeoutMs", "4000"));
-  ;
 
   public static final String LEADER_ELECT_ZKNODE = "leader_elect";
 
@@ -194,6 +194,7 @@
   private final ConcurrentHashMap<String, PropsWatcher> collectionPropsWatchers = new ConcurrentHashMap<>();
 
   private volatile SortedSet<String> liveNodes = emptySortedSet();
+  private volatile int clusterPropsVersion = -1;
 
   private volatile Map<String, Object> clusterProperties = Collections.emptyMap();
 
@@ -493,40 +494,20 @@
     return collection.getZNodeVersion();
   }
 
-  public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException,
-      InterruptedException {
-    // We need to fetch the current cluster state and the set of live nodes
-
-    log.debug("Updating cluster state from ZooKeeper... ");
-
-    // Sanity check ZK structure.
-    if (!zkClient.exists(CLUSTER_STATE, true)) {
-      throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
-          "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
+  private final Watcher clusterPropertiesWatcher = event -> {
+    // session events are not change events, and do not remove the watcher
+    if (Watcher.Event.EventType.None.equals(event.getType())) {
+      return;
     }
-
-    // on reconnect of SolrZkClient force refresh and re-add watches.
     loadClusterProperties();
-    refreshLiveNodes(new LiveNodeWatcher());
-    refreshLegacyClusterState(new LegacyClusterStateWatcher());
-    refreshStateFormat2Collections();
-    refreshCollectionList(new CollectionsChildWatcher());
-    refreshAliases(aliasesManager);
+  };
 
-    if (securityNodeListener != null) {
-      addSecurityNodeWatcher(pair -> {
-        ConfigData cd = new ConfigData();
-        cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false);
-        cd.version = pair.second() == null ? -1 : pair.second().getVersion();
-        securityData = cd;
-        securityNodeListener.run();
-      });
-      securityData = getSecurityProps(true);
+  public void forceRefreshClusterProps(int expectedVersion) {
+    log.debug("Expected version of clusterprops.json is {} , my version is {}", expectedVersion, clusterPropsVersion);
+    if (expectedVersion > clusterPropsVersion) {
+      log.info("reloading clusterprops.json");
+      loadClusterProperties();
     }
-
-    collectionPropsObservers.forEach((k, v) -> {
-      collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
-    });
   }
 
   private void addSecurityNodeWatcher(final Callable<Pair<byte[], Stat>> callback)
@@ -1102,22 +1083,52 @@
     return Collections.unmodifiableMap(clusterProperties);
   }
 
-  private final Watcher clusterPropertiesWatcher = event -> {
-    // session events are not change events, and do not remove the watcher
-    if (Watcher.Event.EventType.None.equals(event.getType())) {
-      return;
+  public synchronized void createClusterStateWatchersAndUpdate() throws KeeperException,
+      InterruptedException {
+    // We need to fetch the current cluster state and the set of live nodes
+
+    log.debug("Updating cluster state from ZooKeeper... ");
+
+    // Sanity check ZK structure.
+    if (!zkClient.exists(CLUSTER_STATE, true)) {
+      throw new SolrException(ErrorCode.SERVICE_UNAVAILABLE,
+          "Cannot connect to cluster at " + zkClient.getZkServerAddress() + ": cluster not found/not ready");
     }
+
+    // on reconnect of SolrZkClient force refresh and re-add watches.
     loadClusterProperties();
-  };
+    refreshLiveNodes(new LiveNodeWatcher());
+    refreshLegacyClusterState(new LegacyClusterStateWatcher());
+    refreshStateFormat2Collections();
+    refreshCollectionList(new CollectionsChildWatcher());
+    refreshAliases(aliasesManager);
+
+    if (securityNodeListener != null) {
+      addSecurityNodeWatcher(pair -> {
+        ConfigData cd = new ConfigData();
+        cd.data = pair.first() == null || pair.first().length == 0 ? EMPTY_MAP : Utils.getDeepCopy((Map) fromJSON(pair.first()), 4, false);
+        cd.version = pair.second() == null ? -1 : pair.second().getVersion();
+        securityData = cd;
+        securityNodeListener.run();
+      });
+      securityData = getSecurityProps(true);
+    }
+
+    collectionPropsObservers.forEach((k, v) -> {
+      collectionPropsWatchers.computeIfAbsent(k, PropsWatcher::new).refreshAndWatch(true);
+    });
+  }
 
   @SuppressWarnings("unchecked")
   private void loadClusterProperties() {
     try {
       while (true) {
         try {
-          byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, new Stat(), true);
+          Stat stat = new Stat();
+          byte[] data = zkClient.getData(ZkStateReader.CLUSTER_PROPS, clusterPropertiesWatcher, stat, true);
           this.clusterProperties = ClusterProperties.convertCollectionDefaultsToNestedFormat((Map<String, Object>) Utils.fromJSON(data));
-          log.debug("Loaded cluster properties: {}", this.clusterProperties);
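+          // Remember the znode version of clusterprops.json so forceRefreshClusterProps() can
+          // skip a reload when it already has the expected version.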
+          this.clusterPropsVersion = stat.getVersion();
+          log.debug("Loaded cluster properties: {} to version {}", this.clusterProperties, clusterPropsVersion);
 
           for (ClusterPropertiesListener listener : clusterPropertiesListeners) {
             listener.onChange(getClusterProperties());
@@ -1125,6 +1136,7 @@
           return;
         } catch (KeeperException.NoNodeException e) {
           this.clusterProperties = Collections.emptyMap();
+          this.clusterPropsVersion = -1;
           log.debug("Loaded empty cluster properties");
           // set an exists watch, and if the node has been created since the last call,
           // read the data again
@@ -1137,6 +1149,10 @@
     }
   }
 
+  public int getClusterPropsVersion() {
+    return clusterPropsVersion;
+  }
+
   /**
    * Get collection properties for a given collection. If the collection is watched, simply return it from the cache,
    * otherwise fetch it directly from zookeeper. This is a convenience for {@code getCollectionProperties(collection,0)}
@@ -1556,7 +1572,7 @@
         Stat stat = new Stat();
         byte[] data = zkClient.getData(collectionPath, watcher, stat, true);
         ClusterState state = ClusterState.load(stat.getVersion(), data,
-            Collections.<String>emptySet(), collectionPath);
+            Collections.emptySet(), collectionPath);
         ClusterState.CollectionRef collectionRef = state.getCollectionStates().get(coll);
         return collectionRef == null ? null : collectionRef.get();
       } catch (KeeperException.NoNodeException e) {
diff --git a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
index d8f1410..6529a86 100644
--- a/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
+++ b/solr/solrj/src/java/org/apache/solr/common/params/CommonParams.java
@@ -231,7 +231,7 @@
       }
       return null;
     }
-  };
+  }
 
   /** which parameters to log (if not supplied all parameters will be logged) **/
   String LOG_PARAMS_LIST = "logParamsList";
@@ -293,5 +293,8 @@
 
   String JAVABIN_MIME = "application/javabin";
 
+  String PACKAGES = "packages";
+  String PACKAGE = "package";
+
 }
 
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
index 277324a..3804f78 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/CommandOperation.java
@@ -28,6 +28,7 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.solr.common.MapWriter;
 import org.apache.solr.common.SolrException;
 import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
@@ -38,7 +39,7 @@
 import static org.apache.solr.common.util.StrUtils.formatString;
 import static org.apache.solr.common.util.Utils.toJSON;
 
-public class CommandOperation {
+public class CommandOperation implements MapWriter {
   public final String name;
   private Object commandData;//this is most often a map
   private List<String> errors = new ArrayList<>();
@@ -386,4 +387,10 @@
     if (o == null) return null;
     return getInt(name, null);
   }
+
+  @Override
+  public void writeMap(EntryWriter ew) throws IOException {
+    ew.put(name, commandData);
+    ew.putIfNotNull("errors", errors);
+  }
 }
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java
index a053a18..e5bad27 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/ExecutorUtil.java
@@ -72,6 +72,7 @@
   }
 
   public static void shutdownAndAwaitTermination(ExecutorService pool) {
+    if (pool == null) return;
     pool.shutdown(); // Disable new tasks from being submitted
     awaitTermination(pool);
   }
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java
index c0b19f5..9a68c3b 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/StrUtils.java
@@ -40,6 +40,15 @@
 
   }
 
+  static final String DELIM_CHARS = "/:;.,%#";
+
+  /**
+   * Splits the string on {@code sep}, trimming empty tokens. If the string starts with one of
+   * {@link #DELIM_CHARS}, that leading character is used as the separator instead.
+   */
+  public static List<String> split(String s, char sep) {
+    if (DELIM_CHARS.indexOf(s.charAt(0)) > -1) {
+      sep = s.charAt(0);
+    }
+    return splitSmart(s, sep, true);
+  }
+
   public static List<String> splitSmart(String s, char separator, boolean trimEmpty) {
     List<String> l = splitSmart(s, separator);
     if(trimEmpty){
@@ -148,7 +157,7 @@
    */
   public static List<String> splitFileNames(String fileNames) {
     if (fileNames == null)
-      return Collections.<String>emptyList();
+      return Collections.emptyList();
 
     List<String> result = new ArrayList<>();
     for (String file : fileNames.split("(?<!\\\\),")) {
diff --git a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
index 4a8d987..cd2e8cc 100644
--- a/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
+++ b/solr/solrj/src/java/org/apache/solr/common/util/Utils.java
@@ -28,6 +28,7 @@
 import java.lang.invoke.MethodHandles;
 import java.net.URL;
 import java.net.URLDecoder;
+import java.nio.BufferOverflowException;
 import java.nio.ByteBuffer;
 import java.nio.charset.StandardCharsets;
 import java.util.AbstractMap;
@@ -53,6 +54,9 @@
 import java.util.regex.Pattern;
 
 import org.apache.http.HttpEntity;
+import org.apache.http.HttpResponse;
+import org.apache.http.client.HttpClient;
+import org.apache.http.client.methods.HttpGet;
 import org.apache.http.util.EntityUtils;
 import org.apache.solr.client.solrj.cloud.DistribStateManager;
 import org.apache.solr.client.solrj.cloud.autoscaling.VersionedData;
@@ -91,7 +95,7 @@
   public static final Function NEW_SYNCHRONIZED_ARRAYLIST_FUN = o -> Collections.synchronizedList(new ArrayList<>());
   public static final Function NEW_HASHSET_FUN = o -> new HashSet<>();
   private static final Logger log = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
-  
+
   public static Map getDeepCopy(Map map, int maxDepth) {
     return getDeepCopy(map, maxDepth, true, false);
   }
@@ -100,21 +104,18 @@
     return getDeepCopy(map, maxDepth, mutable, false);
   }
 
-  public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
-    if(map == null) return null;
-    if (maxDepth < 1) return map;
-    Map copy;
-    if (sorted) {
-      copy = new TreeMap();
-    } else {
-      copy = map instanceof LinkedHashMap?  new LinkedHashMap(map.size()): new HashMap(map.size());
+  public static final Function<JSONParser, ObjectBuilder> MAPWRITEROBJBUILDER = jsonParser -> {
+    try {
+      return new ObjectBuilder(jsonParser) {
+        @Override
+        public Object newObject() {
+          return new LinkedHashMapWriter();
+        }
+      };
+    } catch (IOException e) {
+      throw new RuntimeException(e);
     }
-    for (Object o : map.entrySet()) {
-      Map.Entry e = (Map.Entry) o;
-      copy.put(e.getKey(), makeDeepCopy(e.getValue(),maxDepth, mutable, sorted));
-    }
-    return mutable ? copy : Collections.unmodifiableMap(copy);
-  }
+  };
 
   public static void forEachMapEntry(Object o, String path, BiConsumer fun) {
     Object val = Utils.getObjectByPath(o, false, path);
@@ -144,6 +145,40 @@
       ((Map) o).forEach((k, v) -> fun.accept(k, v));
     }
   }
+  public static final Function<JSONParser, ObjectBuilder> MAPOBJBUILDER = jsonParser -> {
+    try {
+      return new ObjectBuilder(jsonParser) {
+        @Override
+        public Object newObject() {
+          return new HashMap();
+        }
+      };
+    } catch (IOException e) {
+      throw new RuntimeException(e);
+    }
+  };
+  public static final Pattern ARRAY_ELEMENT_INDEX = Pattern
+      .compile("(\\S*?)\\[([-]?\\d+)\\]");
+
+  public static Map getDeepCopy(Map map, int maxDepth, boolean mutable, boolean sorted) {
+    if (map == null) return null;
+    if (maxDepth < 1) return map;
+    Map copy;
+    if (sorted) {
+      copy = new TreeMap();
+    } else {
+      copy = map instanceof LinkedHashMap ? new LinkedHashMap(map.size()) : new HashMap(map.size());
+    }
+    for (Object o : map.entrySet()) {
+      Map.Entry e = (Map.Entry) o;
+      copy.put(e.getKey(), makeDeepCopy(e.getValue(), maxDepth, mutable, sorted));
+    }
+    return mutable ? copy : Collections.unmodifiableMap(copy);
+  }
+
+  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
+    return getDeepCopy(c, maxDepth, mutable, false);
+  }
 
   private static Object makeDeepCopy(Object v, int maxDepth, boolean mutable, boolean sorted) {
     if (v instanceof MapWriter && maxDepth > 1) {
@@ -151,7 +186,7 @@
     } else if (v instanceof IteratorWriter && maxDepth > 1) {
       v = ((IteratorWriter) v).toList(new ArrayList<>());
       if (sorted) {
-        Collections.sort((List)v);
+        Collections.sort((List) v);
       }
     }
 
@@ -163,29 +198,6 @@
     return v;
   }
 
-  public static InputStream toJavabin(Object o) throws IOException {
-    try (final JavaBinCodec jbc = new JavaBinCodec()) {
-      BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS();
-      jbc.marshal(o,baos);
-      return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(),0,baos.size()));
-    }
-  }
-
-  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable) {
-    return getDeepCopy(c, maxDepth, mutable, false);
-  }
-
-  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
-    if (c == null || maxDepth < 1) return c;
-    Collection result = c instanceof Set ?
-        ( sorted? new TreeSet() : new HashSet()) : new ArrayList();
-    for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted));
-    if (sorted && (result instanceof List)) {
-      Collections.sort((List)result);
-    }
-    return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
-  }
-
   public static void writeJson(Object o, OutputStream os, boolean indent) throws IOException {
     writeJson(o, new OutputStreamWriter(os, UTF_8), indent)
         .flush();
@@ -199,35 +211,12 @@
     return writer;
   }
 
-  private static class MapWriterJSONWriter extends JSONWriter {
-
-    public MapWriterJSONWriter(CharArr out, int indentSize) {
-      super(out, indentSize);
+  public static InputStream toJavabin(Object o) throws IOException {
+    try (final JavaBinCodec jbc = new JavaBinCodec()) {
+      BinaryRequestWriter.BAOS baos = new BinaryRequestWriter.BAOS();
+      jbc.marshal(o, baos);
+      return new ByteBufferInputStream(ByteBuffer.wrap(baos.getbuf(), 0, baos.size()));
     }
-
-    @Override
-    public void handleUnknownClass(Object o) {
-      if (o instanceof MapWriter) {
-        Map m = ((MapWriter)o).toMap(new LinkedHashMap<>());
-        write(m);
-      } else {
-        super.handleUnknownClass(o);
-      }
-    }
-  }
-
-  public static byte[] toJSON(Object o) {
-    if(o == null) return new byte[0];
-    CharArr out = new CharArr();
-    if (!(o instanceof List) && !(o instanceof Map)) {
-      if (o instanceof MapWriter)  {
-        o = ((MapWriter)o).toMap(new LinkedHashMap<>());
-      } else if(o instanceof IteratorWriter){
-        o = ((IteratorWriter)o).toList(new ArrayList<>());
-      }
-    }
-    new MapWriterJSONWriter(out, 2).write(o); // indentation by default
-    return toUTF8(out);
   }
 
   public static String toJSONString(Object o) {
@@ -274,15 +263,29 @@
     return propMap;
   }
 
-  public static Object fromJSON(InputStream is){
-    return fromJSON(new InputStreamReader(is, UTF_8));
-  }
-  public static Object fromJSON(Reader is){
-    try {
-      return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal();
-    } catch (IOException e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
+  public static Collection getDeepCopy(Collection c, int maxDepth, boolean mutable, boolean sorted) {
+    if (c == null || maxDepth < 1) return c;
+    Collection result = c instanceof Set ?
+        (sorted ? new TreeSet() : new HashSet()) : new ArrayList();
+    for (Object o : c) result.add(makeDeepCopy(o, maxDepth, mutable, sorted));
+    if (sorted && (result instanceof List)) {
+      Collections.sort((List) result);
     }
+    return mutable ? result : result instanceof Set ? unmodifiableSet((Set) result) : unmodifiableList((List) result);
+  }
+
+  public static byte[] toJSON(Object o) {
+    if (o == null) return new byte[0];
+    CharArr out = new CharArr();
+    if (!(o instanceof List) && !(o instanceof Map)) {
+      if (o instanceof MapWriter) {
+        o = ((MapWriter) o).toMap(new LinkedHashMap<>());
+      } else if (o instanceof IteratorWriter) {
+        o = ((IteratorWriter) o).toList(new ArrayList<>());
+      }
+    }
+    new MapWriterJSONWriter(out, 2).write(o); // indentation by default
+    return toUTF8(out);
   }
 
 
@@ -293,35 +296,14 @@
       throw new RuntimeException(e);
     }
   };
-  public static final Function<JSONParser, ObjectBuilder> MAPWRITEROBJBUILDER = jsonParser -> {
-    try {
-      return new ObjectBuilder(jsonParser){
-        @Override
-        public Object newObject() {
-          return new LinkedHashMapWriter();
-        }
-      };
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  };
 
-  public static final Function<JSONParser, ObjectBuilder> MAPOBJBUILDER = jsonParser -> {
-    try {
-      return new ObjectBuilder(jsonParser){
-        @Override
-        public Object newObject() {
-          return new HashMap();
-        }
-      };
-    } catch (IOException e) {
-      throw new RuntimeException(e);
-    }
-  };
+  public static Object fromJSON(InputStream is) {
+    return fromJSON(new InputStreamReader(is, UTF_8));
+  }
 
-  public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
+  public static Object fromJSON(Reader is) {
     try {
-      return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
+      return STANDARDOBJBUILDER.apply(getJSONParser(is)).getVal();
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
     }
@@ -336,10 +318,19 @@
       return fromJSON(stream);
     } catch (IOException e) {
       throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
-                              "Resource error: " + e.getMessage(), e);
+          "Resource error: " + e.getMessage(), e);
     }
   }
-  public static JSONParser getJSONParser(Reader reader){
+
+  public static Object fromJSON(InputStream is, Function<JSONParser, ObjectBuilder> objBuilderProvider) {
+    try {
+      return objBuilderProvider.apply(getJSONParser((new InputStreamReader(is, StandardCharsets.UTF_8)))).getVal();
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error", e);
+    }
+  }
+
+  public static JSONParser getJSONParser(Reader reader) {
     JSONParser parser = new JSONParser(reader);
     parser.setFlags(parser.getFlags() |
         JSONParser.ALLOW_MISSING_COLON_COMMA_BEFORE_OBJECT |
@@ -347,11 +338,11 @@
     return parser;
   }
 
-  public static Object fromJSONString(String json)  {
+  public static Object fromJSONString(String json) {
     try {
       return STANDARDOBJBUILDER.apply(getJSONParser(new StringReader(json))).getVal();
     } catch (Exception e) {
-      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error : "+ json, e );
+      throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, "Parse error : " + json, e);
     }
   }
 
@@ -363,10 +354,10 @@
 
   public static boolean setObjectByPath(Object root, String hierarchy, Object value) {
     List<String> parts = StrUtils.splitSmart(hierarchy, '/', true);
-    return setObjectByPath(root, parts, value);
+    return setObjectByPath(root, parts, value, true);
   }
 
-  public static boolean setObjectByPath(Object root, List<String> hierarchy, Object value) {
+  public static boolean setObjectByPath(Object root, List<String> hierarchy, Object value, boolean insertMissing) {
     if (root == null) return false;
     if (!isMapLike(root)) throw new RuntimeException("must be a Map or NamedList");
     Object obj = root;
@@ -382,7 +373,10 @@
       }
       if (i < hierarchy.size() - 1) {
         Object o = getVal(obj, s, -1);
-        if (o == null) return false;
+        if (o == null) {
+          if (insertMissing) insertItem(o = new LinkedHashMap<>(), obj, s);
+          else return false;
+        }
         if (idx > -1) {
           List l = (List) o;
           o = idx < l.size() ? l.get(idx) : null;
@@ -391,14 +385,7 @@
         obj = o;
       } else {
         if (idx == -2) {
-          if (obj instanceof NamedList) {
-            NamedList namedList = (NamedList) obj;
-            int location = namedList.indexOf(s, 0);
-            if (location == -1) namedList.add(s, value);
-            else namedList.setVal(location, value);
-          } else if (obj instanceof Map) {
-            ((Map) obj).put(s, value);
-          }
+          insertItem(value, obj, s);
           return true;
         } else {
           Object v = getVal(obj, s, -1);
@@ -422,10 +409,20 @@
 
   }
 
+  private static void insertItem(Object value, Object container, String name) {
+    if (container instanceof NamedList) {
+      NamedList namedList = (NamedList) container;
+      int location = namedList.indexOf(name, 0);
+      if (location == -1) namedList.add(name, value);
+      else namedList.setVal(location, value);
+    } else if (container instanceof Map) {
+      ((Map) container).put(name, value);
+    }
+  }
 
   public static Object getObjectByPath(Object root, boolean onlyPrimitive, List<String> hierarchy) {
-    if(root == null) return null;
-    if(!isMapLike(root)) return null;
+    if (root == null) return null;
+    if (!isMapLike(root)) return null;
     Object obj = root;
     for (int i = 0; i < hierarchy.size(); i++) {
       int idx = -1;
@@ -518,6 +515,7 @@
       try {
         ((MapWriter) obj).writeMap(new MapWriter.EntryWriter() {
           int count = -1;
+
           @Override
           public MapWriter.EntryWriter put(CharSequence k, Object v) {
             if (result[0] != null) return this;
@@ -533,15 +531,14 @@
         throw new RuntimeException(e);
       }
       return result[0];
-    }
-    else if (obj instanceof Map) return ((Map) obj).get(key);
+    } else if (obj instanceof Map) return ((Map) obj).get(key);
     else throw new RuntimeException("must be a NamedList or Map");
   }
 
   /**
    * If the passed entity has content, make sure it is fully
    * read and closed.
-   * 
+   *
    * @param entity to consume or null
    */
   public static void consumeFully(HttpEntity entity) {
@@ -562,31 +559,21 @@
 
   /**
    * Make sure the InputStream is fully read.
-   * 
+   *
    * @param is to read
    * @throws IOException on problem with IO
    */
   private static void readFully(InputStream is) throws IOException {
     is.skip(is.available());
-    while (is.read() != -1) {}
-  }
-
-  public static Map<String, Object> getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException {
-    VersionedData data = null;
-    try {
-      data = distribStateManager.getData(path);
-    } catch (KeeperException.NoNodeException | NoSuchElementException e) {
-      return Collections.emptyMap();
+    while (is.read() != -1) {
     }
-    if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap();
-    return (Map<String, Object>) Utils.fromJSON(data.getData());
   }
 
   /**
    * Assumes data in ZooKeeper is a JSON string, deserializes it and returns as a Map
    *
-   * @param zkClient the zookeeper client
-   * @param path the path to the znode being read
+   * @param zkClient        the zookeeper client
+   * @param path            the path to the znode being read
    * @param retryOnConnLoss whether to retry the operation automatically on connection loss, see {@link org.apache.solr.common.cloud.ZkCmdExecutor#retryOperation(ZkOperation)}
    * @return a Map if the node exists and contains valid JSON or an empty map if znode does not exist or has a null data
    */
@@ -602,39 +589,23 @@
     return Collections.emptyMap();
   }
 
-  public static final Pattern ARRAY_ELEMENT_INDEX = Pattern
-      .compile("(\\S*?)\\[([-]?\\d+)\\]");
-
-  public static SpecProvider getSpec(final String name) {
-    return () -> {
-      return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION);
-    };
-  }
-
-  public static String parseMetricsReplicaName(String collectionName, String coreName) {
-    if (collectionName == null || !coreName.startsWith(collectionName)) {
-      return null;
-    } else {
-      // split "collection1_shard1_1_replica1" into parts
-      if (coreName.length() > collectionName.length()) {
-        String str = coreName.substring(collectionName.length() + 1);
-        int pos = str.lastIndexOf("_replica");
-        if (pos == -1) { // ?? no _replicaN part ??
-          return str;
-        } else {
-          return str.substring(pos + 1);
-        }
-      } else {
-        return null;
-      }
+  public static Map<String, Object> getJson(DistribStateManager distribStateManager, String path) throws InterruptedException, IOException, KeeperException {
+    VersionedData data = null;
+    try {
+      data = distribStateManager.getData(path);
+    } catch (KeeperException.NoNodeException | NoSuchElementException e) {
+      return Collections.emptyMap();
     }
+    if (data == null || data.getData() == null || data.getData().length == 0) return Collections.emptyMap();
+    return (Map<String, Object>) Utils.fromJSON(data.getData());
   }
 
-  /**Applies one json over other. The 'input' is applied over the sink
-   * The values in input isapplied over the values in 'sink' . If a value is 'null'
+  /**
+   * Applies one JSON over the other. The 'input' is applied over the 'sink':
+   * the values in 'input' override the values in 'sink'. If a value is 'null',
    * that value is removed from sink
    *
-   * @param sink the original json object to start with. Ensure that this Map is mutable
+   * @param sink  the original json object to start with. Ensure that this Map is mutable
    * @param input the json with new values
    * @return whether there was any change made to sink or not.
    */
@@ -672,20 +643,62 @@
     return isModified;
   }
 
+  public static SpecProvider getSpec(final String name) {
+    return () -> {
+      return ValidatingJsonMap.parse(CommonParams.APISPEC_LOCATION + name + ".json", CommonParams.APISPEC_LOCATION);
+    };
+  }
+
+  public static String parseMetricsReplicaName(String collectionName, String coreName) {
+    if (collectionName == null || !coreName.startsWith(collectionName)) {
+      return null;
+    } else {
+      // split "collection1_shard1_1_replica1" into parts
+      if (coreName.length() > collectionName.length()) {
+        String str = coreName.substring(collectionName.length() + 1);
+        int pos = str.lastIndexOf("_replica");
+        if (pos == -1) { // ?? no _replicaN part ??
+          return str;
+        } else {
+          return str.substring(pos + 1);
+        }
+      } else {
+        return null;
+      }
+    }
+  }
+
   public static String getBaseUrlForNodeName(final String nodeName, String urlScheme) {
     final int _offset = nodeName.indexOf("_");
     if (_offset < 0) {
       throw new IllegalArgumentException("nodeName does not contain expected '_' separator: " + nodeName);
     }
-    final String hostAndPort = nodeName.substring(0,_offset);
+    final String hostAndPort = nodeName.substring(0, _offset);
     try {
-      final String path = URLDecoder.decode(nodeName.substring(1+_offset), "UTF-8");
+      final String path = URLDecoder.decode(nodeName.substring(1 + _offset), "UTF-8");
       return urlScheme + "://" + hostAndPort + (path.isEmpty() ? "" : ("/" + path));
     } catch (UnsupportedEncodingException e) {
       throw new IllegalStateException("JVM Does not seem to support UTF-8", e);
     }
   }
 
+  private static class MapWriterJSONWriter extends JSONWriter {
+
+    public MapWriterJSONWriter(CharArr out, int indentSize) {
+      super(out, indentSize);
+    }
+
+    @Override
+    public void handleUnknownClass(Object o) {
+      if (o instanceof MapWriter) {
+        Map m = ((MapWriter) o).toMap(new LinkedHashMap<>());
+        write(m);
+      } else {
+        super.handleUnknownClass(o);
+      }
+    }
+  }
+
   public static long time(TimeSource timeSource, TimeUnit unit) {
     return unit.convert(timeSource.getTimeNs(), TimeUnit.NANOSECONDS);
   }
@@ -713,4 +726,67 @@
     return def;
   }
 
+  public interface InputStreamConsumer<T> {
+
+    T accept(InputStream is) throws IOException;
+
+  }
+  public static final InputStreamConsumer<?> JAVABINCONSUMER = is -> new JavaBinCodec().unmarshal(is);
+  public static final InputStreamConsumer<?> JSONCONSUMER = is -> Utils.fromJSON(is);
+  public static InputStreamConsumer<ByteBuffer> newBytesConsumer(int maxSize) {
+    return is -> {
+      try (BinaryRequestWriter.BAOS bos = new BinaryRequestWriter.BAOS()) {
+        long sz = 0;
+        int next = is.read();
+        while (next > -1) {
+          if (++sz > maxSize) throw new BufferOverflowException();
+          bos.write(next);
+          next = is.read();
+        }
+        bos.flush();
+        return ByteBuffer.wrap(bos.getbuf(), 0, bos.size());
+      } catch (IOException e) {
+        throw new RuntimeException(e);
+      }
+    };
+  }
+
+  public static <T> T executeGET(HttpClient client, String url, InputStreamConsumer<T> consumer) throws SolrException {
+    T result = null;
+    HttpGet httpGet = new HttpGet(url);
+    HttpResponse rsp = null;
+    try {
+      rsp = client.execute(httpGet);
+    } catch (IOException e) {
+      log.error("Error in request to url: " + url, e);
+      throw new SolrException(SolrException.ErrorCode.UNKNOWN, "error sending request");
+    }
+    int statusCode = rsp.getStatusLine().getStatusCode();
+    if (statusCode != 200) {
+      try {
+        log.error("Failed a request to: {}, status: {}, body: {}", url, rsp.getStatusLine(), EntityUtils.toString(rsp.getEntity(), StandardCharsets.UTF_8));
+      } catch (IOException e) {
+        log.error("could not print error", e);
+      }
+      throw new SolrException(SolrException.ErrorCode.getErrorCode(statusCode), "Unknown error");
+    }
+    HttpEntity entity = rsp.getEntity();
+    try {
+      InputStream is = entity.getContent();
+      if (consumer != null) {
+        result = consumer.accept(is);
+      }
+    } catch (IOException e) {
+      throw new SolrException(SolrException.ErrorCode.UNKNOWN, e);
+    } finally {
+      Utils.consumeFully(entity);
+    }
+    return result;
+  }
+
 }
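The executeGET / InputStreamConsumer helpers added above are small utilities for pulling remote content, such as package jars from another node's file store, with an upper size bound. A minimal usage sketch, assuming an Apache HttpClient, a placeholder node address and sha256 id, and that the /node/filestore/package/{id} path (defined later in this patch) streams the raw file bytes:

    import java.nio.ByteBuffer;
    import org.apache.http.client.HttpClient;
    import org.apache.http.impl.client.HttpClients;
    import org.apache.solr.common.util.Utils;

    public class FetchPackageJar {                                    // hypothetical example class
      public static void main(String[] args) throws Exception {
        HttpClient client = HttpClients.createDefault();
        String baseUrl = "http://localhost:8983";                     // placeholder node address
        String sha256 = args[0];                                      // placeholder file id
        // Fetch the jar via the new /node/filestore/package/{id} path, capped at 10 MB.
        ByteBuffer jar = Utils.executeGET(client,
            baseUrl + "/api/node/filestore/package/" + sha256,
            Utils.newBytesConsumer(10 * 1024 * 1024));
        System.out.println("fetched " + jar.remaining() + " bytes");
      }
    }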
diff --git a/solr/solrj/src/resources/apispec/cluster.Commands.json b/solr/solrj/src/resources/apispec/cluster.Commands.json
index 069cd1d..7c37cb2 100644
--- a/solr/solrj/src/resources/apispec/cluster.Commands.json
+++ b/solr/solrj/src/resources/apispec/cluster.Commands.json
@@ -169,6 +169,32 @@
       "required": [
         "name"
       ]
+    },
+    "add-requesthandler": {
+      "type": "object",
+      "documentation": "",
+      "description" : "Create a node level request handler",
+      "properties": {
+        "name": {
+          "type": "string",
+          "description": "Name of the request handler. This is also the path at which the handler is exposed"
+        },
+        "class": {
+          "type": "string",
+          "description": "The class name"
+        },
+        "package" : {
+          "type": "string",
+          "description": "The package from which the plugin class is loaded"
+        }
+      },
+      "required": ["name", "class"],
+      "additionalProperties": true
+    },
+    "delete-requesthandler" : {
+      "description" : "Delete a node level request handler",
+      "type": "string"
     }
+
   }
 }
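A sketch of driving the new add-requesthandler cluster command from a client, using SolrJ's V2Request; the handler name, class and package below are placeholders, and the payload simply mirrors the schema above:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.V2Request;

    public class AddNodeHandler {                           // hypothetical example class
      static void add(SolrClient solrClient) throws Exception {
        new V2Request.Builder("/cluster")
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload("{\"add-requesthandler\": {"
                + "\"name\": \"/myHandler\","               // also the path under /node/ext
                + "\"class\": \"com.example.MyHandler\","   // hypothetical handler class
                + "\"package\": \"mypkg\"}}")               // hypothetical package name
            .build()
            .process(solrClient);
      }
    }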
diff --git a/solr/solrj/src/resources/apispec/cluster.filestore.json b/solr/solrj/src/resources/apispec/cluster.filestore.json
new file mode 100644
index 0000000..79732f8
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/cluster.filestore.json
@@ -0,0 +1,10 @@
+{
+  "methods": [
+    "POST"
+  ],
+  "url": {
+    "paths": [
+      "/cluster/filestore/package"
+    ]
+  }
+}
diff --git a/solr/solrj/src/resources/apispec/cluster.package.Commands.file.properties.json b/solr/solrj/src/resources/apispec/cluster.package.Commands.file.properties.json
new file mode 100644
index 0000000..b4df0e6
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/cluster.package.Commands.file.properties.json
@@ -0,0 +1,14 @@
+{
+  "type": "object",
+  "properties": {
+    "id": {
+      "type": "string",
+      "description": "The sha256 hash of the file"
+    },
+    "sig": {
+      "type": "string",
+      "description": "The signature of the file"
+    }
+  },
+  "required": ["id", "sig"]
+}
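The "id" above is the sha256 hash of the uploaded file. A sketch of computing it on the client side with plain JDK classes (hex encoding is an assumption, since the spec does not pin an encoding; the jar path is a placeholder, and "sig" comes from signing the same bytes with the publisher's private key):

    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.MessageDigest;

    public class PackageFileId {                                        // hypothetical helper
      public static void main(String[] args) throws Exception {
        byte[] bytes = Files.readAllBytes(Paths.get("mypkg-1.0.jar"));  // placeholder jar
        MessageDigest md = MessageDigest.getInstance("SHA-256");
        StringBuilder hex = new StringBuilder();
        for (byte b : md.digest(bytes)) {
          hex.append(String.format("%02x", b & 0xff));                  // lower-case hex digest
        }
        System.out.println(hex);                                        // candidate value for the "id" field
      }
    }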
diff --git a/solr/solrj/src/resources/apispec/cluster.package.Commands.properties.json b/solr/solrj/src/resources/apispec/cluster.package.Commands.properties.json
new file mode 100644
index 0000000..0258d94
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/cluster.package.Commands.properties.json
@@ -0,0 +1,31 @@
+{
+  "type": "object",
+  "properties": {
+    "name": {
+      "type": "string",
+      "description": "A name for the package"
+    },
+    "files": {
+      "type": "array",
+      "items": {
+       "#include" :"cluster.package.Commands.file.properties"
+      }
+    },
+    "file": {
+      "#include" :"cluster.package.Commands.file.properties"
+    },
+    "repository": {
+      "type": "string",
+      "description": "The repository name"
+    },
+    "version": {
+      "type": "string",
+      "description": "The version of the package"
+    }
+  },
+  "required": [
+    "name",
+    "version"
+  ],
+  "oneOf" : ["file","files"]
+}
\ No newline at end of file
diff --git a/solr/solrj/src/resources/apispec/cluster.packages.Commands.json b/solr/solrj/src/resources/apispec/cluster.packages.Commands.json
new file mode 100644
index 0000000..b52851e
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/cluster.packages.Commands.json
@@ -0,0 +1,27 @@
+{
+  "methods": [
+     "POST"
+  ],
+  "url": {
+    "paths": [
+      "/cluster/package"
+    ]
+  },
+  "commands": {
+    "add": {
+      "documentation": "",
+      "description" : "Add a package to the classpath",
+      "#include": "cluster.package.Commands.properties"
+    },
+    "update": {
+      "documentation": "",
+      "description" : "Update the jar details",
+      "#include": "cluster.package.Commands.properties"
+    },
+    "delete": {
+      "documentation": "",
+      "description" : "Delete a package",
+      "type": "string"
+    }
+  }
+}
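A sketch of issuing the "add" command against /cluster/package through SolrJ's V2Request; the package name, version, id and sig values are placeholders and follow the properties spec referenced above:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.V2Request;

    public class AddPackage {                               // hypothetical example class
      static void add(SolrClient solrClient) throws Exception {
        String payload = "{\"add\": {"
            + "\"name\": \"mypkg\","                        // placeholder package name
            + "\"version\": \"1.0\","
            + "\"file\": {\"id\": \"<sha256-of-jar>\", \"sig\": \"<signature>\"}}}";
        new V2Request.Builder("/cluster/package")
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload(payload)
            .build()
            .process(solrClient);
      }
    }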
diff --git a/solr/solrj/src/resources/apispec/cluster.packages.GET.json b/solr/solrj/src/resources/apispec/cluster.packages.GET.json
new file mode 100644
index 0000000..ba11bd2
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/cluster.packages.GET.json
@@ -0,0 +1,10 @@
+{
+  "methods": [
+    "GET"
+  ],
+  "url": {
+    "paths": [
+      "/cluster/package"
+    ]
+  }
+}
\ No newline at end of file
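A matching sketch for the GET side above, listing the packages currently registered with the cluster (again assuming SolrJ's V2Request):

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.V2Request;
    import org.apache.solr.common.util.NamedList;

    public class ListPackages {                              // hypothetical example class
      static NamedList<Object> list(SolrClient solrClient) throws Exception {
        return new V2Request.Builder("/cluster/package")
            .withMethod(SolrRequest.METHOD.GET)
            .build()
            .process(solrClient)
            .getResponse();
      }
    }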
diff --git a/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json b/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json
index 731c3d8..6ee1498 100644
--- a/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json
+++ b/solr/solrj/src/resources/apispec/core.config.Commands.addRequestHandler.properties.json
@@ -10,7 +10,7 @@
       "description": "The request handler class. Class names do not need to be fully qualified if they are included with Solr, so you can abbreviate the name as 'solr.SearchHandler'. Custom or third-party class names may need to be fully qualified, however."
     },
     "runtimeLib": {
-      "type": "boolean",
+      "type": "string",
       "description": "An optional parameter to use a custom .jar file that has been uploaded to Solr's blobstore. This additionally requires that the .jar has also been registered with the 'add-runtimelib' command, which is one of the available commands for the Config API."
     },
     "startup": {
diff --git a/solr/solrj/src/resources/apispec/core.config.Commands.generic.json b/solr/solrj/src/resources/apispec/core.config.Commands.generic.json
index 9d2b01d..2ebfdf8 100644
--- a/solr/solrj/src/resources/apispec/core.config.Commands.generic.json
+++ b/solr/solrj/src/resources/apispec/core.config.Commands.generic.json
@@ -10,7 +10,7 @@
       "description": "The configuration item class. Class names do not need to be fully qualified if they are included with Solr, so you can abbreviate the name as 'solr.SearchHandler'. Custom or third-party class names may need to be fully qualified, however."
    },
     "runtimeLib": {
-      "type": "boolean",
+      "type": "string",
       "description": "An optional parameter to use a custom .jar file that has been uploaded to Solr's blobstore. This additionally requires that the .jar has also been registered with the 'add-runtimelib' command, which is one of the available commands for the Config API."
    }
   },
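With runtimeLib changed from a boolean to a string in both schemas above, the Config API command can name the lib/package that provides the class. A sketch, assuming SolrJ's V2Request and a collection named 'techproducts'; the handler name, class and lib name are placeholders:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.V2Request;

    public class AddCollectionHandler {                       // hypothetical example class
      static void add(SolrClient solrClient) throws Exception {
        new V2Request.Builder("/c/techproducts/config")
            .withMethod(SolrRequest.METHOD.POST)
            .withPayload("{\"add-requesthandler\": {"
                + "\"name\": \"/myHandler\","
                + "\"class\": \"com.example.MyHandler\","     // hypothetical handler class
                + "\"runtimeLib\": \"mypkg\"}}")              // name of the uploaded lib/package
            .build()
            .process(solrClient);
      }
    }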
diff --git a/solr/solrj/src/resources/apispec/core.config.json b/solr/solrj/src/resources/apispec/core.config.json
index 81e7d54..2324821 100644
--- a/solr/solrj/src/resources/apispec/core.config.json
+++ b/solr/solrj/src/resources/apispec/core.config.json
@@ -12,7 +12,8 @@
       "/config/jmx",
       "/config/requestDispatcher",
       "/config/znodeVersion",
-      "/config/{plugin}"
+      "/config/{plugin}",
+      "/config/{plugin}/{pluginName}"
     ]
   }
 }
diff --git a/solr/solrj/src/resources/apispec/node.ext.json b/solr/solrj/src/resources/apispec/node.ext.json
new file mode 100644
index 0000000..161b2aa
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/node.ext.json
@@ -0,0 +1,13 @@
+{
+  "methods": [
+    "POST",
+    "GET",
+    "DELETE"
+  ],
+  "url": {
+    "paths": [
+      "/node/ext/{handlerName}",
+      "/node/ext"
+    ]
+  }
+}
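Once a node level handler has been registered, this spec exposes it under /node/ext/{handlerName}. A sketch of calling it via SolrJ, with 'myHandler' as a placeholder name:

    import org.apache.solr.client.solrj.SolrClient;
    import org.apache.solr.client.solrj.SolrRequest;
    import org.apache.solr.client.solrj.request.V2Request;
    import org.apache.solr.common.util.NamedList;

    public class CallNodeHandler {                            // hypothetical example class
      static NamedList<Object> call(SolrClient solrClient) throws Exception {
        return new V2Request.Builder("/node/ext/myHandler")   // placeholder handler name
            .withMethod(SolrRequest.METHOD.GET)
            .build()
            .process(solrClient)
            .getResponse();
      }
    }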
diff --git a/solr/solrj/src/resources/apispec/node.filestore.GET.json b/solr/solrj/src/resources/apispec/node.filestore.GET.json
new file mode 100644
index 0000000..d2bd1b4
--- /dev/null
+++ b/solr/solrj/src/resources/apispec/node.filestore.GET.json
@@ -0,0 +1,11 @@
+{
+  "methods": [
+    "GET"
+  ],
+  "url": {
+    "paths": [
+      "/node/filestore/package",
+      "/node/filestore/package/{id}"
+    ]
+  }
+}
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
index d9de129..2ef2659 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/AbstractFullDistribZkTestBase.java
@@ -16,8 +16,6 @@
  */
 package org.apache.solr.cloud;
 
-import static org.apache.solr.common.util.Utils.makeMap;
-
 import java.io.File;
 import java.io.IOException;
 import java.lang.invoke.MethodHandles;
@@ -108,6 +106,8 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.util.Utils.makeMap;
+
 /**
  * TODO: we should still test this works as a custom update chain as well as
  * what we test now - the default update chain
diff --git a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
index cb66ae9..23283cc 100644
--- a/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
+++ b/solr/test-framework/src/java/org/apache/solr/cloud/SolrCloudTestCase.java
@@ -55,11 +55,14 @@
 import org.apache.solr.common.cloud.ZkStateReader;
 import org.apache.solr.common.params.CollectionAdminParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.zookeeper.CreateMode;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import static org.apache.solr.common.cloud.ZkConfigManager.CONFIGS_ZKNODE;
+
 /**
  * Base class for SolrCloud tests
  * <p>
@@ -87,9 +90,12 @@
   private static class Config {
     final String name;
     final Path path;
-    private Config(String name, Path path) {
+    final Map<String, byte[]> extraConfig;
+
+    private Config(String name, Path path, Map<String, byte[]> extraConfig) {
       this.name = name;
       this.path = path;
+      this.extraConfig = extraConfig;
     }
   }
 
@@ -181,7 +187,12 @@
      * @param configPath the path to the config files
      */
     public Builder addConfig(String configName, Path configPath) {
-      this.configs.add(new Config(configName, configPath));
+      this.configs.add(new Config(configName, configPath, null));
+      return this;
+    }
+
+    public Builder addConfig(String configName, Path configPath, Map<String, byte[]> extraConfig) {
+      this.configs.add(new Config(configName, configPath, extraConfig));
       return this;
     }
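A sketch of how a test might use the new addConfig overload together with configure(), which now returns the cluster (see the next hunk); this is a fragment from inside a SolrCloudTestCase subclass, and the configset name, path and extra file are placeholders:

    import java.nio.charset.StandardCharsets;
    import java.nio.file.Path;
    import java.nio.file.Paths;
    import java.util.Collections;
    import java.util.Map;

    // inside a test class extending SolrCloudTestCase:
    Path conf = Paths.get("src/test/resources/configsets/minimal/conf");   // placeholder configset path
    Map<String, byte[]> extra = Collections.singletonMap(
        "my-plugin.json", "{}".getBytes(StandardCharsets.UTF_8));          // hypothetical extra file
    MiniSolrCloudCluster cluster = configureCluster(2)
        .addConfig("conf1", conf, extra)
        .configure();                          // uploads the configset plus the extra znode content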
 
@@ -206,8 +217,8 @@
      *
      * @throws Exception if an error occurs on startup
      */
-    public void configure() throws Exception {
-      cluster = build();
+    public MiniSolrCloudCluster configure() throws Exception {
+      return cluster = build();
     }
 
     /**
@@ -221,7 +232,15 @@
           null, securityJson, trackJettyMetrics);
       CloudSolrClient client = cluster.getSolrClient();
       for (Config config : configs) {
-        ((ZkClientClusterStateProvider)client.getClusterStateProvider()).uploadConfig(config.path, config.name);
+        ((ZkClientClusterStateProvider) client.getClusterStateProvider()).uploadConfig(config.path, config.name);
+        if (config.extraConfig != null) {
+          for (Map.Entry<String, byte[]> e : config.extraConfig.entrySet()) {
+            ((ZkClientClusterStateProvider) client.getClusterStateProvider()).getZkStateReader().getZkClient()
+                .create(CONFIGS_ZKNODE + "/" + config.name + "/" + e.getKey(), e.getValue(), CreateMode.PERSISTENT, true);
+
+          }
+
+        }
       }
 
       if (clusterProperties.size() > 0) {