DRILL-7590: Refactor plugin registry

Major cleanup of the plugin registry to split it into components
in preparation for a proper plugin API.

Better coordinates the named and ephemeral plugin caches.
Cleans up the registry API. Sharpens rules for modifying
plugin configs.

closes #1988
diff --git a/common/src/main/java/org/apache/drill/common/PlanStringBuilder.java b/common/src/main/java/org/apache/drill/common/PlanStringBuilder.java
new file mode 100644
index 0000000..7d06eaf
--- /dev/null
+++ b/common/src/main/java/org/apache/drill/common/PlanStringBuilder.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.common;
+
+import org.apache.commons.lang3.StringEscapeUtils;
+
+/**
+ * Builds a string in Drill's "plan string" format: that shown in the
+ * text version of {@code EXPLAIN PLAN FOR} output. Example: <pre><code>
+ * Mumble[foo=fred, bar=barney]</code></pre>
+ * <p>
+ * Similar to the Guava {@code Objects.ToStringHelper} class but for
+ * the Drill "plan-string" format. Use this class for any object that
+ * may be displayed in a query plan.
+ * <p>
+ * Example usage:<pre><code>
+ * public String toString() {
+ *   return new PlanStringBuilder(this)
+ *     .field("foo", foo)
+ *     .field("bar", bar)
+ *     .toString();
+ * }</code></pre>
+ */
+public class PlanStringBuilder {
+
+  private final StringBuilder buf = new StringBuilder();
+  private int fieldCount = 0;
+
+  public PlanStringBuilder(Object node) {
+    this(node.getClass().getSimpleName());
+  }
+
+  public PlanStringBuilder(String node) {
+    buf.append(node).append(" [");
+  }
+
+  /**
+   * Displays the field as a quoted string: {@code foo="bar"}.
+   */
+  public PlanStringBuilder field(String key, String value) {
+    if (value != null) {
+      startField(key);
+      buf.append("\"").append(value).append("\"");
+    }
+    return this;
+  }
+
+  /**
+   * Displays the field as an unquoted string. Use this for things
+   * like names: {@code mode=OPTIONAL}.
+   */
+  public PlanStringBuilder unquotedField(String key, String value) {
+    if (value != null) {
+      startField(key);
+      buf.append(value);
+    }
+    return this;
+  }
+
+  /**
+   * Displays the field as an unquoted {@code toString()} value.
+   * Omits the field if the value is null.
+   */
+  public PlanStringBuilder field(String key, Object value) {
+    if (value != null) {
+      startField(key);
+      buf.append(value.toString());
+    }
+    return this;
+  }
+
+  /**
+   * Displays a numeric field: {@code size=10}.
+   */
+  public PlanStringBuilder field(String key, int value) {
+    startField(key);
+    buf.append(value);
+    return this;
+  }
+
+  /**
+   * Displays a character in Java-quoted format: {@code delimiter="\n"}.
+   */
+  public PlanStringBuilder escapedField(String key, char value) {
+    return escapedField(key, Character.toString(value));
+  }
+
+  /**
+   * Displays a string in Java-quoted format: {@code delimiter="\t"}.
+   */
+  public PlanStringBuilder escapedField(String key, String value) {
+    return field(key, StringEscapeUtils.escapeJava(value));
+  }
+
+  private void startField(String key) {
+    if (fieldCount++ != 0) {
+      buf.append(", ");
+    }
+    buf.append(key).append("=");
+  }
+
+  @Override
+  public String toString() { return buf.append("]").toString(); }
+}
diff --git a/common/src/main/java/org/apache/drill/common/collections/ImmutableEntry.java b/common/src/main/java/org/apache/drill/common/collections/ImmutableEntry.java
index e67b86c..7b9bf52 100644
--- a/common/src/main/java/org/apache/drill/common/collections/ImmutableEntry.java
+++ b/common/src/main/java/org/apache/drill/common/collections/ImmutableEntry.java
@@ -48,15 +48,24 @@
 
   @Override
   public boolean equals(final Object other) {
-    if (other instanceof ImmutableEntry && other.getClass() == getClass()) {
-      final ImmutableEntry<K, V> entry = (ImmutableEntry<K, V>)other;
-      return Objects.equal(key, entry.key) && Objects.equal(value, entry.value);
+    if (other == this) {
+      return true;
     }
-    return false;
+    if (other == null || !(other instanceof Map.Entry)) {
+      return false;
+    }
+    Map.Entry<?, ?> entry = (Map.Entry<?, ?>) other;
+    return Objects.equal(key, entry.getKey()) && Objects.equal(value, entry.getValue());
   }
 
   @Override
   public int hashCode() {
     return Objects.hashCode(key, value);
   }
+
+  @Override
+  public String toString() {
+    return "(" + key.toString() + ", " +
+           value.toString() + ")";
+  }
 }
diff --git a/common/src/main/java/org/apache/drill/common/config/ConfigConstants.java b/common/src/main/java/org/apache/drill/common/config/ConfigConstants.java
index 3283fe0..ac9ff71 100644
--- a/common/src/main/java/org/apache/drill/common/config/ConfigConstants.java
+++ b/common/src/main/java/org/apache/drill/common/config/ConfigConstants.java
@@ -17,33 +17,26 @@
  */
 package org.apache.drill.common.config;
 
-public final class ConfigConstants {
+public interface ConfigConstants {
 
   /** Default (base) configuration file name.  (Classpath resource pathname.) */
-  public static final String CONFIG_DEFAULT_RESOURCE_PATHNAME = "drill-default.conf";
+  String CONFIG_DEFAULT_RESOURCE_PATHNAME = "drill-default.conf";
 
   /** Module configuration files name.  (Classpath resource pathname.) */
-  public static final String DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME = "drill-module.conf";
+  String DRILL_JAR_MARKER_FILE_RESOURCE_PATHNAME = "drill-module.conf";
 
   /** Distribution Specific Override configuration file name.  (Classpath resource pathname.) */
-  public static final String CONFIG_DISTRIBUTION_RESOURCE_PATHNAME = "drill-distrib.conf";
+  String CONFIG_DISTRIBUTION_RESOURCE_PATHNAME = "drill-distrib.conf";
 
   /** Override configuration file name.  (Classpath resource pathname.) */
-  public static final String CONFIG_OVERRIDE_RESOURCE_PATHNAME = "drill-override.conf";
-
-  /** Override plugins configs file name.  (Classpath resource pathname.) */
-  public static final String STORAGE_PLUGINS_OVERRIDE_CONF = "storage-plugins-override.conf";
+  String CONFIG_OVERRIDE_RESOURCE_PATHNAME = "drill-override.conf";
 
   /** Default RM configuration file name. (Classpath resource pathname.) */
-  public static final String RM_CONFIG_DEFAULT_RESOURCE_PATHNAME = "drill-rm-default.conf";
+  String RM_CONFIG_DEFAULT_RESOURCE_PATHNAME = "drill-rm-default.conf";
 
   /** Distribution Specific RM Override configuration file name.  (Classpath resource pathname.) */
-  public static final String RM_CONFIG_DISTRIBUTION_RESOURCE_PATHNAME = "drill-rm-distrib.conf";
+  String RM_CONFIG_DISTRIBUTION_RESOURCE_PATHNAME = "drill-rm-distrib.conf";
 
   /** RM Override configuration file name. (Classpath resource pathname.) */
-  public static final String RM_CONFIG_OVERRIDE_RESOURCE_PATHNAME = "drill-rm-override.conf";
-
-  // suppress default constructor
-  private ConfigConstants() {
-  }
+  String RM_CONFIG_OVERRIDE_RESOURCE_PATHNAME = "drill-rm-override.conf";
 }
diff --git a/common/src/main/java/org/apache/drill/common/config/DrillConfig.java b/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
index a655943..71a0697 100644
--- a/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
+++ b/common/src/main/java/org/apache/drill/common/config/DrillConfig.java
@@ -30,6 +30,8 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
 import org.reflections.util.ClasspathHelper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.lang.management.ManagementFactory;
 import java.lang.management.RuntimeMXBean;
@@ -43,11 +45,10 @@
 import java.util.concurrent.TimeUnit;
 
 public class DrillConfig extends NestedConfig {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillConfig.class);
+  private static final Logger logger = LoggerFactory.getLogger(DrillConfig.class);
 
   private final ImmutableList<String> startupArguments;
 
-  @SuppressWarnings("restriction")
   private static final long MAX_DIRECT_MEMORY = PlatformDependent.maxDirectMemory();
 
   @VisibleForTesting
@@ -116,7 +117,6 @@
     return create(null, false);
   }
 
-
   /**
    * DrillConfig loads up Drill configuration information. It does this utilizing a combination of classpath scanning
    * and Configuration fallbacks provided by the TypeSafe configuration library. The order of precedence is as
diff --git a/common/src/main/java/org/apache/drill/common/exceptions/DrillRuntimeException.java b/common/src/main/java/org/apache/drill/common/exceptions/DrillRuntimeException.java
index 35cf586..31acc07 100644
--- a/common/src/main/java/org/apache/drill/common/exceptions/DrillRuntimeException.java
+++ b/common/src/main/java/org/apache/drill/common/exceptions/DrillRuntimeException.java
@@ -40,12 +40,12 @@
     super(cause);
   }
 
-  public static DrillRuntimeException format(String format, Object...args) {
-    return format(null, format, args);
+  public static DrillRuntimeException create(String format, Object...args) {
+    return create(null, format, args);
   }
 
-  public static DrillRuntimeException format(Throwable cause, String format, Object...args) {
-    throw new DrillRuntimeException(String.format(format, args), cause);
+  public static DrillRuntimeException create(Throwable cause, String format, Object...args) {
+    return new DrillRuntimeException(String.format(format, args), cause);
   }
 
   /**
diff --git a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
index bf6508e..03eaa59 100644
--- a/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
+++ b/common/src/main/java/org/apache/drill/common/exceptions/UserException.java
@@ -398,6 +398,10 @@
     return new Builder(DrillPBError.ErrorType.INTERNAL_ERROR, cause);
   }
 
+  public static Builder internalError() {
+    return new Builder(DrillPBError.ErrorType.INTERNAL_ERROR, null);
+  }
+
   /**
    * Indicates an unspecified error: code caught the exception, but does not have
    * visibility into the cause well enough to pick one of the more specific
diff --git a/common/src/main/java/org/apache/drill/common/scanner/BuildTimeScan.java b/common/src/main/java/org/apache/drill/common/scanner/BuildTimeScan.java
index 1e52d4f..794983e 100644
--- a/common/src/main/java/org/apache/drill/common/scanner/BuildTimeScan.java
+++ b/common/src/main/java/org/apache/drill/common/scanner/BuildTimeScan.java
@@ -32,6 +32,8 @@
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.DrillRuntimeException;
 import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.ObjectReader;
@@ -42,7 +44,7 @@
  * @see BuildTimeScan#main(String[])
  */
 public class BuildTimeScan {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(BuildTimeScan.class);
+  private static final Logger logger = LoggerFactory.getLogger(BuildTimeScan.class);
   private static final String REGISTRY_FILE = "META-INF/drill-module-scan/registry.json";
 
   private static final ObjectMapper mapper = new ObjectMapper().enable(INDENT_OUTPUT);
diff --git a/common/src/main/java/org/apache/drill/common/scanner/ClassPathScanner.java b/common/src/main/java/org/apache/drill/common/scanner/ClassPathScanner.java
index eeec2d6..6449b6f 100644
--- a/common/src/main/java/org/apache/drill/common/scanner/ClassPathScanner.java
+++ b/common/src/main/java/org/apache/drill/common/scanner/ClassPathScanner.java
@@ -46,7 +46,8 @@
 import org.reflections.scanners.AbstractScanner;
 import org.reflections.util.ConfigurationBuilder;
 import org.reflections.util.FilterBuilder;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.drill.shaded.guava.com.google.common.collect.HashMultimap;
 import org.apache.drill.shaded.guava.com.google.common.collect.Multimap;
@@ -83,7 +84,7 @@
  * At runtime only the locations that have not been scanned yet will be scanned.
  */
 public final class ClassPathScanner {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClassPathScanner.class);
+  private static final Logger logger = LoggerFactory.getLogger(ClassPathScanner.class);
   private static final JavassistAdapter METADATA_ADAPTER = new JavassistAdapter();
 
   /** Configuration pathname to list of names of packages to scan for implementations. */
@@ -96,15 +97,15 @@
   private static final String IMPLEMENTATIONS_SCAN_ANNOTATIONS = "drill.classpath.scanning.annotations";
 
   /** Configuration pathname to turn off build time caching. */
-  private static final String IMPLEMENTATIONS_SCAN_CACHE = "drill.classpath.scanning.cache.enabled";
+  public static final String IMPLEMENTATIONS_SCAN_CACHE = "drill.classpath.scanning.cache.enabled";
 
   /**
    * scans the inheritance tree
    */
   private static class SubTypesScanner extends AbstractScanner {
 
-    private Multimap<String, ChildClassDescriptor> parentsChildren = HashMultimap.create();
-    private Multimap<String, ChildClassDescriptor> children = HashMultimap.create();
+    private final Multimap<String, ChildClassDescriptor> parentsChildren = HashMultimap.create();
+    private final Multimap<String, ChildClassDescriptor> children = HashMultimap.create();
 
     public SubTypesScanner(List<ParentClassDescriptor> parentImplementations) {
       for (ParentClassDescriptor parentClassDescriptor : parentImplementations) {
@@ -148,7 +149,7 @@
   }
 
   /**
-   * converts the annotation attribute value into a list of string to simplify
+   * Converts the annotation attribute value into a list of strings to simplify handling
    */
   private static class ListingMemberValueVisitor implements MemberValueVisitor {
     private final List<String> values;
@@ -263,7 +264,6 @@
         }
         if (isAnnotated) {
           List<AnnotationDescriptor> classAnnotations = getAnnotationDescriptors(annotations);
-          @SuppressWarnings("unchecked")
           List<FieldInfo> classFields = classFile.getFields();
           List<FieldDescriptor> fieldDescriptors = new ArrayList<>(classFields.size());
           for (FieldInfo field : classFields) {
@@ -283,7 +283,6 @@
       List<AnnotationDescriptor> annotationDescriptors = new ArrayList<>(annotationsAttr.numAnnotations());
       for (javassist.bytecode.annotation.Annotation annotation : annotationsAttr.getAnnotations()) {
         // Sigh: javassist uses raw collections (is this 2002?)
-        @SuppressWarnings("unchecked")
         Set<String> memberNames = annotation.getMemberNames();
         List<AttributeDescriptor> attributes = new ArrayList<>();
         if (memberNames != null) {
diff --git a/common/src/main/java/org/apache/drill/common/scanner/RunTimeScan.java b/common/src/main/java/org/apache/drill/common/scanner/RunTimeScan.java
index cc86c67..b5033c7 100644
--- a/common/src/main/java/org/apache/drill/common/scanner/RunTimeScan.java
+++ b/common/src/main/java/org/apache/drill/common/scanner/RunTimeScan.java
@@ -29,14 +29,13 @@
 
 /**
  * Utility to scan classpath at runtime
- *
  */
 public class RunTimeScan {
 
-  /** result of prescan */
+  /** Result of prescan */
   private static final ScanResult PRESCANNED = BuildTimeScan.load();
 
-  /** urls of the locations (classes directory or jar) to scan that don't have a registry in them */
+  /** URLs of the locations (classes directory or jar) to scan that don't have a registry in them */
   private static final Collection<URL> NON_PRESCANNED_MARKED_PATHS = getNonPrescannedMarkedPaths();
 
   /**
@@ -49,8 +48,8 @@
   }
 
   /**
-   * loads prescanned classpath info and scans for extra ones based on configuration.
-   * (unless prescan is disabled with {@see ClassPathScanner#IMPLEMENTATIONS_SCAN_CACHE}=falses)
+   * Loads prescanned classpath info and scans for extra ones based on configuration.
+   * (unless prescan is disabled with {@link ClassPathScanner#IMPLEMENTATIONS_SCAN_CACHE}{@code =false})
    * @param config to retrieve the packages to scan
    * @return the scan result
    */
@@ -96,5 +95,4 @@
         PRESCANNED.getScannedAnnotations(),
         ClassPathScanner.emptyResult());
   }
-
 }
diff --git a/common/src/main/java/org/apache/drill/common/scanner/persistence/ScanResult.java b/common/src/main/java/org/apache/drill/common/scanner/persistence/ScanResult.java
index ed3a58b..cbc07e3 100644
--- a/common/src/main/java/org/apache/drill/common/scanner/persistence/ScanResult.java
+++ b/common/src/main/java/org/apache/drill/common/scanner/persistence/ScanResult.java
@@ -41,12 +41,14 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.drill.shaded.guava.com.google.common.collect.HashMultimap;
 import org.apache.drill.shaded.guava.com.google.common.collect.Multimap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
  * The root doc of the scan result
  */
 public final class ScanResult {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ScanResult.class);
+  private static final Logger logger = LoggerFactory.getLogger(ScanResult.class);
 
   private final List<String> scannedPackages;
   private final Set<String> scannedClasses;
@@ -126,7 +128,7 @@
   }
 
   /**
-   * This will load all the scanned classes for this parent as a side effect
+   * Loads all the scanned classes for this parent as a side effect
    * @param c the parent
    * @return all the classes found
    */
@@ -154,7 +156,6 @@
     }
   }
 
-
   /**
    * @param c the annotation class name
    * @return the descriptor of the annotated class
@@ -209,5 +210,4 @@
         merge(annotatedClasses, other.annotatedClasses),
         newImplementations);
   }
-
-}
\ No newline at end of file
+}
diff --git a/common/src/main/java/org/apache/drill/exec/util/ActionOnFile.java b/common/src/main/java/org/apache/drill/exec/util/ActionOnFile.java
index cca1e77..af3209f 100644
--- a/common/src/main/java/org/apache/drill/exec/util/ActionOnFile.java
+++ b/common/src/main/java/org/apache/drill/exec/util/ActionOnFile.java
@@ -25,8 +25,11 @@
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
 /**
- * It defines possible actions on the file and performs the necessary action
+ * Defines possible actions on the file and performs the necessary action
  */
 public enum ActionOnFile {
 
@@ -39,7 +42,8 @@
   },
 
   /**
-   * Rename the file by adding current timestamp value with "yyyyMMdd_HHmmss" format before last dot of original file name<p>
+   * Renames the file by adding current timestamp value with "yyyyMMdd_HHmmss"
+   * format before last dot of original file name<p>
    * Example:<br>
    * Original file name: "storage-plugins-override.conf"<br>
    * New file name: "storage-plugins-override-20180703_033354.conf"
@@ -63,7 +67,7 @@
   },
 
   /**
-   * It removes the file
+   * Removes the file
    */
   REMOVE {
     @Override
@@ -77,10 +81,10 @@
     }
   };
 
-  private static final org.slf4j.Logger logger =  org.slf4j.LoggerFactory.getLogger(ActionOnFile.class);
+  private static final Logger logger = LoggerFactory.getLogger(ActionOnFile.class);
 
   /**
-   * This is an action which should be performed on the file
+   * Action which should be performed on the file
    * @param url the file URL
    */
   public abstract void action(URL url);
diff --git a/common/src/test/java/org/apache/drill/test/DirTestWatcher.java b/common/src/test/java/org/apache/drill/test/DirTestWatcher.java
index 6bdc951..839fbac 100644
--- a/common/src/test/java/org/apache/drill/test/DirTestWatcher.java
+++ b/common/src/test/java/org/apache/drill/test/DirTestWatcher.java
@@ -27,30 +27,40 @@
 
 /**
  * <p>
- * This class is used to create consistently named and safe temp directories for unit tests.
+ * This class is used to create consistently named and safe temp directories for
+ * unit tests.
+ * </p>
+ * <p>
+ * A {@link DirTestWatcher} is added to a test by declaring it as a JUnit
+ * {@link org.junit.Rule}. A {@link org.junit.Rule Rule} is a piece of code that
+ * is run before and after every JUnit test marked with the
+ * {@link org.junit.Test Test} annotation. When the {@link DirTestWatcher} is
+ * added to a test class the {@link DirTestWatcher} will create a temp directory
+ * before each of your {@link org.junit.Test Test}s and optionally delete the
+ * temp directory after each of your {@link org.junit.Test Test}s. The <b>base
+ * temp directory</b> created by the {@link DirTestWatcher} is in the
+ * <b>target</b> folder of the maven project and has the form <b>(my test class
+ * fully qualified name)/(my test method name)</b>. So in the context of the
+ * code example below, the temp directory created for each test in <b>target</b>
+ * will be <b>my.proj.MyTestClass/myTestMethod1</b> and
+ * <b>my.proj.MyTestClass/myTestMethod2</b> respectively.
+ * </p>
+ * <p>
+ * The temp directory created by the {@link DirTestWatcher} can be used within a
+ * test by simply calling the {@link DirTestWatcher#getDir()} method on the
+ * {@link DirTestWatcher} within your unit test.
  * </p>
  *
  * <p>
- * A {@link DirTestWatcher} is added to a test by declaring it as a JUnit {@link org.junit.Rule}. A {@link org.junit.Rule Rule} is
- * a piece of code that is run before and after every JUnit test marked with the {@link org.junit.Test Test} annotation. When the
- * {@link DirTestWatcher} is added to a test class the {@link DirTestWatcher} will create a temp directory before each of your
- * {@link org.junit.Test Test}s and optionally delete the temp directory after each of your {@link org.junit.Test Test}s. The <b>base temp directory</b>
- * created by the {@link DirTestWatcher} is in the <b>target</b> folder of the maven project and has the form
- * <b>(my test class fully qualified name)/(my test method name)</b>. So in the context of the code example below, the temp directory created for
- * each test in <b>target</b> will be <b>my.proj.MyTestClass/myTestMethod1</b> and <b>my.proj.MyTestClass/myTestMethod2</b> respectively.
- * </p>
- *
- * <p>
- * The temp directory created by the {@link DirTestWatcher} can be used within a test by simply calling the {@link DirTestWatcher#getDir()}
- * method on the {@link DirTestWatcher} within your unit test.
- * </p>
- *
- * <p>
- * By default, the {@link DirTestWatcher} deletes the temp directory it creates at the end of each {@link org.junit.Test Test}. However, you can create a {@link DirTestWatcher}
- * by doing {@code new DirTestWatcher(false)} to disable the deletion of temp directories after a test. This is useful if you want to examine files after a test runs.
+ * By default, the {@link DirTestWatcher} deletes the temp directory it creates
+ * at the end of each {@link org.junit.Test Test}. However, you can create a
+ * {@link DirTestWatcher} by doing {@code new DirTestWatcher(false)} to disable
+ * the deletion of temp directories after a test. This is useful if you want to
+ * examine files after a test runs.
  * </p>
  *
  * <pre>
+ * <code>
  * package my.proj;
  *
  * public class MyTestClass {
@@ -69,11 +79,14 @@
  *     // Do stuff in the temp directory
  *   }
  * }
+ * </code>
  * </pre>
  *
  * <p>
- * <b>Note:</b> In the code sample above, the directories returned by {@link DirTestWatcher#getDir()} in myTestMethod1 and myTestMethod2 are
- * <b>my.proj.MyTestClass/myTestMethod1</b> and <b>my.proj.MyTestClass/myTestMethod2</b> respectively.
+ * <b>Note:</b> In the code sample above, the directories returned by
+ * {@link DirTestWatcher#getDir()} in myTestMethod1 and myTestMethod2 are
+ * <b>my.proj.MyTestClass/myTestMethod1</b> and
+ * <b>my.proj.MyTestClass/myTestMethod2</b> respectively.
  * </p>
  */
 public class DirTestWatcher extends TestWatcher {
@@ -84,15 +97,20 @@
   private boolean deleteDirAtEnd = true;
 
   /**
-   * Creates a {@link DirTestWatcher} that deletes temp directories after the {@link TestWatcher} completes.
+   * Creates a {@link DirTestWatcher} that deletes temp directories after the
+   * {@link TestWatcher} completes.
    */
   public DirTestWatcher() {
   }
 
   /**
-   * Creates a {@link DirTestWatcher} which can delete or keep the temp directory after the {@link TestWatcher} completes.
-   * @param deleteDirAtEnd When true the temp directory created by the {@link DirTestWatcher} is deleted. When false the
-   *                       temp directory created by the {@link DirTestWatcher} is not deleted.
+   * Creates a {@link DirTestWatcher} which can delete or keep the temp
+   * directory after the {@link TestWatcher} completes.
+   *
+   * @param deleteDirAtEnd
+   *          When true the temp directory created by the {@link DirTestWatcher}
+   *          is deleted. When false the temp directory created by the
+   *          {@link DirTestWatcher} is not deleted.
    */
   public DirTestWatcher(boolean deleteDirAtEnd) {
     this.deleteDirAtEnd = deleteDirAtEnd;
@@ -101,9 +119,10 @@
   @Override
   protected void starting(Description description) {
     if (description.getMethodName() != null) {
-      dirPath = Paths.get(".","target", description.getClassName(), description.getMethodName()).toString();
+      dirPath = Paths.get(".", "target", description.getClassName(),
+          description.getMethodName()).toString();
     } else {
-      dirPath = Paths.get(".","target", description.getClassName()).toString();
+      dirPath = Paths.get(".", "target", description.getClassName()).toString();
     }
 
     dir = new File(dirPath);
@@ -122,15 +141,16 @@
   }
 
   /**
-   * Creates a sub directory with the given relative path in current temp directory
-   * @param relativeDirPath The relative path of the sub directory to create in the current temp directory.
+   * Creates a sub directory with the given relative path in current temp
+   * directory
+   *
+   * @param relativeDirPath
+   *          The relative path of the sub directory to create in the current
+   *          temp directory.
    * @return The {@link java.io.File} object of the newly created sub directory.
    */
   public File makeSubDir(Path relativeDirPath) {
-    File subDir = dir
-      .toPath()
-      .resolve(relativeDirPath)
-      .toFile();
+    File subDir = dir.toPath().resolve(relativeDirPath).toFile();
     subDir.mkdirs();
     return subDir;
   }
@@ -143,6 +163,7 @@
 
   /**
    * Gets the {@link java.io.File} object of the current temp directory.
+   *
    * @return The {@link java.io.File} object of the current temp directory.
    */
   public File getDir() {
@@ -159,8 +180,9 @@
       }
     }
 
-    String message = String.format("Failed to create directory within %s attempts (tried %s0 to %s)",
-      TEMP_DIR_ATTEMPTS, baseName, baseName + (TEMP_DIR_ATTEMPTS - 1));
+    String message = String.format(
+        "Failed to create directory within %s attempts (tried %s0 to %s)",
+        TEMP_DIR_ATTEMPTS, baseName, baseName + (TEMP_DIR_ATTEMPTS - 1));
     throw new IllegalStateException(message);
   }
 }
diff --git a/contrib/format-esri/src/main/resources/bootstrap-format-plugins.json b/contrib/format-esri/src/main/resources/bootstrap-format-plugins.json
index cc4f74f..c4e2daf 100644
--- a/contrib/format-esri/src/main/resources/bootstrap-format-plugins.json
+++ b/contrib/format-esri/src/main/resources/bootstrap-format-plugins.json
@@ -8,6 +8,14 @@
         }
       }
     },
+    "cp": {
+      "type": "file",
+      "formats": {
+        "shp": {
+          "type": "shp"
+        }
+      }
+    },
     "s3": {
       "type": "file",
       "formats": {
diff --git a/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java b/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
index a4532a6..1df2687 100644
--- a/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
+++ b/contrib/format-esri/src/test/java/org/apache/drill/exec/store/esri/TestShapefileFormatPlugin.java
@@ -43,8 +43,6 @@
   public static void setup() throws Exception {
     ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher));
 
-    ShpFormatConfig formatConfig = new ShpFormatConfig();
-    cluster.defineFormat("dfs", "shp", formatConfig);
     dirTestWatcher.copyResourceToRoot(Paths.get("shapefiles/"));
   }
 
diff --git a/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatConfig.java b/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatConfig.java
index 6e2bf78..b347269 100644
--- a/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatConfig.java
+++ b/contrib/format-excel/src/main/java/org/apache/drill/exec/store/excel/ExcelFormatConfig.java
@@ -20,6 +20,8 @@
 
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+
+import org.apache.drill.common.PlanStringBuilder;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.exec.store.excel.ExcelBatchReader.ExcelReaderConfig;
 
@@ -35,6 +37,12 @@
   // This is the theoretical maximum number of rows in an Excel spreadsheet
   private final int MAX_ROWS = 1048576;
 
+  // TODO: Bad things happen if fields change after created.
+  // That is, if this config is stored in the plugin registry, then
+  // later modified.
+  // Change all these to be private final, and add constructor.
+  // See DRILL-7612.
+
   public List<String> extensions = Collections.singletonList("xlsx");
 
   public int headerRow;
@@ -86,7 +94,7 @@
   @Override
   public int hashCode() {
     return Arrays.hashCode(
-      new Object[]{extensions, headerRow, lastRow, sheetName, firstColumn, lastColumn, allTextMode});
+      new Object[]{extensions, headerRow, lastRow, firstColumn, lastColumn, allTextMode, sheetName});
   }
 
   @Override
@@ -98,11 +106,25 @@
       return false;
     }
     ExcelFormatConfig other = (ExcelFormatConfig) obj;
-    return Objects.equals(headerRow, other.headerRow)
+    return Objects.equals(extensions, other.extensions)
+      && Objects.equals(headerRow, other.headerRow)
       && Objects.equals(lastRow, other.lastRow)
       && Objects.equals(firstColumn, other.firstColumn)
       && Objects.equals(lastColumn, other.lastColumn)
-      && Objects.equals(sheetName, other.sheetName)
-      && Objects.equals(allTextMode, other.allTextMode);
+      && Objects.equals(allTextMode, other.allTextMode)
+      && Objects.equals(sheetName, other.sheetName);
+  }
+
+  @Override
+  public String toString() {
+    return new PlanStringBuilder(this)
+        .field("extensions", extensions)
+        .field("sheetName", sheetName)
+        .field("headerRow", headerRow)
+        .field("lastRow", lastRow)
+        .field("firstColumn", firstColumn)
+        .field("lastColumn", lastColumn)
+        .field("allTextMode", allTextMode)
+        .toString();
   }
 }
diff --git a/contrib/format-excel/src/main/resources/bootstrap-format-plugins.json b/contrib/format-excel/src/main/resources/bootstrap-format-plugins.json
index ef6765c..a7a142c 100644
--- a/contrib/format-excel/src/main/resources/bootstrap-format-plugins.json
+++ b/contrib/format-excel/src/main/resources/bootstrap-format-plugins.json
@@ -11,6 +11,17 @@
         }
       }
     },
+    "cp": {
+      "type": "file",
+      "formats": {
+        "excel": {
+          "type": "excel",
+          "extensions": [
+            "xlsx"
+          ]
+        }
+      }
+    },
     "s3": {
       "type": "file",
       "formats": {
diff --git a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
index 6a85c25..fb7df5c 100644
--- a/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
+++ b/contrib/format-excel/src/test/java/org/apache/drill/exec/store/excel/TestExcelFormat.java
@@ -63,9 +63,6 @@
   public static void setup() throws Exception {
     ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher));
 
-    ExcelFormatConfig formatConfig = new ExcelFormatConfig();
-    cluster.defineFormat("cp", "excel", formatConfig);
-
     // Needed for compressed file unit test
     dirTestWatcher.copyResourceToRoot(Paths.get("excel/"));
   }
diff --git a/contrib/format-hdf5/src/main/resources/bootstrap-format-plugins.json b/contrib/format-hdf5/src/main/resources/bootstrap-format-plugins.json
index 99f74c3..1bee10f 100644
--- a/contrib/format-hdf5/src/main/resources/bootstrap-format-plugins.json
+++ b/contrib/format-hdf5/src/main/resources/bootstrap-format-plugins.json
@@ -11,6 +11,17 @@
         }
       }
     },
+    "cp": {
+      "type": "file",
+      "formats": {
+        "hdf5": {
+          "type": "hdf5",
+          "extensions": [
+            "h5"
+          ]
+        }
+      }
+    },
     "s3": {
       "type": "file",
       "formats": {
diff --git a/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java b/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
index 39d778e..a34488b 100644
--- a/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
+++ b/contrib/format-hdf5/src/test/java/org/apache/drill/exec/store/hdf5/TestHDF5Format.java
@@ -63,8 +63,6 @@
     ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
     startCluster(builder);
 
-    HDF5FormatConfig formatConfig = new HDF5FormatConfig();
-    cluster.defineFormat("dfs", "hdf5", formatConfig);
     dirTestWatcher.copyResourceToRoot(Paths.get("hdf5/"));
   }
 
diff --git a/contrib/format-ltsv/src/main/resources/bootstrap-format-plugins.json b/contrib/format-ltsv/src/main/resources/bootstrap-format-plugins.json
index 3dda8cf..786c9f1 100644
--- a/contrib/format-ltsv/src/main/resources/bootstrap-format-plugins.json
+++ b/contrib/format-ltsv/src/main/resources/bootstrap-format-plugins.json
@@ -11,6 +11,17 @@
         }
       }
     },
+    "cp": {
+      "type": "file",
+      "formats": {
+        "ltsv": {
+          "type": "ltsv",
+          "extensions": [
+            "ltsv"
+          ]
+        }
+      }
+    },
     "s3": {
       "type": "file",
       "formats": {
diff --git a/contrib/format-ltsv/src/test/java/org/apache/drill/exec/store/ltsv/TestLTSVRecordReader.java b/contrib/format-ltsv/src/test/java/org/apache/drill/exec/store/ltsv/TestLTSVRecordReader.java
index 61f65f4..419bb6f 100644
--- a/contrib/format-ltsv/src/test/java/org/apache/drill/exec/store/ltsv/TestLTSVRecordReader.java
+++ b/contrib/format-ltsv/src/test/java/org/apache/drill/exec/store/ltsv/TestLTSVRecordReader.java
@@ -17,36 +17,22 @@
  */
 package org.apache.drill.exec.store.ltsv;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
 import org.apache.drill.common.exceptions.UserException;
-import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.exec.proto.UserBitShared;
-import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.store.dfs.FileSystemConfig;
 import org.apache.drill.test.ClusterFixture;
 import org.apache.drill.test.ClusterTest;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
-import java.util.HashMap;
-import java.util.Map;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 public class TestLTSVRecordReader extends ClusterTest {
 
   @BeforeClass
   public static void setup() throws Exception {
     startCluster(ClusterFixture.builder(dirTestWatcher));
-
-    DrillbitContext context = cluster.drillbit().getContext();
-    FileSystemConfig original = (FileSystemConfig) context.getStorage().getPlugin("cp").getConfig();
-    Map<String, FormatPluginConfig> newFormats = new HashMap<>(original.getFormats());
-    newFormats.put("ltsv", new LTSVFormatPluginConfig());
-    FileSystemConfig pluginConfig = new FileSystemConfig(original.getConnection(), original.getConfig(), original.getWorkspaces(), newFormats);
-    pluginConfig.setEnabled(true);
-    context.getStorage().createOrUpdate("cp", pluginConfig, true);
   }
 
   @Test
@@ -96,5 +82,4 @@
       assertTrue(e.getMessage().contains("Failure while reading messages from /invalid.ltsv. Record reader was at record: 1"));
     }
   }
-
 }
diff --git a/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/MaprDBTestsSuite.java b/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/MaprDBTestsSuite.java
index 642a989..aea03c9 100644
--- a/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/MaprDBTestsSuite.java
+++ b/contrib/format-maprdb/src/test/java/com/mapr/drill/maprdb/tests/MaprDBTestsSuite.java
@@ -138,7 +138,7 @@
 
           FileSystemConfig pluginConfig = ctx.getLpPersistence().getMapper().readValue(pluginConfStr, FileSystemConfig.class);
           // create the plugin with "hbase" name so that we can run HBase unit tests against them
-          pluginRegistry.createOrUpdate("hbase", pluginConfig, true);
+          pluginRegistry.put("hbase", pluginConfig);
         }
       }
     }
diff --git a/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatConfig.java b/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatConfig.java
index 0f60eeb..184031a 100644
--- a/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatConfig.java
+++ b/contrib/format-syslog/src/main/java/org/apache/drill/exec/store/syslog/SyslogFormatConfig.java
@@ -47,7 +47,7 @@
     return extensions;
   }
 
-  public void setExtensions(List ext) {
+  public void setExtensions(List<String> ext) {
     this.extensions = ext;
   }
 
diff --git a/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java b/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
index 084c413..a45a07f 100644
--- a/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
+++ b/contrib/format-syslog/src/test/java/org/apache/drill/exec/store/syslog/TestSyslogFormat.java
@@ -62,7 +62,7 @@
     final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
     pluginConfig.getFormats().put("sample", sampleConfig);
     pluginConfig.getFormats().put("flat", flattenedDataConfig);
-    pluginRegistry.createOrUpdate("cp", pluginConfig, false);
+    pluginRegistry.put("cp", pluginConfig);
   }
 
   @Test
@@ -196,13 +196,14 @@
             .buildSchema();
 
     RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
-            .addRow("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132", "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation", "Tester2", "27389", "192.168.2.132", "AUDIT", "4")
+            .addRow("Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132",
+                    "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation",
+                    "Tester2", "27389", "192.168.2.132", "AUDIT", "4")
             .build();
 
     new RowSetComparison(expected).verifyAndClearAll(results);
   }
 
-
   @Test
   public void testStarFlattenedStructuredDataQuery() throws RpcException {
     String sql = "SELECT * FROM cp.`syslog/test.syslog1`";
@@ -234,7 +235,11 @@
             .buildSchema();
 
     RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
-            .addRow(1438811939693L, 6, 10, "INFO", "AUTHPRIV", "192.168.2.132", "SecureAuth0", "23108", "ID52020", "{SecureAuth@27389=[UserAgent=Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko, UserHostAddress=192.168.2.132, BrowserSession=0gvhdi5udjuqtweprbgoxilc, Realm=SecureAuth0, Appliance=secureauthqa.gosecureauth.com, Company=SecureAuth Corporation, UserID=Tester2, PEN=27389, HostName=192.168.2.132, Category=AUDIT, Priority=4]}", "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132", "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation", "Tester2", "27389", "192.168.2.132", "AUDIT", "4", "Found the user for retrieving user's profile")
+            .addRow(1438811939693L, 6, 10, "INFO", "AUTHPRIV", "192.168.2.132", "SecureAuth0", "23108", "ID52020",
+                    "{SecureAuth@27389=[UserAgent=Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko, UserHostAddress=192.168.2.132, BrowserSession=0gvhdi5udjuqtweprbgoxilc, Realm=SecureAuth0, Appliance=secureauthqa.gosecureauth.com, Company=SecureAuth Corporation, UserID=Tester2, PEN=27389, HostName=192.168.2.132, Category=AUDIT, Priority=4]}",
+                    "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132",
+                    "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation",
+                    "Tester2", "27389", "192.168.2.132", "AUDIT", "4", "Found the user for retrieving user's profile")
             .build();
 
     new RowSetComparison(expected).verifyAndClearAll(results);
@@ -294,7 +299,11 @@
 
 
     RowSet expected = new RowSetBuilder(client.allocator(), expectedSchema)
-            .addRow(1438811939693L, 6, 10, "INFO", "AUTHPRIV", "192.168.2.132", "SecureAuth0", "23108", "", "{SecureAuth@27389=[UserAgent=Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko, UserHostAddress=192.168.2.132, BrowserSession=0gvhdi5udjuqtweprbgoxilc, Realm=SecureAuth0, Appliance=secureauthqa.gosecureauth.com, Company=SecureAuth Corporation, UserID=Tester2, PEN=27389, HostName=192.168.2.132, Category=AUDIT, Priority=4]}", "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132", "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation", "Tester2", "27389", "192.168.2.132", "AUDIT", "4", "Found the user for retrieving user's profile")
+            .addRow(1438811939693L, 6, 10, "INFO", "AUTHPRIV", "192.168.2.132", "SecureAuth0", "23108", "",
+                    "{SecureAuth@27389=[UserAgent=Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko, UserHostAddress=192.168.2.132, BrowserSession=0gvhdi5udjuqtweprbgoxilc, Realm=SecureAuth0, Appliance=secureauthqa.gosecureauth.com, Company=SecureAuth Corporation, UserID=Tester2, PEN=27389, HostName=192.168.2.132, Category=AUDIT, Priority=4]}",
+                    "Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko", "192.168.2.132",
+                    "0gvhdi5udjuqtweprbgoxilc", "SecureAuth0", "secureauthqa.gosecureauth.com", "SecureAuth Corporation",
+                    "Tester2", "27389", "192.168.2.132", "AUDIT", "4", "Found the user for retrieving user's profile")
             .build();
 
     new RowSetComparison(expected).verifyAndClearAll(results);
diff --git a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
index 2c0654c..a45efbf 100644
--- a/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
+++ b/contrib/storage-hbase/src/main/java/org/apache/drill/exec/store/hbase/HBaseUtils.java
@@ -142,7 +142,6 @@
     return Bytes.compareTo(left, right) < 0 ? left : right;
   }
 
-
   /**
    * Verify the presence of a column family in the schema path of the hbase table or whether the schema path is
    * the row key column.
@@ -158,10 +157,10 @@
     for (SchemaPath column : columns) {
       if (!(column.equals(DrillHBaseConstants.ROW_KEY_PATH) ||
           hTableDesc.hasFamily(HBaseUtils.getBytes(column.getRootSegment().getPath())))) {
-        DrillRuntimeException.format("The column family '%s' does not exist in HBase table: %s .",
+        throw DrillRuntimeException.create(
+            "The column family '%s' does not exist in the HBase table: `%s`.",
             column.getRootSegment().getPath(), hTableDesc.getNameAsString());
       }
     }
   }
-
 }
diff --git a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/BaseHBaseTest.java b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/BaseHBaseTest.java
index d2b3f16..402e2d2 100644
--- a/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/BaseHBaseTest.java
+++ b/contrib/storage-hbase/src/test/java/org/apache/drill/hbase/BaseHBaseTest.java
@@ -57,8 +57,8 @@
     storagePluginConfig.setEnabled(true);
     storagePluginConfig.setZookeeperPort(HBaseTestsSuite.getZookeeperPort());
 
-    storagePlugin = (HBaseStoragePlugin) pluginRegistry.createOrUpdate(HBASE_STORAGE_PLUGIN_NAME,
-        storagePluginConfig, true);
+    pluginRegistry.put(HBASE_STORAGE_PLUGIN_NAME, storagePluginConfig);
+    storagePlugin = (HBaseStoragePlugin) pluginRegistry.getPlugin(HBASE_STORAGE_PLUGIN_NAME);
   }
 
   @AfterClass
@@ -102,5 +102,4 @@
   protected String canonizeHBaseSQL(String sql) {
     return sql.replace("[TABLE_NAME]", HBaseTestsSuite.TEST_TABLE_1.getNameAsString());
   }
-
 }
diff --git a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
index 9cd556d..4e7fde9 100644
--- a/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
+++ b/contrib/storage-hive/core/src/main/java/org/apache/drill/exec/store/hive/HiveStoragePlugin.java
@@ -81,6 +81,7 @@
     return hiveConf;
   }
 
+  @Override
   public HiveStoragePluginConfig getConfig() {
     return config;
   }
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
index 5078272..bff25ac 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/hive/HiveTestFixture.java
@@ -226,7 +226,7 @@
         for (Drillbit drillbit : drillbits) {
           HiveStoragePluginConfig pluginConfig = new HiveStoragePluginConfig(new HashMap<>(pluginConf));
           pluginConfig.setEnabled(true);
-          drillbit.getContext().getStorage().createOrUpdate(pluginName, pluginConfig, true);
+          drillbit.getContext().getStorage().put(pluginName, pluginConfig);
         }
       } catch (ExecutionSetupException e) {
         throw new RuntimeException("Failed to add Hive storage plugin to drillbits", e);
@@ -238,7 +238,7 @@
     }
 
     public void removeHivePluginFrom(Iterable<Drillbit> drillbits) {
-      drillbits.forEach(bit -> bit.getContext().getStorage().deletePlugin(pluginName));
+      drillbits.forEach(bit -> bit.getContext().getStorage().remove(pluginName));
     }
 
     public void updateHivePlugin(Iterable<Drillbit> drillbits,
@@ -252,7 +252,7 @@
 
           HiveStoragePluginConfig newPluginConfig = storagePlugin.getConfig();
           newPluginConfig.getConfigProps().putAll(configOverride);
-          pluginRegistry.createOrUpdate(pluginName, newPluginConfig, true);
+          pluginRegistry.put(pluginName, newPluginConfig);
         }
       } catch (ExecutionSetupException e) {
         throw new RuntimeException("Failed to update Hive storage plugin for drillbits", e);
diff --git a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
index 422d44a..4fc85cc 100644
--- a/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
+++ b/contrib/storage-hive/core/src/test/java/org/apache/drill/exec/impersonation/hive/BaseTestHiveImpersonation.java
@@ -123,7 +123,7 @@
   }
 
   protected static void addHiveStoragePlugin(final Map<String, String> hiveConfig) throws Exception {
-    getDrillbitContext().getStorage().createOrUpdate(hivePluginName, createHiveStoragePlugin(hiveConfig), true);
+    getDrillbitContext().getStorage().put(hivePluginName, createHiveStoragePlugin(hiveConfig));
   }
 
   protected void showTablesHelper(final String db, List<String> expectedTables) throws Exception {
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java
index ed7a243..2e09231 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithH2IT.java
@@ -67,8 +67,8 @@
     sourceParameters.put("maxTotal", 5);
     JdbcStorageConfig jdbcStorageConfig = new JdbcStorageConfig("org.h2.Driver", connString, "root", "root", true, sourceParameters);
     jdbcStorageConfig.setEnabled(true);
-    cluster.defineStoragePlugin(ctx -> new JdbcStoragePlugin(jdbcStorageConfig, ctx, "h2"));
-    cluster.defineStoragePlugin(ctx -> new JdbcStoragePlugin(jdbcStorageConfig, ctx, "h2o"));
+    cluster.defineStoragePlugin("h2", jdbcStorageConfig);
+    cluster.defineStoragePlugin("h2o", jdbcStorageConfig);
   }
 
   @Test
diff --git a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
index 28b993b..49b36c2 100644
--- a/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
+++ b/contrib/storage-jdbc/src/test/java/org/apache/drill/exec/store/jdbc/TestJdbcPluginWithMySQLIT.java
@@ -74,7 +74,7 @@
         "mysqlUser", "mysqlPass", false, null);
     jdbcStorageConfig.setEnabled(true);
 
-    cluster.defineStoragePlugin(ctx -> new JdbcStoragePlugin(jdbcStorageConfig, ctx, "mysql"));
+    cluster.defineStoragePlugin("mysql", jdbcStorageConfig);
 
     if (osName.startsWith("linux")) {
       // adds storage plugin with case insensitive table names
@@ -82,7 +82,7 @@
           String.format("jdbc:mysql://localhost:%s/%s?useJDBCCompliantTimezoneShift=true", mysqlPort, mysqlDBName),
           "mysqlUser", "mysqlPass", true, null);
       jdbcCaseSensitiveStorageConfig.setEnabled(true);
-      cluster.defineStoragePlugin(ctx -> new JdbcStoragePlugin(jdbcCaseSensitiveStorageConfig, ctx, "mysqlCaseInsensitive"));
+      cluster.defineStoragePlugin("mysqlCaseInsensitive", jdbcCaseSensitiveStorageConfig);
     }
   }
 
diff --git a/contrib/storage-kafka/src/test/java/org/apache/drill/exec/store/kafka/KafkaTestBase.java b/contrib/storage-kafka/src/test/java/org/apache/drill/exec/store/kafka/KafkaTestBase.java
index effff77..f3c24e6 100644
--- a/contrib/storage-kafka/src/test/java/org/apache/drill/exec/store/kafka/KafkaTestBase.java
+++ b/contrib/storage-kafka/src/test/java/org/apache/drill/exec/store/kafka/KafkaTestBase.java
@@ -52,7 +52,7 @@
     kafkaConsumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "drill-test-consumer");
     storagePluginConfig = new KafkaStoragePluginConfig(kafkaConsumerProps);
     storagePluginConfig.setEnabled(true);
-    pluginRegistry.createOrUpdate(KafkaStoragePluginConfig.NAME, storagePluginConfig, true);
+    pluginRegistry.put(KafkaStoragePluginConfig.NAME, storagePluginConfig);
     testNoResult(String.format("alter session set `%s` = '%s'", ExecConstants.KAFKA_RECORD_READER,
         "org.apache.drill.exec.store.kafka.decoders.JsonMessageReader"));
     testNoResult(String.format("alter session set `%s` = %d", ExecConstants.KAFKA_POLL_TIMEOUT, 5000));
diff --git a/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/MongoTestBase.java b/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/MongoTestBase.java
index f1b4e4d..158e48b 100644
--- a/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/MongoTestBase.java
+++ b/contrib/storage-mongo/src/test/java/org/apache/drill/exec/store/mongo/MongoTestBase.java
@@ -39,7 +39,7 @@
   private static void initMongoStoragePlugin(String connectionURI) throws Exception {
     MongoStoragePluginConfig storagePluginConfig = new MongoStoragePluginConfig(connectionURI);
     storagePluginConfig.setEnabled(true);
-    pluginRegistry.createOrUpdate(MongoStoragePluginConfig.NAME, storagePluginConfig, true);
+    pluginRegistry.put(MongoStoragePluginConfig.NAME, storagePluginConfig);
 
     client.testBuilder()
         .sqlQuery("alter session set `%s` = %s",
@@ -51,8 +51,7 @@
 
   @AfterClass
   public static void tearDownMongoTestBase() throws Exception {
-    pluginRegistry.deletePlugin(MongoStoragePluginConfig.NAME);
+    pluginRegistry.remove(MongoStoragePluginConfig.NAME);
     MongoTestSuite.tearDownCluster();
   }
-
 }
diff --git a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java
index 539442a..c83a4e5 100644
--- a/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java
+++ b/contrib/storage-opentsdb/src/main/java/org/apache/drill/exec/store/openTSDB/OpenTSDBStoragePlugin.java
@@ -36,7 +36,8 @@
 
   private final ServiceImpl db;
 
-  public OpenTSDBStoragePlugin(OpenTSDBStoragePluginConfig configuration, DrillbitContext context, String name) throws IOException {
+  public OpenTSDBStoragePlugin(OpenTSDBStoragePluginConfig configuration, DrillbitContext context,
+      String name) throws IOException {
     super(context, name);
     this.schemaFactory = new OpenTSDBSchemaFactory(this, getName());
     this.engineConfig = configuration;
diff --git a/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java b/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java
index 4fe9cc2..8c03752 100644
--- a/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java
+++ b/contrib/storage-opentsdb/src/test/java/org/apache/drill/store/openTSDB/TestOpenTSDBPlugin.java
@@ -66,7 +66,7 @@
     OpenTSDBStoragePluginConfig storagePluginConfig =
         new OpenTSDBStoragePluginConfig(String.format("http://localhost:%s", portNumber));
     storagePluginConfig.setEnabled(true);
-    pluginRegistry.createOrUpdate(OpenTSDBStoragePluginConfig.NAME, storagePluginConfig, true);
+    pluginRegistry.put(OpenTSDBStoragePluginConfig.NAME, storagePluginConfig);
   }
 
   @Before
@@ -203,7 +203,7 @@
     OpenTSDBStoragePluginConfig storagePluginConfig =
         new OpenTSDBStoragePluginConfig(String.format("http://localhost:%s/", portNumber));
     storagePluginConfig.setEnabled(true);
-    pluginRegistry.createOrUpdate(OpenTSDBStoragePluginConfig.NAME, storagePluginConfig, true);
+    pluginRegistry.put(OpenTSDBStoragePluginConfig.NAME, storagePluginConfig);
     String query = "select * from information_schema.`views`";
     cluster.clientFixture()
         .queryBuilder()
diff --git a/distribution/src/main/resources/storage-plugins-override-example.conf b/distribution/src/main/resources/storage-plugins-override-example.conf
index 360ba2c..d471708 100644
--- a/distribution/src/main/resources/storage-plugins-override-example.conf
+++ b/distribution/src/main/resources/storage-plugins-override-example.conf
@@ -16,50 +16,50 @@
 # This file involves storage plugins configs, which can be updated on the Drill start-up.
 # This file is in HOCON format, see https://github.com/typesafehub/config/blob/master/HOCON.md for more information.
 
-  "storage":{
-    cp: {
-      type: "file",
-      connection: "classpath:///",
-      formats: {
-        "csv" : {
-          type: "text",
-          extensions: [ "csv" ],
-          delimiter: ","
-        }
+"storage": {
+  cp: {
+    type: "file",
+    connection: "classpath:///",
+    formats: {
+      "csv" : {
+        type: "text",
+        extensions: [ "csv" ],
+        delimiter: ","
       }
     }
   }
-  "storage":{
-    dfs: {
-      type: "file",
-      connection: "hdfs:///",
-      workspaces: {
-        "root": {
-          "location": "/",
-          "writable": false,
-          "defaultInputFormat": null,
-          "allowAccessOutsideWorkspace": false
-        }
-      },
-      formats: {
-        "parquet": {
-          "type": "parquet"
-        }
-      },
-      enabled: false
-    }
+}
+"storage": {
+  dfs: {
+    type: "file",
+    connection: "hdfs:///",
+    workspaces: {
+      "root": {
+        "location": "/",
+        "writable": false,
+        "defaultInputFormat": null,
+        "allowAccessOutsideWorkspace": false
+      }
+    },
+    formats: {
+      "parquet": {
+        "type": "parquet"
+      }
+    },
+    enabled: false
   }
-  "storage":{
-    mongo : {
-      type:"mongo",
-      connection:"mongodb://test_host:27017/",
-      enabled: true
-    }
+}
+"storage": {
+  mongo : {
+    type:"mongo",
+    connection:"mongodb://test_host:27017/",
+    enabled: true
   }
-  "storage": {
-    openTSDB: {
-      type: "openTSDB",
-      connection: "http://localhost:8888",
-      enabled: true
-    }
+}
+"storage": {
+  openTSDB: {
+    type: "openTSDB",
+    connection: "http://localhost:8888",
+    enabled: true
   }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
index 744f8b5..98146f3 100644
--- a/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/calcite/jdbc/DynamicRootSchema.java
@@ -32,20 +32,22 @@
 import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.SubSchemaWrapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
 /**
- * This class is to allow us loading schemas from storage plugins later when {@link #getSubSchema(String, boolean)}
+ * Loads schemas from storage plugins later when {@link #getSubSchema(String, boolean)}
  * is called.
  */
 public class DynamicRootSchema extends DynamicSchema {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DynamicRootSchema.class);
+  private static final Logger logger = LoggerFactory.getLogger(DynamicRootSchema.class);
 
-  private SchemaConfig schemaConfig;
-  private StoragePluginRegistry storages;
+  private final SchemaConfig schemaConfig;
+  private final StoragePluginRegistry storages;
 
   /** Creates a root schema. */
   DynamicRootSchema(StoragePluginRegistry storages, SchemaConfig schemaConfig) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
index 7532471..2619cd4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/ExecConstants.java
@@ -792,9 +792,6 @@
   public static final OptionValidator MIN_READER_WIDTH = new LongValidator(MIN_READER_WIDTH_KEY,
       new OptionDescription("Min width for text readers, mostly for testing."));
 
-  public static final String BOOTSTRAP_STORAGE_PLUGINS_FILE = "bootstrap-storage-plugins.json";
-  public static final String BOOTSTRAP_FORMAT_PLUGINS_FILE = "bootstrap-format-plugins.json";
-
   public static final String SKIP_RUNTIME_ROWGROUP_PRUNING_KEY = "exec.storage.skip_runtime_rowgroup_pruning";
   public static final OptionValidator SKIP_RUNTIME_ROWGROUP_PRUNING = new BooleanValidator(SKIP_RUNTIME_ROWGROUP_PRUNING_KEY,
     new OptionDescription("Enables skipping the runtime pruning of the rowgroups"));
@@ -1154,4 +1151,19 @@
   public static final String PARQUET_READER_ENABLE_MAP_SUPPORT = "store.parquet.reader.enable_map_support";
   public static final BooleanValidator PARQUET_READER_ENABLE_MAP_SUPPORT_VALIDATOR = new BooleanValidator(
       PARQUET_READER_ENABLE_MAP_SUPPORT, new OptionDescription("Enables Drill Parquet reader to read Parquet MAP type correctly. (Drill 1.17+)"));
+
+  // ---------------------------------------
+  // Storage-plugin related config constants
+
+  // Bootstrap plugin files configuration keys
+  public static final String BOOTSTRAP_STORAGE_PLUGINS_FILE = "drill.exec.storage.bootstrap.storage";
+  public static final String BOOTSTRAP_FORMAT_PLUGINS_FILE =  "drill.exec.storage.bootstrap.format";
+
+  public static final String UPGRADE_STORAGE_PLUGINS_FILE = "drill.exec.storage.upgrade.storage";
+
+  public static final String STORAGE_PLUGIN_REGISTRY_IMPL = "drill.exec.storage.registry";
+  public static final String ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE = "drill.exec.storage.action_on_plugins_override_file";
+
+  // Extra private plugin classes, used for testing
+  public static final String PRIVATE_CONNECTORS = "drill.exec.storage.private_connectors";
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/BaseTransientStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/BaseTransientStore.java
index 99ebf5d..941883c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/BaseTransientStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/BaseTransientStore.java
@@ -65,7 +65,7 @@
     });
   }
 
-  protected void fireListeners(final TransientStoreEvent event) {
+  protected void fireListeners(final TransientStoreEvent<?> event) {
     for (final TransientStoreListener listener:listeners) {
       listener.onChange(event);
     }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/TransientStoreEvent.java b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/TransientStoreEvent.java
index eb8f032..6de69ad 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/TransientStoreEvent.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/coord/store/TransientStoreEvent.java
@@ -54,7 +54,8 @@
   @Override
   public boolean equals(final Object obj) {
     if (obj instanceof TransientStoreEvent && obj.getClass().equals(getClass())) {
-      final TransientStoreEvent<V> other = (TransientStoreEvent<V>)obj;
+      @SuppressWarnings("unchecked")
+      final TransientStoreEvent<V> other = (TransientStoreEvent<V>) obj;
       return Objects.equal(type, other.type) && Objects.equal(key, other.key) && Objects.equal(value, other.value);
     }
     return super.equals(obj);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionImplementationRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionImplementationRegistry.java
index 4210067..44d062c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionImplementationRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/FunctionImplementationRegistry.java
@@ -71,21 +71,24 @@
 import org.apache.drill.exec.util.JarUtil;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
- * This class offers the registry for functions. Notably, in addition to Drill its functions
- * (in {@link LocalFunctionRegistry}), other PluggableFunctionRegistry (e.g., {@link org.apache.drill.exec.expr.fn.HiveFunctionRegistry})
- * is also registered in this class
+ * Registry for functions. Notably, in addition to Drill's own functions (in
+ * {@link LocalFunctionRegistry}), other PluggableFunctionRegistry
+ * implementations (e.g., {@link org.apache.drill.exec.expr.fn.HiveFunctionRegistry})
+ * are also registered in this class
  */
 public class FunctionImplementationRegistry implements FunctionLookupContext, AutoCloseable {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FunctionImplementationRegistry.class);
+  private static final Logger logger = LoggerFactory.getLogger(FunctionImplementationRegistry.class);
 
   private final LocalFunctionRegistry localFunctionRegistry;
   private final RemoteFunctionRegistry remoteFunctionRegistry;
   private final Path localUdfDir;
-  private boolean deleteTmpDir = false;
+  private boolean deleteTmpDir;
   private File tmpDir;
-  private List<PluggableFunctionRegistry> pluggableFuncRegistries = new ArrayList<>();
+  private final List<PluggableFunctionRegistry> pluggableFuncRegistries = new ArrayList<>();
   private OptionSet optionManager;
   private final boolean useDynamicUdfs;
 
@@ -109,7 +112,7 @@
     // rather than a crash, we provide a boot-time option, set only by
     // tests, to disable DUDF lookup.
 
-    useDynamicUdfs = ! config.getBoolean(ExecConstants.UDF_DISABLE_DYNAMIC);
+    useDynamicUdfs = !config.getBoolean(ExecConstants.UDF_DISABLE_DYNAMIC);
     localFunctionRegistry = new LocalFunctionRegistry(classpathScan);
 
     Set<Class<? extends PluggableFunctionRegistry>> registryClasses =
@@ -623,5 +626,4 @@
       FileUtils.deleteQuietly(new File(localDir, JarUtil.getSourceName(jarName)));
     }
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java
index f378d45..ff13a44 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/FunctionRegistryHolder.java
@@ -19,7 +19,8 @@
 
 import org.apache.drill.shaded.guava.com.google.common.collect.ArrayListMultimap;
 import org.apache.drill.shaded.guava.com.google.common.collect.ListMultimap;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.drill.common.AutoCloseables.Closeable;
 import org.apache.drill.common.concurrent.AutoCloseableLock;
 import org.apache.drill.exec.expr.fn.DrillFuncHolder;
@@ -86,8 +87,7 @@
  *
  */
 public class FunctionRegistryHolder implements AutoCloseable {
-
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(FunctionRegistryHolder.class);
+  private static final Logger logger = LoggerFactory.getLogger(FunctionRegistryHolder.class);
 
   private final ReadWriteLock readWriteLock = new ReentrantReadWriteLock();
   private final AutoCloseableLock readLock = new AutoCloseableLock(readWriteLock.readLock());
@@ -200,7 +200,7 @@
   /**
    * Retrieves all function names associated with the jar from {@link #jars}.
    * Returns empty list if jar is not registered.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @param jarName jar name
    * @return list of functions names associated from the jar
@@ -217,7 +217,7 @@
    * Uses guava {@link ListMultimap} structure to return data.
    * If no functions present, will return empty {@link ListMultimap}.
    * If version holder is not null, updates it with current registry version number.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @param version version holder
    * @return all functions which their holders
@@ -237,7 +237,7 @@
 
   /**
    * Returns list of functions with list of function holders for each functions without version number.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @return all functions which their holders
    */
@@ -249,7 +249,7 @@
    * Returns list of functions with list of function signatures for each functions.
    * Uses guava {@link ListMultimap} structure to return data.
    * If no functions present, will return empty {@link ListMultimap}.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @return all functions which their signatures
    */
@@ -267,7 +267,7 @@
    * Returns all function holders associated with function name.
    * If function is not present, will return empty list.
    * If version holder is not null, updates it with current registry version number.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @param functionName function name
    * @param version version holder
@@ -285,7 +285,7 @@
 
   /**
    * Returns all function holders associated with function name without version number.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @param functionName function name
    * @return list of function holders
@@ -296,7 +296,7 @@
 
   /**
    * Checks is jar is present in {@link #jars}.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @param jarName jar name
    * @return true if jar exists, else false
@@ -309,7 +309,7 @@
 
   /**
    * Returns quantity of functions stored in {@link #functions}.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @return quantity of functions
    */
@@ -323,7 +323,7 @@
    * Looks which jar in {@link #jars} contains passed function signature.
    * First looks by function name and if found checks if such function has passed function signature.
    * Returns jar name if found matching function signature, else null.
-   * This is read operation, so several users can perform this operation at the same time.
+   * This is a read operation, so several users can perform this operation at the same time.
    *
    * @param functionName function name
    * @param functionSignature function signature
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/LocalFunctionRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/LocalFunctionRegistry.java
index 4cf17cc..10db175 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/LocalFunctionRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/LocalFunctionRegistry.java
@@ -31,6 +31,8 @@
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
 import org.apache.drill.shaded.guava.com.google.common.collect.ListMultimap;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.calcite.sql.SqlOperator;
 import org.apache.commons.lang3.tuple.Pair;
 import org.apache.drill.common.scanner.persistence.AnnotatedClassDescriptor;
@@ -56,10 +58,11 @@
 
   public static final String BUILT_IN = "built-in";
 
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LocalFunctionRegistry.class);
+  private static final Logger logger = LoggerFactory.getLogger(LocalFunctionRegistry.class);
   private static final String functionSignaturePattern = "%s(%s)";
 
-  private static final ImmutableMap<String, Pair<Integer, Integer>> registeredFuncNameToArgRange = ImmutableMap.<String, Pair<Integer, Integer>> builder()
+  private static final ImmutableMap<String, Pair<Integer, Integer>> registeredFuncNameToArgRange =
+      ImmutableMap.<String, Pair<Integer, Integer>> builder()
       // CONCAT is allowed to take [1, infinity) number of arguments.
       // Currently, this flexibility is offered by DrillOptiq to rewrite it as
       // a nested structure
@@ -97,7 +100,8 @@
   }
 
   /**
-   * @return remote function registry version number with which local function registry is synced
+   * @return remote function registry version number with which local function
+   *         registry is synced
    */
   public int getVersion() {
     return registryHolder.getVersion();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/RemoteFunctionRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/RemoteFunctionRegistry.java
index f727a93..6c84c06 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/RemoteFunctionRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/expr/fn/registry/RemoteFunctionRegistry.java
@@ -43,6 +43,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -84,7 +86,7 @@
 public class RemoteFunctionRegistry implements AutoCloseable {
 
   private static final String REGISTRY_PATH = "registry";
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(RemoteFunctionRegistry.class);
+  private static final Logger logger = LoggerFactory.getLogger(RemoteFunctionRegistry.class);
   private static final ObjectMapper mapper = new ObjectMapper().enable(INDENT_OUTPUT);
 
   private final TransientStoreListener unregistrationListener;
@@ -226,7 +228,8 @@
     try {
       this.fs = FileSystem.get(conf);
     } catch (IOException e) {
-      DrillRuntimeException.format(e, "Error during file system %s setup", conf.get(FileSystem.FS_DEFAULT_NAME_KEY));
+      throw DrillRuntimeException.create(e,
+          "Error during file system %s setup", conf.get(FileSystem.FS_DEFAULT_NAME_KEY));
     }
 
     String root = fs.getHomeDirectory().toUri().getPath();
@@ -259,7 +262,7 @@
       FileStatus fileStatus = fs.getFileStatus(path);
       Preconditions.checkState(fileStatus.isDirectory(), "Area [%s] must be a directory", fullPath);
       FsPermission permission = fileStatus.getPermission();
-      // It is considered that process user has write rights on directory if:
+      // The process user has write rights on directory if:
       // 1. process user is owner of the directory and has write rights
       // 2. process user is in group that has write rights
       // 3. any user has write rights
@@ -276,8 +279,8 @@
       if (e instanceof DrillRuntimeException) {
         throw (DrillRuntimeException) e;
       }
-      // throws
-      DrillRuntimeException.format(e, "Error during udf area creation [%s] on file system [%s]", fullPath, fs.getUri());
+      throw DrillRuntimeException.create(e,
+          "Error during udf area creation [%s] on file system [%s]", fullPath, fs.getUri());
     }
     logger.info("Created remote udf area [{}] on file system [{}]", fullPath, fs.getUri());
     return path;
@@ -300,5 +303,4 @@
     REGISTRATION,
     UNREGISTRATION
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/PhysicalPlan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/PhysicalPlan.java
index 13c3c2a..9d8831e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/PhysicalPlan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/PhysicalPlan.java
@@ -38,11 +38,10 @@
 
 @JsonPropertyOrder({ "head", "graph" })
 public class PhysicalPlan {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(PhysicalPlan.class);
 
-  PlanProperties properties;
+  protected PlanProperties properties;
 
-  Graph<PhysicalOperator, Root, Leaf> graph;
+  protected Graph<PhysicalOperator, Root, Leaf> graph;
 
   @JsonCreator
   public PhysicalPlan(@JsonProperty("head") PlanProperties properties,
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SubScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SubScan.java
index 7ea73be..1542540 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SubScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/physical/base/SubScan.java
@@ -19,8 +19,9 @@
 
 
 /**
- * A SubScan operator represents the data scanned by a particular major/minor fragment.  This is in contrast to
- * a GroupScan operator, which represents all data scanned by a physical plan.
+ * A SubScan operator represents the data scanned by a particular major/minor
+ * fragment. This is in contrast to a GroupScan operator, which represents all
+ * data scanned by a physical plan.
  */
 public interface SubScan extends Scan {
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
index 920f9b2..f693744 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/PhysicalPlanReader.java
@@ -67,7 +67,7 @@
         .addSerializer(MajorType.class, new MajorTypeSerDe.Se())
         .addDeserializer(MajorType.class, new MajorTypeSerDe.De())
         .addDeserializer(DynamicPojoRecordReader.class,
-            new StdDelegatingDeserializer<>(new DynamicPojoRecordReader.Converter(lpMapper)))
+            new StdDelegatingDeserializer<>(new DynamicPojoRecordReader.Converter<>(lpMapper)))
         .addSerializer(Path.class, new PathSerDe.Se());
 
     lpMapper.registerModule(serDeModule);
@@ -103,8 +103,10 @@
     if (op instanceof FragmentRoot) {
       return (FragmentRoot) op;
     } else {
-      throw new UnsupportedOperationException(String.format("The provided json fragment doesn't have a FragmentRoot " +
-          "as its root operator.  The operator was %s.", op.getClass().getCanonicalName()));
+      throw new UnsupportedOperationException(String.format(
+          "The provided json fragment doesn't have a FragmentRoot " +
+          "as its root operator. The operator was %s.",
+          op.getClass().getCanonicalName()));
     }
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
index 9d74fa8..3b2a04f 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/logical/StoragePlugins.java
@@ -17,26 +17,26 @@
  */
 package org.apache.drill.exec.planner.logical;
 
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Optional;
 
-import org.apache.drill.common.config.DrillConfig;
-import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.common.map.CaseInsensitiveMap;
-import org.apache.drill.common.scanner.ClassPathScanner;
-import org.apache.drill.common.scanner.persistence.ScanResult;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
-import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 
+/**
+ * Map of storage plugin <i>configurations</i> indexed by name.
+ * Does not hold the storage plugin <i>connector</i> itself.
+ * <p>
+ * This class is serialized to JSON and represents the set of
+ * storage plugin configurations visible to Drill.
+ */
 public class StoragePlugins implements Iterable<Map.Entry<String, StoragePluginConfig>> {
 
   private final Map<String, StoragePluginConfig> storage;
@@ -48,17 +48,8 @@
     this.storage = caseInsensitiveStorage;
   }
 
-  public static void main(String[] args) throws Exception{
-    DrillConfig config = DrillConfig.create();
-    ScanResult scanResult = ClassPathScanner.fromPrescan(config);
-    LogicalPlanPersistence lpp = new LogicalPlanPersistence(config, scanResult);
-    String data = Resources.toString(Resources.getResource("storage-engines.json"), Charsets.UTF_8);
-    StoragePlugins se = lpp.getMapper().readValue(data,  StoragePlugins.class);
-    ByteArrayOutputStream os = new ByteArrayOutputStream();
-    lpp.getMapper().writeValue(System.out, se);
-    lpp.getMapper().writeValue(os, se);
-    se = lpp.getMapper().readValue(new ByteArrayInputStream(os.toByteArray()), StoragePlugins.class);
-    System.out.println(se);
+  public StoragePlugins() {
+    this(new HashMap<>());
   }
 
   @JsonProperty("storage")
@@ -140,4 +131,7 @@
     return storage.get(pluginName);
   }
 
+  public boolean isEmpty() {
+    return storage.isEmpty();
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java
index 0319c8f..4169a58 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/AbstractSqlHandler.java
@@ -24,13 +24,16 @@
 
 import org.apache.drill.exec.physical.PhysicalPlan;
 import org.apache.drill.exec.work.foreman.ForemanSetupException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.calcite.sql.SqlNode;
 
 public abstract class AbstractSqlHandler {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractSqlHandler.class);
+  static final Logger logger = LoggerFactory.getLogger(AbstractSqlHandler.class);
 
   public abstract PhysicalPlan getPlan(SqlNode sqlNode) throws ValidationException, RelConversionException, IOException, ForemanSetupException;
 
+  @SuppressWarnings("unchecked")
   public static <T> T unwrap(Object o, Class<T> clazz) throws ForemanSetupException {
     if (clazz.isAssignableFrom(o.getClass())) {
       return (T) o;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateFunctionHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateFunctionHandler.java
index b7eb936..3fe74d0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateFunctionHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/CreateFunctionHandler.java
@@ -41,6 +41,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.File;
 import java.io.IOException;
@@ -48,8 +50,7 @@
 import java.util.UUID;
 
 public class CreateFunctionHandler extends DefaultSqlHandler {
-
-  private static org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CreateFunctionHandler.class);
+  private static Logger logger = LoggerFactory.getLogger(CreateFunctionHandler.class);
 
   public CreateFunctionHandler(SqlHandlerConfig config) {
     super(config);
@@ -107,7 +108,6 @@
     }
   }
 
-
   /**
    * Instantiates coping of binary to local file system
    * and validates functions from this jar against local function registry.
@@ -341,6 +341,5 @@
         logger.warn(String.format("Error during deletion [%s]", path.toUri().getPath()), e);
       }
     }
-
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropFunctionHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropFunctionHandler.java
index 1ab94d0..7b0fad6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropFunctionHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/planner/sql/handlers/DropFunctionHandler.java
@@ -35,13 +35,14 @@
 import org.apache.drill.exec.work.foreman.ForemanSetupException;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.List;
 
 public class DropFunctionHandler extends DefaultSqlHandler {
-
-  private static org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DropFunctionHandler.class);
+  private static Logger logger = LoggerFactory.getLogger(DropFunctionHandler.class);
 
   public DropFunctionHandler(SqlHandlerConfig config) {
     super(config);
@@ -163,5 +164,4 @@
       logger.error("Error removing jar {} from area {}", jarName, area.toUri().getPath());
     }
   }
-
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
index fb0ff0f..180b2e0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/Drillbit.java
@@ -104,7 +104,7 @@
   private final BootStrapContext context;
   private final WebServer webServer;
   private final int gracePeriod;
-  private DrillbitStateManager stateManager;
+  private final DrillbitStateManager stateManager;
   private GracefulShutdownThread gracefulShutdownThread;
   private Thread shutdownHook;
 
@@ -241,8 +241,8 @@
     logger.info("Startup completed ({} ms).", w.elapsed(TimeUnit.MILLISECONDS));
   }
 
-  /*
-    Wait uninterruptibly
+  /**
+   * Wait uninterruptibly
    */
   private void waitForGracePeriod() {
     ExtendedLatch exitLatch = new ExtendedLatch();
@@ -255,16 +255,15 @@
     }
   }
 
-  /*
-
-   */
   public void shutdown() {
     this.close();
   }
- /*
-  The drillbit is moved into Quiescent state and the drillbit waits for grace period amount of time.
-  Then drillbit moves into draining state and waits for all the queries and fragments to complete.
-  */
+
+  /**
+   * The drillbit is moved into the Quiescent state and waits for the grace
+   * period to elapse. Then the drillbit moves into the Draining state and
+   * waits for all queries and fragments to complete.
+   */
   @Override
   public synchronized void close() {
     if (!stateManager.getState().equals(DrillbitState.ONLINE)) {
@@ -332,7 +331,6 @@
     if (interruptPollShutdown) {
       gracefulShutdownThread.interrupt();
     }
-
   }
 
   private void javaPropertiesToSystemOptions() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java
index 79d3bed..36c7b42 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitContext.java
@@ -22,6 +22,7 @@
 import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.compile.CodeCompiler;
 import org.apache.drill.exec.coord.ClusterCoordinator;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
@@ -109,8 +110,8 @@
     DrillConfig config = context.getConfig();
     lpPersistence = new LogicalPlanPersistence(config, classpathScan);
 
-    storagePlugins = config
-        .getInstance(StoragePluginRegistry.STORAGE_PLUGIN_REGISTRY_IMPL, StoragePluginRegistry.class, this);
+    storagePlugins = config.getInstance(
+        ExecConstants.STORAGE_PLUGIN_REGISTRY_IMPL, StoragePluginRegistry.class, this);
 
     reader = new PhysicalPlanReader(config, classpathScan, lpPersistence, endpoint, storagePlugins);
     operatorCreatorRegistry = new OperatorCreatorRegistry(classpathScan);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitStateManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitStateManager.java
index 48633c7..9554d59 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitStateManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/DrillbitStateManager.java
@@ -72,8 +72,9 @@
           throw new IllegalStateException("Cannot set drillbit to" + newState + "from" + currentState);
         }
         break;
+      default:
+        throw new IllegalArgumentException(newState.name());
     }
   }
-
 }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/QueryProfileStoreContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/QueryProfileStoreContext.java
index 4f94b10..32dcd91 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/QueryProfileStoreContext.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/QueryProfileStoreContext.java
@@ -31,10 +31,8 @@
 import org.apache.drill.exec.store.sys.PersistentStoreProvider;
 
 public class QueryProfileStoreContext {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(QueryProfileStoreContext.class);
 
   private static final String PROFILES = "profiles";
-
   private static final String RUNNING = "running";
 
   private final PersistentStore<UserBitShared.QueryProfile> completedProfiles;
@@ -74,4 +72,4 @@
   public TransientStore<QueryInfo> getRunningProfileStore() {
     return runningProfiles;
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
index 430e0a1..f9011c6 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/options/SystemOptionManager.java
@@ -32,6 +32,8 @@
 import org.apache.drill.exec.util.AssertionUtil;
 import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.Arrays;
 import java.util.HashMap;
@@ -43,36 +45,43 @@
 import java.util.stream.StreamSupport;
 
 /**
- *  <p> {@link OptionManager} that holds options within {@link org.apache.drill.exec.server.DrillbitContext}.
- * Only one instance of this class exists per drillbit. Options set at the system level affect the entire system and
- * persist between restarts.
- *  </p>
- *
- *  <p> All the system options are externalized into conf file. While adding a new system option
- *  a validator should be added and the default value for the option should be set in
- *  the conf files(example : drill-module.conf) under the namespace drill.exec.options.
- *  </p>
- *
- *  <p>
- *  The SystemOptionManager loads all the validators and the default values for the options are
- *  fetched from the config. The validators are populated with the default values fetched from
- *  the config. If the option is not set in the conf files config option is missing exception
- *  will be thrown.
- *  </p>
- *
- *  <p>
- *  If the option is set using ALTER, the value that is set will be returned. Else the default value
- *  that is loaded into validator from the config will be returned.
- *  </p>
+ * <p>
+ * {@link OptionManager} that holds options within
+ * {@link org.apache.drill.exec.server.DrillbitContext}. Only one instance of
+ * this class exists per drillbit. Options set at the system level affect the
+ * entire system and persist between restarts.
+ * </p>
+ * <p>
+ * All the system options are externalized into a conf file. When adding a new
+ * system option, a validator should be added and the default value for the
+ * option should be set in the conf files (example: drill-module.conf) under
+ * the namespace drill.exec.options.
+ * </p>
+ * <p>
+ * The SystemOptionManager loads all the validators; the default values for
+ * the options are fetched from the config. The validators are populated with
+ * the default values fetched from the config. If the option is not set in the
+ * conf files, a "config option is missing" exception will be thrown.
+ * </p>
+ * <p>
+ * If the option is set using ALTER, the value that is set will be returned.
+ * Otherwise, the default value that is loaded into the validator from the
+ * config will be returned.
+ * </p>
  */
 public class SystemOptionManager extends BaseOptionManager implements AutoCloseable {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SystemOptionManager.class);
+  private static final Logger logger = LoggerFactory.getLogger(SystemOptionManager.class);
 
   /**
-   * Creates all the OptionDefinitions to be registered with the {@link SystemOptionManager}.
+   * Creates the {@code OptionDefinitions} to be registered with the {@link SystemOptionManager}.
    * @return A map
    */
   public static CaseInsensitiveMap<OptionDefinition> createDefaultOptionDefinitions() {
+    // The deprecation says not to use the option in code. But, for backward
+    // compatibility, we need to keep the old options in the table to avoid
+    // failures if users reference the options. So, ignore deprecation warnings
+    // here.
+    @SuppressWarnings("deprecation")
     final OptionDefinition[] definitions = new OptionDefinition[]{
       new OptionDefinition(PlannerSettings.CONSTANT_FOLDING),
       new OptionDefinition(PlannerSettings.EXCHANGE),
@@ -332,8 +341,8 @@
    * NOTE: CRUD operations must use lowercase keys.
    */
   private PersistentStore<PersistedOptionValue> options;
-  private CaseInsensitiveMap<OptionDefinition> definitions;
-  private CaseInsensitiveMap<OptionValue> defaults;
+  private final CaseInsensitiveMap<OptionDefinition> definitions;
+  private final CaseInsensitiveMap<OptionValue> defaults;
 
   public SystemOptionManager(LogicalPlanPersistence lpPersistence, final PersistentStoreProvider provider,
                              final DrillConfig bootConfig) {
@@ -355,7 +364,6 @@
    *
    * @param bootConfig Drill config
    */
-
   @VisibleForTesting
   public SystemOptionManager(final DrillConfig bootConfig) {
     this.provider = new InMemoryStoreProvider(100);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/PluginConfigWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/PluginConfigWrapper.java
index 9d22393..c19fcc9 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/PluginConfigWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/PluginConfigWrapper.java
@@ -29,9 +29,9 @@
 @XmlRootElement
 public class PluginConfigWrapper {
 
-  private String name;
-  private StoragePluginConfig config;
-  private boolean exists;
+  private final String name;
+  private final StoragePluginConfig config;
+  private final boolean exists;
 
   @JsonCreator
   public PluginConfigWrapper(@JsonProperty("name") String name, @JsonProperty("config") StoragePluginConfig config) {
@@ -53,7 +53,7 @@
   }
 
   public void createOrUpdateInStorage(StoragePluginRegistry storage) throws ExecutionSetupException {
-    storage.createOrUpdate(name, config, true);
+    storage.put(name, config);
   }
 
   public boolean setEnabledInStorage(StoragePluginRegistry storage, boolean enabled) throws ExecutionSetupException {
@@ -70,7 +70,7 @@
 
   public boolean deleteFromStorage(StoragePluginRegistry storage) {
     if (exists) {
-      storage.deletePlugin(name);
+      storage.remove(name);
       return true;
     }
     return false;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StorageResources.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StorageResources.java
index f1ccc2f..b63da59 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StorageResources.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/StorageResources.java
@@ -51,7 +51,6 @@
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.exec.server.rest.DrillRestServer.UserAuthEnabled;
 import org.apache.drill.exec.store.StoragePluginRegistry;
-import org.apache.drill.exec.store.sys.PersistentStore;
 import org.glassfish.jersey.server.mvc.Viewable;
 
 import com.fasterxml.jackson.core.JsonParseException;
@@ -140,10 +139,7 @@
   @Produces(MediaType.APPLICATION_JSON)
   public PluginConfigWrapper getPluginConfig(@PathParam("name") String name) {
     try {
-      PersistentStore<StoragePluginConfig> configStorage = storage.getStore();
-      if (configStorage.contains(name)) {
-        return new PluginConfigWrapper(name, configStorage.get(name));
-      }
+      return new PluginConfigWrapper(name, storage.getConfig(name));
     } catch (Exception e) {
       logger.error("Failure while trying to access storage config: {}", name, e);
     }
@@ -263,7 +259,7 @@
   public List<PluginConfigWrapper> getConfigsFor(@PathParam("group") String pluginGroup) {
     pluginGroup = StringUtils.isNotEmpty(pluginGroup) ? pluginGroup.replace("/", "") : ALL_PLUGINS;
     return StreamSupport.stream(
-        Spliterators.spliteratorUnknownSize(storage.getStore().getAll(), Spliterator.ORDERED), false)
+        Spliterators.spliteratorUnknownSize(storage.storedConfigs().entrySet().iterator(), Spliterator.ORDERED), false)
             .filter(byPluginGroup(pluginGroup))
             .map(entry -> new PluginConfigWrapper(entry.getKey(), entry.getValue()))
             .sorted(PLUGIN_COMPARATOR)
@@ -310,7 +306,7 @@
   @XmlRootElement
   public class JsonResult {
 
-    private String result;
+    private final String result;
 
     public JsonResult(String result) {
       this.result = result;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpConstraintSecurityHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpConstraintSecurityHandler.java
index 99ca966..1807b7b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpConstraintSecurityHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpConstraintSecurityHandler.java
@@ -33,8 +33,9 @@
 import static org.apache.drill.exec.server.rest.auth.DrillUserPrincipal.AUTHENTICATED_ROLE;
 
 /**
- * Accessor class that extends the ConstraintSecurityHandler to expose protected method's for start and stop of Handler.
- * This is needed since now {@link DrillHttpSecurityHandlerProvider} composes of 2 security handlers -
+ * Accessor class that extends the {@link ConstraintSecurityHandler} to expose
+ * protected methods for start and stop of the Handler. This is needed since now
+ * {@code DrillHttpSecurityHandlerProvider} composes of 2 security handlers -
  * For FORM and SPNEGO and has responsibility to start/stop of those handlers.
  **/
 public abstract class DrillHttpConstraintSecurityHandler extends ConstraintSecurityHandler {
@@ -68,4 +69,4 @@
   }
 
   public abstract String getImplName();
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpSecurityHandlerProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpSecurityHandlerProvider.java
index e3087ca..36a9863 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpSecurityHandlerProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/DrillHttpSecurityHandlerProvider.java
@@ -33,6 +33,8 @@
 import org.eclipse.jetty.server.Handler;
 import org.eclipse.jetty.server.Request;
 import org.eclipse.jetty.util.security.Constraint;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServletRequest;
@@ -47,12 +49,12 @@
 
 
 public class DrillHttpSecurityHandlerProvider extends ConstraintSecurityHandler {
-
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(DrillHttpSecurityHandlerProvider.class);
+  private static final Logger logger = LoggerFactory.getLogger(DrillHttpSecurityHandlerProvider.class);
 
   private final Map<String, DrillHttpConstraintSecurityHandler> securityHandlers =
       CaseInsensitiveMap.newHashMapWithExpectedSize(2);
 
+  @SuppressWarnings("unchecked")
   public DrillHttpSecurityHandlerProvider(DrillConfig config, DrillbitContext drillContext)
       throws DrillbitStartupException {
 
@@ -99,7 +101,8 @@
     }
 
     if (securityHandlers.size() == 0) {
-      throw new DrillbitStartupException("Authentication is enabled for WebServer but none of the security mechanism " +
+      throw new DrillbitStartupException(
+          "Authentication is enabled for WebServer but none of the security mechanism " +
           "was configured properly. Please verify the configurations and try again.");
     }
 
@@ -165,6 +168,7 @@
     }
   }
 
+  @Override
   public void doStop() throws Exception {
     super.doStop();
     for (DrillHttpConstraintSecurityHandler securityHandler : securityHandlers.values()) {
@@ -185,7 +189,7 @@
   }
 
   /**
-   * Return's list of configured mechanisms for HTTP authentication. For backward
+   * Returns a list of configured mechanisms for HTTP authentication. For backward
    * compatibility if authentication is enabled it will include FORM mechanism by default.
    * @param config - {@link DrillConfig}
    * @return
@@ -205,4 +209,4 @@
     }
     return configuredMechs;
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/SpnegoSecurityHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/SpnegoSecurityHandler.java
index 9ccfa85..60858af 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/SpnegoSecurityHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/server/rest/auth/SpnegoSecurityHandler.java
@@ -22,7 +22,6 @@
 import org.eclipse.jetty.util.security.Constraint;
 
 public class SpnegoSecurityHandler extends DrillHttpConstraintSecurityHandler {
-  //private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SpnegoSecurityHandler.class);
 
   @Override
   public String getImplName() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
index 1bbbe76..324c287 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractRecordReader.java
@@ -22,6 +22,8 @@
 import java.util.Map;
 
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableList;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.drill.common.expression.PathSegment;
 import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.exception.OutOfMemoryException;
@@ -32,14 +34,14 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 
 public abstract class AbstractRecordReader implements RecordReader {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractRecordReader.class);
+  private static final Logger logger = LoggerFactory.getLogger(AbstractRecordReader.class);
 
   // For text reader, the default columns to read is "columns[0]".
   protected static final List<SchemaPath> DEFAULT_TEXT_COLS_TO_READ = ImmutableList.of(new SchemaPath(new PathSegment.NameSegment("columns", new PathSegment.ArraySegment(0))));
 
-  private Collection<SchemaPath> columns = null;
-  private boolean isStarQuery = false;
-  private boolean isSkipQuery = false;
+  private Collection<SchemaPath> columns;
+  private boolean isStarQuery;
+  private boolean isSkipQuery;
 
   @Override
   public String toString() {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
index c411683..00c4755 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchema.java
@@ -36,6 +36,8 @@
 import org.apache.drill.exec.store.table.function.TableSignature;
 import org.apache.drill.exec.store.table.function.WithOptionsTableMacro;
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.calcite.linq4j.tree.DefaultExpression;
 import org.apache.calcite.linq4j.tree.Expression;
 import org.apache.calcite.rel.type.RelProtoDataType;
@@ -51,14 +53,15 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
 
 public abstract class AbstractSchema implements Schema, SchemaPartitionExplorer, AutoCloseable {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(AbstractSchema.class);
+  static final Logger logger = LoggerFactory.getLogger(AbstractSchema.class);
 
   private static final Expression EXPRESSION = new DefaultExpression(Object.class);
 
   private static final String SCHEMA_PARAMETER_NAME = "schema";
 
   /**
-   * Schema parameter for table function which creates schema provider based on given parameter value.
+   * Schema parameter for table function which creates schema provider based on
+   * given parameter value.
    */
   private static final TableParamDef SCHEMA_PARAMETER = TableParamDef.optional(
     SCHEMA_PARAMETER_NAME, String.class, (drillTable, value) -> {
@@ -134,9 +137,9 @@
   /**
    * The schema can be a top level schema which doesn't have its own tables, but refers
    * to one of the default sub schemas for table look up.
-   *
+   * <p>
    * Default implementation returns itself.
-   *
+   * <p>
    * Ex. "dfs" schema refers to the tables in "default" workspace when querying for
    * tables in "dfs" schema.
    *
@@ -404,5 +407,4 @@
   public boolean areTableNamesCaseSensitive() {
     return true;
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchemaFactory.java
index da5af1d..ab22b76 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/AbstractSchemaFactory.java
@@ -18,7 +18,8 @@
 package org.apache.drill.exec.store;
 
 /**
- * Abstract implementation of {@link SchemaFactory}, ensures that given schema name is always converted is lower case.
+ * Abstract implementation of {@link SchemaFactory}; ensures that the given schema
+ * name is always converted to lower case.
  */
 public abstract class AbstractSchemaFactory implements SchemaFactory {
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassPathFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassPathFileSystem.java
index 312aa80..d842d8e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassPathFileSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassPathFileSystem.java
@@ -30,12 +30,13 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 
 
-public class ClassPathFileSystem extends FileSystem{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ClassPathFileSystem.class);
+public class ClassPathFileSystem extends FileSystem {
+  static final Logger logger = LoggerFactory.getLogger(ClassPathFileSystem.class);
 
   static final String ERROR_MSG = "ClassPathFileSystem is read only.";
 
@@ -128,8 +129,4 @@
   public void setWorkingDirectory(Path arg0) {
     this.working = arg0;
   }
-
-  public static void main(String[] args) throws Exception{
-    URI uri = new URI("classpath:///");
-  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassicConnectorLocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassicConnectorLocator.java
new file mode 100644
index 0000000..7f18983
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ClassicConnectorLocator.java
@@ -0,0 +1,302 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.apache.drill.shaded.guava.com.google.common.base.Preconditions.checkNotNull;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
+import java.util.Collection;
+import java.util.IdentityHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
+import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Plugin locator for the "classic" class-path method of locating connectors.
+ * Connectors appear somewhere on the class path, and are subclasses of
+ * {@link StoragePlugin}. The connector and its configuration class must
+ * reside in Drill's default class loader.
+ * <p>
+ * Handles "classic" storage plugin classes which ship with Drill, or are
+ * added to Drill's class path.
+ * <p>
+ * The plugin registry supports access from multiple threads concurrently.
+ * This locator is therefore immutable after the {@code init()} call.
+ * The set of plugin instances and configurations (managed by the registry)
+ * changes during a run, but the set of known plugin classes is fixed.
+ * <p>
+ * We sometimes need to add special plugins for testing. Since we cannot
+ * add them on the fly, we must add them at (test) startup time via
+ * config properties.
+ *
+ * <h4>Plugin Implementations</h4>
+ *
+ * This class manages plugin implementation classes (AKA "connectors")
+ * which must derive from {@code StoragePlugin}. Each must be configured
+ * via a class derived from {@code StoragePluginConfig}. No two connectors
+ * can share a configuration class. Each connector must have exactly one
+ * associated configuration. (Actually, a single connector might handle
+ * multiple configurations, but that seems an obscure use case.)
+ * <p>
+ * The constructor of the connector associates the two classes, and must
+ * be of the form:<pre><code>
+ * public SomePlugin(SomePluginConfig config,
+ *                   DrillbitContext context,
+ *                   String pluginName) {</code></pre>
+ * <p>
+ * Classes must be on the class path. Drill often caches the class path:
+ * creating it at build time, then storing it in a file, to be read at
+ * run time. If you are developing a plugin in an IDE, and this class
+ * refuses to find the plugin, you can temporarily force a runtime
+ * class path scan via setting the
+ * {@code ClassPathScanner.IMPLEMENTATIONS_SCAN_CACHE} config property
+ * to {@code false}. The various test "fixtures" provide an easy way
+ * to set config properties per-test.
+ * <p>
+ * This locator ignores four categories of {@code StoragePluginConfig}
+ * classes:
+ * <ul>
+ * <li>Interfaces or abstract classes</li>
+ * <li>Those that do not have the constructor described above.</li>
+ * <li>System plugins with the {@code SystemPlugin} annotation.</li>
+ * <li>Private test plugins with the {@code PrivatePlugin}
+ * annotation.</li>
+ * </ul>
+ *
+ * <h4>Config Properties</h4>
+ *
+ * <dl>
+ * <dt>{@code ExecConstants.PRIVATE_CONNECTORS}
+ * ({@code drill.exec.storage.private_connectors})</dt>
+ * <dd>An optional list of private plugin class names. Private plugins
+ * are valid instances of {@code StoragePlugin} which have the
+ * {@code PrivatePlugin} annotation and so are not automatically
+ * loaded.</dd>
+ * </dl>
+ */
+public class ClassicConnectorLocator implements ConnectorLocator {
+  private static final Logger logger = LoggerFactory.getLogger(ClassicConnectorLocator.class);
+
+  private final PluginRegistryContext context;
+  // Here "plugin" means storage plugin class
+  private final Map<Class<? extends StoragePluginConfig>,
+                    Constructor<? extends StoragePlugin>> availablePlugins = new IdentityHashMap<>();
+  private PluginBootstrapLoader bootstrapLoader;
+
+  public ClassicConnectorLocator(PluginRegistryContext context) {
+    this.context = checkNotNull(context);
+    this.bootstrapLoader = new PluginBootstrapLoaderImpl(context);
+  }
+
+  // TODO: Provide a list of plugin classes to avoid doing
+  // the scan twice.
+  @Override
+  public void init() {
+
+    // Build the list of all available storage plugin class constructors.
+    final Collection<Class<? extends StoragePlugin>> pluginClasses =
+        context.classpathScan().getImplementations(StoragePlugin.class);
+    final String lineBrokenList =
+        pluginClasses.size() == 0
+            ? "" : "\n\t- " + Joiner.on("\n\t- ").join(pluginClasses);
+    logger.debug("Found {} storage plugin configuration classes: {}.",
+        pluginClasses.size(), lineBrokenList);
+    for (Class<? extends StoragePlugin> plugin : pluginClasses) {
+
+      // Skip system and private plugins
+      if (!plugin.isAnnotationPresent(SystemPlugin.class) &&
+          !plugin.isAnnotationPresent(PrivatePlugin.class)) {
+        registerPlugin(plugin);
+      }
+    }
+
+    // Any private connectors to load?
+    // Expected to be in the same class loader as this class.
+    DrillConfig config = context.config();
+    if (config.hasPath(ExecConstants.PRIVATE_CONNECTORS)) {
+      List<String> privateConfigs = config.getStringList(ExecConstants.PRIVATE_CONNECTORS);
+      for (String privateName : privateConfigs) {
+        registerPrivatePlugin(privateName);
+      }
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  private void registerPrivatePlugin(String pluginName) {
+    Class<?> pluginClass;
+    try {
+      ClassLoader cl = getClass().getClassLoader();
+      pluginClass = cl.loadClass(pluginName);
+    } catch (ClassNotFoundException e) {
+      throw new IllegalArgumentException("Private plugin class not found: " + pluginName, e);
+    }
+    if (!StoragePlugin.class.isAssignableFrom(pluginClass)) {
+      throw new IllegalArgumentException("Private plugin class does not extend StoragePlugin: " + pluginName);
+    }
+    if (!registerPlugin((Class<? extends StoragePlugin>) pluginClass)) {
+      throw new IllegalArgumentException("Private plugin class not valid, see logs: " + pluginName);
+    }
+  }
+
+  private boolean registerPlugin(Class<? extends StoragePlugin> plugin) {
+    Map<Class<? extends StoragePluginConfig>, Constructor<? extends StoragePlugin>> ctors = constuctorsFor(plugin);
+    if (ctors.isEmpty()) {
+      logger.debug("Skipping registration of StoragePlugin {} as it doesn't have a constructor with the parameters "
+          + "of (StoragePluginConfig, Config)", plugin.getCanonicalName());
+      return false;
+    } else {
+      for (Entry<Class<? extends StoragePluginConfig>, Constructor<? extends StoragePlugin>> ctor : ctors.entrySet()) {
+        if (availablePlugins.containsKey(ctor.getKey())) {
+          logger.warn(String.format("Two storage plugins cannot use the same config class. " +
+              "Found conflict %s and %s both use %s. Only the first added to registry.",
+              availablePlugins.get(ctor.getKey()).getDeclaringClass().getName(),
+              ctor.getValue().getDeclaringClass().getName(),
+              ctor.getKey().getName()));
+        } else {
+          availablePlugins.put(ctor.getKey(), ctor.getValue());
+        }
+      }
+      return true;
+    }
+  }
+
+  @SuppressWarnings("unchecked")
+  public static Map<Class<? extends StoragePluginConfig>, Constructor<? extends StoragePlugin>>
+      constuctorsFor(Class<? extends StoragePlugin> plugin) {
+    Map<Class<? extends StoragePluginConfig>, Constructor<? extends StoragePlugin>> ctors = new IdentityHashMap<>();
+    for (Constructor<?> c : plugin.getConstructors()) {
+      Class<?>[] params = c.getParameterTypes();
+      if (params.length != 3
+          || !StoragePluginConfig.class.isAssignableFrom(params[0])
+          || params[1] != DrillbitContext.class
+          || params[2] != String.class) {
+        logger.debug("Skipping StoragePlugin constructor {} for plugin class {} since it doesn't implement a "
+            + "constructor(StoragePluginConfig, DrillbitContext, String)", c, plugin);
+        continue;
+      }
+      Class<? extends StoragePluginConfig> configClass = (Class<? extends StoragePluginConfig>) params[0];
+      ctors.put(configClass, (Constructor<? extends StoragePlugin>) c);
+    }
+    return ctors;
+  }
+
+  @Override
+  @VisibleForTesting
+  public Set<Class<? extends StoragePluginConfig>> configClasses() {
+    return availablePlugins.keySet();
+  }
+
+  /**
+   * Classic storage plugins do not provide default configurations.
+   */
+  @Override
+  public StoragePlugin get(String name) {
+    return null;
+  }
+
+  @Override
+  public List<StoragePlugin> intrinsicPlugins() {
+    return null;
+  }
+
+  /**
+   * Read bootstrap storage plugins
+   * {@link ExecConstants#BOOTSTRAP_STORAGE_PLUGINS_FILE} and format plugins
+   * {@link ExecConstants#BOOTSTRAP_FORMAT_PLUGINS_FILE} files for the first
+   * fresh install of Drill.
+   *
+ *
+   * @return bootstrap storage plugins
+   * @throws IOException if a read error occurs
+   */
+  @Override
+  public StoragePlugins bootstrapPlugins() throws IOException {
+    return bootstrapLoader.bootstrapPlugins();
+  }
+
+  @Override
+  public StoragePlugins updatedPlugins() {
+    return bootstrapLoader.updatedPlugins();
+  }
+
+  @Override
+  public void onUpgrade() {
+    bootstrapLoader.onUpgrade();
+    bootstrapLoader = null;
+  }
+
+  /**
+   * Creates plugin instance with the given {@code name} and configuration {@code pluginConfig}.
+ * The plugin needs to be present in the list of available plugins and be enabled in the configuration.
+   *
+   * @param name name of the plugin
+   * @param pluginConfig plugin configuration
+   * @return plugin client or {@code null} if plugin is disabled
+   */
+  @Override
+  public StoragePlugin create(String name, StoragePluginConfig pluginConfig) throws ExecutionSetupException {
+    StoragePlugin plugin;
+    Constructor<? extends StoragePlugin> constructor = availablePlugins.get(pluginConfig.getClass());
+    if (constructor == null) {
+      throw new ExecutionSetupException(String.format("Failure finding StoragePlugin constructor for config %s",
+          pluginConfig.getClass().getName()));
+    }
+    try {
+      plugin = constructor.newInstance(pluginConfig, context.drillbitContext(), name);
+      plugin.start();
+      return plugin;
+    } catch (ReflectiveOperationException | IOException e) {
+      Throwable t = e instanceof InvocationTargetException ? ((InvocationTargetException) e).getTargetException() : e;
+      if (t instanceof ExecutionSetupException) {
+        throw ((ExecutionSetupException) t);
+      }
+      throw new ExecutionSetupException(String.format(
+          "Failure setting up new storage plugin configuration for config %s", pluginConfig), t);
+    }
+  }
+
+  @Override
+  public boolean storable() {
+    return true;
+  }
+
+  @Override
+  public Class<? extends StoragePlugin> connectorClassFor(
+      Class<? extends StoragePluginConfig> configClass) {
+    Constructor<? extends StoragePlugin> constructor =
+        availablePlugins.get(configClass);
+    return constructor == null ? null : constructor.getDeclaringClass();
+  }
+
+  @Override
+  public void close() { }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ConnectorHandle.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ConnectorHandle.java
new file mode 100644
index 0000000..be5045a
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ConnectorHandle.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.store.PluginHandle.PluginType;
+import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
+
+/**
+ * Defines a storage connector: a storage plugin config along with the
+ * locator which can create a plugin instance given an instance of the
+ * config.
+ */
+public class ConnectorHandle {
+
+  private final ConnectorLocator locator;
+  private final Class<? extends StoragePluginConfig> configClass;
+
+  /**
+   * Intrinsic (system) plugins are created at start-up time and do
+   * not allow additional plugin instances.
+   */
+  private final boolean isIntrinsic;
+
+  private ConnectorHandle(ConnectorLocator locator,
+      Class<? extends StoragePluginConfig> configClass) {
+    this.locator = locator;
+    this.configClass = configClass;
+    this.isIntrinsic = false;
+  }
+
+  private ConnectorHandle(ConnectorLocator locator, StoragePlugin plugin) {
+    this.locator = locator;
+    this.configClass = plugin.getConfig().getClass();
+    this.isIntrinsic = true;
+  }
+
+  /**
+   * Construct a handle for a "normal" connector which takes a plugin config
+   * and constructs a plugin instance based on that config.
+   */
+  public static ConnectorHandle configuredConnector(ConnectorLocator locator,
+      Class<? extends StoragePluginConfig> configClass) {
+    return new ConnectorHandle(locator, configClass);
+  }
+
+  /**
+   * Construct a handle for an intrinsic (system) connector which always
+   * uses a single config: the one created along with the plugin instance
+   * itself.
+   */
+  public static ConnectorHandle intrinsicConnector(ConnectorLocator locator,
+      StoragePlugin plugin) {
+    return new ConnectorHandle(locator, plugin);
+  }
+
+  /**
+   * An intrinsic connector is one defined for the life of the Drillbit.
+   * It cannot be configured, changed or removed. It is not stored.
+   */
+  public boolean isIntrinsic() { return isIntrinsic; }
+  public boolean isStored() { return ! isIntrinsic() && locator.storable(); }
+  public ConnectorLocator locator() { return locator; }
+
+  public Class<? extends StoragePluginConfig> configClass() {
+    return configClass;
+  }
+
+  public Class<? extends StoragePlugin> connectorClass() {
+    return locator.connectorClassFor(configClass);
+  }
+
+  public PluginHandle pluginEntryFor(String name, StoragePluginConfig config, PluginType type) {
+    Preconditions.checkArgument(configClass.isInstance(config));
+    Preconditions.checkArgument(type != PluginType.INTRINSIC || isIntrinsic());
+    return new PluginHandle(name, config, this, type);
+  }
+
+  public StoragePlugin newInstance(String name, StoragePluginConfig config) throws Exception {
+    Preconditions.checkNotNull(name);
+    Preconditions.checkNotNull(config);
+    Preconditions.checkArgument(configClass.isInstance(config));
+    return locator.create(name, config);
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ConnectorLocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ConnectorLocator.java
new file mode 100644
index 0000000..ac6723d
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ConnectorLocator.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+import java.util.Collection;
+import java.util.Set;
+
+import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+
+/**
+ * Locates storage plugins. Allows multiple ways of finding plugins.
+ * <p>
+ * Terminology is a bit tortured. Drill overuses the term "plugin."
+ * Here we adopt the following conventions:
+ * <dl>
+ * <dt>(storage) plugin</dt>
+ * <dd>The user-visible concept of a storage plugin: a configuration
+ * with or without its corresponding connector instance. Either an
+ * instance of a storage plugin config class, or the
+ * same information serialized as JSON. Also, confusingly, "plugin"
+ * can also mean the config wrapped in an instance of the connector.
+ * </dd>
+ * </dl>
+ * <p>
+ * Connectors can be storable in the persistent store. All "normal"
+ * connectors have storable configurations. System plugins, however
+ * have a fixed config that is not storable.
+ * <p>
+ * Connectors can define bootstrap or upgrade plugin sets. "Normal"
+ * plugins usually provide bootstrap configs, system plugins do not.
+ * <p>
+ * This class instantiates a connector given a configuration and a
+ * name. The plugin registry caches the instance for the duration of
+ * the Drillbit run, or until the config changes.
+ */
+// TODO: Sort out error handling. Some method throw IOException, some
+// throw unchecked exceptions.
+public interface ConnectorLocator {
+
+  /**
+   * Initialize the locator. Must be called before the locator is used.
+   */
+  void init();
+
+  /**
+   * When starting a new installation, called to load bootstrap
+   * plugins (configurations) that come "out-of-the-box."
+   *
+   * @return the set of bootstrap plugins, or {@code null} if this locator
+   * does not provide bootstrap plugins
+   * @throws IOException
+   */
+  StoragePlugins bootstrapPlugins() throws IOException;
+
+  /**
+   * Identify plugins to be added to an existing system, typically
+   * on the first run after an upgrade.
+   * <p>
+   * TODO: The current mechanism depends on deleting a file after the
+   * first run, which is unreliable. It won't, for example, correctly
+   * handle a restored ZK. A better mechanism would store a version
+   * number in the persistent store, and pass that version number into
+   * this method.
+   *
+   * @return the set of plugin configurations to refresh in the
+   * persistent store, or null if none to update
+   * @throws IOException for errors
+   */
+  StoragePlugins updatedPlugins();
+
+  /**
+   * If {@code updatedPlugins()} returned non-null, then the
+   * registry will call this method after successful update of
+   * the persistent store. This method can do any post-update
+   * cleanup, such as deleting the file mentioned above.
+   */
+  void onUpgrade();
+
+  /**
+   * Enumerate the intrinsic plugins. An intrinsic plugin is one
+   * which takes no configuration and which therefore cannot be
+   * disabled, and thus is always available. Example: Drill's
+   * system plugins. For an intrinsic plugin, the plugin name is
+   * also the name of the configuration.
+   *
+   * @return collection of intrinsic plugins which require no configuration
+   */
+  Collection<StoragePlugin> intrinsicPlugins();
+
+  /**
+   * Retrieve an instance of the named connector with default configuration.
+   * Typically used for connectors with no configuration, such as system
+   * storage plugins.
+   *
+   * @param name the name of a <i>connector class</i> (not the name of
+   * a plugin (configuration))
+   * @return a plugin with default configuration, or null if this locator does
+   * not support such plugins
+   */
+  StoragePlugin get(String name);
+
+  /**
+   * Return the set of known storage plugin configuration classes for which
+   * the user can create configs. Excludes system plugin configs. Used
+   * to map config classes to this locator to create plugin instances.
+   *
+   * @return the unordered set of storage plugin configuration classes
+   * available from this locator. Can be null if this locator offers
+   * only system plugins
+   */
+  Set<Class<? extends StoragePluginConfig>> configClasses();
+
+  /**
+   * Create a connector instance given a named configuration. The configuration
+   * and/or name is used to locate the connector class.
+   *
+   * @param name name of the storage plugin (configuration).
+   * @param pluginConfig the deserialized Java configuration object.
+   * @return a connector of the proper class that matches the configuration or
+   * name, initialized with the configuration
+   * @throws ExecutionSetupException for all errors
+   */
+  StoragePlugin create(String name, StoragePluginConfig pluginConfig) throws Exception;
+
+  /**
+   * @return true if configs for this locator should be persisted, false if
+   * these are ad-hoc or otherwise per-run connectors
+   */
+  boolean storable();
+
+  /**
+   * Given a configuration class, return the corresponding connector
+   * (plugin) class.
+   */
+  Class<? extends StoragePlugin> connectorClassFor(
+      Class<? extends StoragePluginConfig> configClass);
+
+  void close();
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/DistributedStorageEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/DistributedStorageEngine.java
deleted file mode 100644
index ac0d28d..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/DistributedStorageEngine.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store;
-
-public interface DistributedStorageEngine {
-  public String getDfsName();
-}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/DrillSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/DrillSchemaFactory.java
new file mode 100644
index 0000000..40e67c4
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/DrillSchemaFactory.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.concurrent.TimeUnit;
+
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DrillSchemaFactory extends AbstractSchemaFactory {
+  private static final Logger logger = LoggerFactory.getLogger(DrillSchemaFactory.class);
+
+  private final StoragePluginRegistryImpl registry;
+
+  public DrillSchemaFactory(String name, StoragePluginRegistryImpl registry) {
+    super(name);
+    this.registry = registry;
+  }
+
+  @Override
+  public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws IOException {
+    Stopwatch watch = Stopwatch.createStarted();
+    registry.registerSchemas(schemaConfig, parent);
+
+    // Add second level schema as top level schema with name qualified with parent schema name
+    // Ex: "dfs" schema has "default" and "tmp" as sub schemas. Add following extra schemas "dfs.default" and
+    // "dfs.tmp" under root schema.
+    //
+    // Before change, schema tree looks like below:
+    // "root"
+    // -- "dfs"
+    // -- "default"
+    // -- "tmp"
+    // -- "hive"
+    // -- "default"
+    // -- "hivedb1"
+    //
+    // After the change, the schema tree looks like below:
+    // "root"
+    // -- "dfs"
+    // -- "default"
+    // -- "tmp"
+    // -- "dfs.default"
+    // -- "dfs.tmp"
+    // -- "hive"
+    // -- "default"
+    // -- "hivedb1"
+    // -- "hive.default"
+    // -- "hive.hivedb1"
+    List<SchemaPlus> secondLevelSchemas = new ArrayList<>();
+    for (String firstLevelSchemaName : parent.getSubSchemaNames()) {
+      SchemaPlus firstLevelSchema = parent.getSubSchema(firstLevelSchemaName);
+      for (String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
+        secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
+      }
+    }
+
+    for (SchemaPlus schema : secondLevelSchemas) {
+      AbstractSchema drillSchema;
+      try {
+        drillSchema = schema.unwrap(AbstractSchema.class);
+      } catch (ClassCastException e) {
+        throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
+      }
+      SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
+      parent.add(wrapper.getName(), wrapper);
+    }
+
+    logger.debug("Took {} ms to register schemas.", watch.elapsed(TimeUnit.MILLISECONDS));
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/DrillbitPluginRegistryContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/DrillbitPluginRegistryContext.java
new file mode 100644
index 0000000..5a5187a
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/DrillbitPluginRegistryContext.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.jasonclawson.jackson.dataformat.hocon.HoconFactory;
+
+/**
+ * Implementation of the storage registry context which obtains the
+ * needed resources from the {@code DrillbitContext}.
+ */
+public class DrillbitPluginRegistryContext implements PluginRegistryContext {
+
+  private final DrillbitContext drillbitContext;
+  private final ObjectMapper mapper;
+
+  public DrillbitPluginRegistryContext(DrillbitContext drillbitContext) {
+    this.drillbitContext = drillbitContext;
+
+    // Specialized form of the persistence mechanism
+    // to handle HOCON format in the override file
+    LogicalPlanPersistence persistence = new LogicalPlanPersistence(drillbitContext.getConfig(),
+        drillbitContext.getClasspathScan(),
+        new ObjectMapper(new HoconFactory()));
+    mapper = persistence.getMapper();
+  }
+
+  @Override
+  public DrillConfig config() {
+    return drillbitContext.getConfig();
+  }
+
+  @Override
+  public ObjectMapper mapper() {
+    return mapper;
+  }
+
+  @Override
+  public ScanResult classpathScan() {
+    return drillbitContext.getClasspathScan();
+  }
+
+  @Override
+  public DrillbitContext drillbitContext() {
+    return drillbitContext;
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java
index 617d409..c086a36 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/LocalSyncableFileSystem.java
@@ -32,6 +32,7 @@
 import java.nio.ByteBuffer;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.drill.common.exceptions.UserException;
 import org.apache.hadoop.fs.ByteBufferReadable;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
@@ -43,11 +44,14 @@
 import org.apache.hadoop.fs.Syncable;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Progressable;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
- * This class provides a Syncable local extension of the hadoop FileSystem
+ * A syncable local extension of the Hadoop FileSystem
  */
 public class LocalSyncableFileSystem extends FileSystem {
+  private static final Logger logger = LoggerFactory.getLogger(LocalSyncableFileSystem.class);
 
   @Override
   public URI getUri() {
@@ -64,8 +68,10 @@
   }
 
   @Override
-  public FSDataOutputStream create(Path path, FsPermission fsPermission, boolean b, int i, short i2, long l, Progressable progressable) throws IOException {
-    return new FSDataOutputStream(new LocalSyncableOutputStream(path), FileSystem.getStatistics(path.toUri().getScheme(), getClass()));
+  public FSDataOutputStream create(Path path, FsPermission fsPermission,
+      boolean b, int i, short i2, long l, Progressable progressable) throws IOException {
+    return new FSDataOutputStream(new LocalSyncableOutputStream(path),
+        FileSystem.getStatistics(path.toUri().getScheme(), getClass()));
   }
 
   @Override
@@ -106,8 +112,7 @@
   }
 
   @Override
-  public void setWorkingDirectory(Path path) {
-  }
+  public void setWorkingDirectory(Path path) { }
 
   @Override
   public Path getWorkingDirectory() {
@@ -126,8 +131,8 @@
   }
 
   public class LocalSyncableOutputStream extends OutputStream implements Syncable {
-    private FileOutputStream fos;
-    private BufferedOutputStream output;
+    private final FileOutputStream fos;
+    private final BufferedOutputStream output;
 
     public LocalSyncableOutputStream(Path path) throws FileNotFoundException {
       File dir = new File(path.getParent().toString());
@@ -166,12 +171,14 @@
   public class LocalInputStream extends InputStream implements Seekable, PositionedReadable, ByteBufferReadable {
 
     private BufferedInputStream input;
-    private String path;
-    private long position = 0;
+    private final String path;
+    private long position;
 
+    @SuppressWarnings("resource")
     public LocalInputStream(Path path)  throws IOException {
       this.path = path.toString();
-      input = new BufferedInputStream(new FileInputStream(new RandomAccessFile(this.path, "r").getFD()), 1024*1024);
+      this.input = new BufferedInputStream(new FileInputStream(
+          new RandomAccessFile(this.path, "r").getFD()), 1024*1024);
     }
 
     @Override
@@ -192,6 +199,7 @@
     @Override
     public void seek(long l) throws IOException {
       input.close();
+      @SuppressWarnings("resource")
       RandomAccessFile raf = new RandomAccessFile(path, "r");
       raf.seek(l);
       input = new BufferedInputStream(new FileInputStream(raf.getFD()), 1024*1024);
@@ -208,8 +216,6 @@
       throw new IOException("seekToNewSource not supported");
     }
 
-
-
     @Override
     public int read(ByteBuffer buf) throws IOException {
       buf.reset();
@@ -224,10 +230,8 @@
         buf.put(b);
         return read;
       }
-
     }
 
-
     @Override
     public int read(byte[] b) throws IOException {
       return input.read(b);
@@ -243,7 +247,19 @@
       byte[] b = new byte[1];
       input.read(b);
       position++;
-      return (int) b[0] & 0xFF;
+      return b[0] & 0xFF;
+    }
+
+    @Override
+    public void close() {
+      try {
+        input.close();
+      } catch (IOException e) {
+        throw UserException.dataWriteError(e)
+          .addContext(
+              "Failed to close local syncable file system %s, possible data loss.", path)
+          .build(logger);
+      }
     }
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/NamedStoragePluginConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/NamedStoragePluginConfig.java
deleted file mode 100644
index 41db97b..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/NamedStoragePluginConfig.java
+++ /dev/null
@@ -1,69 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store;
-
-import com.fasterxml.jackson.annotation.JsonCreator;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import org.apache.drill.common.logical.StoragePluginConfig;
-
-import com.fasterxml.jackson.annotation.JsonTypeName;
-
-@JsonTypeName(NamedStoragePluginConfig.NAME)
-public class NamedStoragePluginConfig extends StoragePluginConfig {
-
-  public static final String NAME = "named";
-
-  private final String name;
-
-  @JsonCreator
-  public NamedStoragePluginConfig(@JsonProperty("name") String name) {
-    this.name = name;
-  }
-
-  public String getName() {
-    return name;
-  }
-
-  @Override
-  public boolean equals(Object obj) {
-    if (this == obj) {
-      return true;
-    }
-    if (obj == null) {
-      return false;
-    }
-    if (getClass() != obj.getClass()) {
-      return false;
-    }
-    NamedStoragePluginConfig other = (NamedStoragePluginConfig) obj;
-    if (name == null) {
-      return other.name == null;
-    } else {
-      return name.equals(other.name);
-    }
-  }
-
-  @Override
-  public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + (name == null ? 0 : name.hashCode());
-    return result;
-  }
-
-}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PartitionNotFoundException.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PartitionNotFoundException.java
index 06a0848..71be1bc 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PartitionNotFoundException.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PartitionNotFoundException.java
@@ -17,6 +17,7 @@
  */
 package org.apache.drill.exec.store;
 
+@SuppressWarnings("serial")
 public class PartitionNotFoundException extends Exception {
 
   public PartitionNotFoundException() { }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginBootstrapLoader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginBootstrapLoader.java
new file mode 100644
index 0000000..aa72bc3
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginBootstrapLoader.java
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+
+/**
+ * Generalized interface for bootstrapping or upgrading the plugin persistent
+ * store. Provides the list of bootstrap plugins, along with those to be
+ * upgraded. Provides confirmation that the upgrade was performed to avoid
+ * doing it again.
+ * <p>
+ * <b>Caveats:</b> The upgrade mechanism is rather ad-hoc; it actually needs
+ * a version number to be solid, but the persistent store does not currently
+ * provide a version number. See DRILL-7613.
+ */
+public interface PluginBootstrapLoader {
+
+  /**
+   * Drill has detected that this is a new installation. Provide
+   * the list of storage plugins (that is, names and configs)
+   * to use to populate the storage plugin store.
+   */
+  StoragePlugins bootstrapPlugins() throws IOException;
+
+  /**
+   * Drill has detected that on restart, the persistent storage
+   * plugin exists. Return any changes that should be applied.
+   * <p>
+   * Note: this functionality is crude, there is no version, all
+   * Drillbits in a cluster will race to do the upgrade.
+   * Caveat emptor. See DRILL-7613.
+   */
+  StoragePlugins updatedPlugins();
+
+  /**
+   * Drill successfully applied the plugin upgrades from
+   * {@link #updatedPlugins()}. Use this event to mark this
+   * version as having been upgraded. (Again, this is crude and
+   * may not actually work. See DRILL-7613.)
+   */
+  void onUpgrade();
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginBootstrapLoaderImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginBootstrapLoaderImpl.java
new file mode 100644
index 0000000..aebe45e
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginBootstrapLoaderImpl.java
@@ -0,0 +1,337 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Optional;
+import java.util.Set;
+
+import org.apache.drill.common.config.ConfigConstants;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.store.dfs.FileSystemConfig;
+import org.apache.drill.exec.util.ActionOnFile;
+import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
+import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
+import org.apache.drill.shaded.guava.com.google.common.io.Resources;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Loads the set of bootstrap plugin configurations for new systems.
+ * Also handles upgrades to new and existing systems. Each set is given
+ * by a Jackson-serialized JSON file with the list of plugin (configurations).
+ * <p>
+ * The files are given via indirections in the config system to allow easier
+ * testing with "non-stock" configurations.
+ *
+ * <h4>Bootstrap Files</h4>
+ *
+ * When a new system starts (boots), Drill populates the persistent store
+ * with a "starter" (bootstrap) set of storage plugin configurations as
+ * provided by each connector locator. The classic locator uses one or
+ * more bootstrap files, serialized to JSON, and visible at the root of the
+ * class path. Each plugin implementation, if packaged in its own project,
+ * can provide a bootstrap file ({@code bootstrap-storage-plugins.json}).
+ * <p>
+ * The core plugins share a single file in the exec module which has a list of
+ * storage plugins used as default on fresh start up. Then we have contrib module
+ * where some plugins and formats reside. We want them to be included in default
+ * plugins as well during fresh start up which is done by reading each bootstrap
+ * file in the classpath.
+ *
+ * <h4>Format Plugin Bootstrap</h4>
+ *
+ * All works fine for the plugins, since each plugin also has a
+ * {@code bootstrap-storage-plugins.json} loaded via this process. However, format
+ * plugins are defined <i>inside</i> storage plugins. This presents a problem when
+ * the format is defined outside of the core (such as in the contrib module). In
+ * this case, when new format was added, the developer had to modify the
+ * {@code bootstrap-storage-plugins.json} in exec module to ensure that the new format
+ * was included.
+ * <p>
+ * The {@code bootstrap-format-plugins.json} solves this problem: it allows non-core
+ * modules to include formats during bootstrap. The file must still indicate which
+ * plugin (config) to modify, and must list the same format for each supported file
+ * system plugin such as {@code dfs} and {@code cp}. Despite this limitation, the additional
+ * file makes the non-core format plugins independent of the core files.
+ * <p>
+ * The resulting bootstrap process is to 1) gather the storage plugins, and 2)
+ * merge any format bootstrap into these plugins.
+ *
+ * <h4>Upgrade Files</h4>
+ *
+ * Bootstrap files populate a new system. There are times when a user upgrades
+ * their Drill install and we would like to push out configurations for newly
+ * added plugins. This is done via the optional upgrade file.
+ * <p>
+ * If the system is new, configurations from the upgrade file replace
+ * (override) values from the bootstrap file. The net effect, bootstrap plus
+ * upgrades, are written into the persistent store.
+ * <p>
+ * If the system is upgraded, then the current technique uses a highly
+ * unreliable system: an upgrade file exists. Any upgrades are applied on top
+ * of the user's stored values. The upgrade file is then deleted to avoid
+ * applying the same upgrades multiple times. Not ideal, but it is what it
+ * is for now.
+ *
+ * <h4>Format Plugins</h4>
+ *
+ * Earlier versions of this code provided support for format plugins. However,
+ * this code never worked. The original design of Drill has format plugins
+ * as a set of attributes of the file system plugin config. Additional work
+ * is needed to allow file system configs to share plugins. Such work is a
+ * good idea, so the ill-fated format plugin code still appears, but does
+ * nothing.
+ *
+ * <h4>Config Properties</h4>
+ *
+ * <dl>
+ * <dt>{@code ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE}
+ * ({@code drill.exec.storage.bootstrap.storage})</dt>
+ * <dd>The name of the bootstrap file, normally
+ * {@code bootstrap-storage-plugins.json}.</dd>
+ * <dt>{@code ExecConstants.UPGRADE_STORAGE_PLUGINS_FILE}
+ * ({@code drill.exec.storage.upgrade.storage})</dt>
+ * <dd>The name of the upgrade file, normally
+ * {@code storage-plugins-override.conf}. Unlike the bootstrap
+ * file, only one upgrade file can appear in the class path.
+ * The upgrade file is optional.</dd>
+ * </dl>
+ */
+public class PluginBootstrapLoaderImpl implements PluginBootstrapLoader {
+  private static final Logger logger = LoggerFactory.getLogger(PluginBootstrapLoaderImpl.class);
+
+  private final PluginRegistryContext context;
+
+  /**
+   * The file read the first time a Drillbit starts up, and deleted
+   * afterwards. A poor-man's way of handling upgrades. Will be
+   * non-null when an upgrade is needed, null thereafter until a
+   * new release.
+   */
+  private URL pluginsOverrideFileUrl;
+
+  public PluginBootstrapLoaderImpl(PluginRegistryContext context) {
+    this.context = context;
+  }
+
+  @Override
+  public StoragePlugins bootstrapPlugins() throws IOException {
+    Map<String, URL> pluginURLMap = new HashMap<>();
+    StoragePlugins bootstrapPlugins = loadBootstrapPlugins(pluginURLMap);
+
+    // Upgrade the bootstrap plugins with any updates. Seems odd,
+    // but this is how Drill 1.17 works, so keeping the code.
+    // Uses the enabled status from the updates if different than
+    // the bootstrap version.
+    StoragePlugins updatedPlugins = updatedPlugins();
+    if (updatedPlugins != null) {
+      bootstrapPlugins.putAll(updatedPlugins);
+    }
+    applyFormatPlugins(bootstrapPlugins, pluginURLMap);
+    return bootstrapPlugins;
+  }
+
+  @VisibleForTesting
+  protected StoragePlugins loadBootstrapPlugins(Map<String, URL> pluginURLMap) throws IOException {
+    // bootstrap load the config since no plugins are stored.
+    String storageBootstrapFileName = context.config().getString(
+        ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE);
+    Set<URL> storageUrls = ClassPathScanner.forResource(storageBootstrapFileName, false);
+    if (storageUrls == null || storageUrls.isEmpty()) {
+      throw new IOException("Cannot find storage plugin boostrap file: " + storageBootstrapFileName);
+    }
+    logger.info("Loading the storage plugin configs from URLs {}.", storageUrls);
+    StoragePlugins bootstrapPlugins = new StoragePlugins();
+    for (URL url : storageUrls) {
+      try {
+        loadStoragePlugins(url, bootstrapPlugins, pluginURLMap);
+      } catch (IOException e) {
+        throw new IOException("Failed to load bootstrap plugins from " + url.toString(), e );
+      }
+    }
+
+    return bootstrapPlugins;
+  }
+
+  /**
+   * Given a set of bootstrap plugins, update selected plugins with the list
+   * of format plugins. Since the plugins do not yet reside in the registry,
+   * they can be modified in place without making copies.
+   * <p>
+   * Only one module (core) should have defined the file system plugin
+   * configs. Only one module should define each format plugin. As a result,
+   * order should not matter when applying the format configs.
+   * @throws IOException for load failures
+   */
+  private void applyFormatPlugins(StoragePlugins bootstrapPlugins,
+      Map<String, URL> pluginURLMap) throws IOException {
+    String formatBootstrapFileName = context.config().getString(
+        ExecConstants.BOOTSTRAP_FORMAT_PLUGINS_FILE);
+    Set<URL> formatUrls = ClassPathScanner.forResource(formatBootstrapFileName, false);
+    if (formatUrls == null) {
+      return;
+    }
+    for (URL url : formatUrls) {
+      logger.info("Loading format plugin configs from {}.", url);
+      loadFormatPlugins(url, bootstrapPlugins, pluginURLMap);
+    }
+  }
+
+  /**
+   * Get the new storage plugins from the
+   * {@link ConfigConstants#STORAGE_PLUGINS_OVERRIDE_CONF} file if it exists,
+   * null otherwise
+   */
+  @Override
+  public StoragePlugins updatedPlugins() {
+    String upgradeFileName = context.config().getString(
+        ExecConstants.UPGRADE_STORAGE_PLUGINS_FILE);
+    Set<URL> urlSet = ClassPathScanner.forResource(upgradeFileName, false);
+    if (urlSet.isEmpty()) {
+      logger.trace(
+          "The {} file is absent. Proceed without updating of the storage plugins configs",
+          upgradeFileName);
+      return null;
+    }
+    if (urlSet.size() != 1) {
+      throw DrillRuntimeException.create(
+          "More than one %s file is placed in Drill's classpath: %s",
+          upgradeFileName, urlSet);
+    }
+    pluginsOverrideFileUrl = urlSet.iterator().next();
+    try (InputStream is = pluginsOverrideFileUrl.openStream();) {
+      return context.mapper().readValue(is, StoragePlugins.class);
+    } catch (IOException e) {
+      logger.error("Failures are obtained while loading file: '{}'. Proceeding without update.",
+          upgradeFileName, e);
+      return null;
+    }
+  }
+
+  @Override
+  public void onUpgrade() {
+    if (pluginsOverrideFileUrl == null) {
+      return;
+    }
+    String fileAction = context.config().getString(
+        ExecConstants.ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE);
+    Optional<ActionOnFile> actionOnFile = Arrays.stream(ActionOnFile.values())
+        .filter(action -> action.name().equalsIgnoreCase(fileAction))
+        .findFirst();
+    actionOnFile.ifPresent(action -> action.action(pluginsOverrideFileUrl));
+    // TODO: replace with ifPresentOrElse() once the project moves to Java 9
+    if (!actionOnFile.isPresent()) {
+      logger.warn("Unknown value {} for {} boot option. Nothing will be done with file.",
+          fileAction, ExecConstants.ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE);
+    }
+  }
+
+  /**
+   * Loads storage plugins from the given URL
+   *
+   * @param url
+   *          URL to the storage plugins bootstrap file
+   * @param bootstrapPlugins
+   *          a collection where the plugins should be loaded to
+   * @param pluginURLMap
+   *          a map to store correspondence between storage plugins and
+   *          bootstrap files in which they are defined. Used for logging
+   * @throws IOException
+   *           if failed to retrieve a plugin from a bootstrap file
+   */
+  private void loadStoragePlugins(URL url, StoragePlugins bootstrapPlugins,
+      Map<String, URL> pluginURLMap) throws IOException {
+    StoragePlugins plugins = getPluginsFromResource(url);
+    plugins.forEach(plugin -> {
+      StoragePluginConfig oldPluginConfig = bootstrapPlugins.putIfAbsent(plugin.getKey(), plugin.getValue());
+      if (oldPluginConfig != null) {
+        logger.warn("Duplicate plugin instance '[{}]' defined in [{}, {}], ignoring the later one.",
+            plugin.getKey(), pluginURLMap.get(plugin.getKey()), url);
+      } else {
+        pluginURLMap.put(plugin.getKey(), url);
+      }
+    });
+  }
+
+  /**
+   * Loads format plugins from the given URL and adds the formats to the
+   * specified storage plugins
+   *
+   * @param url
+   *          URL to the format plugins bootstrap file
+   * @param bootstrapPlugins
+   *          a collection with loaded storage plugins. New formats will be
+   *          added to them
+   * @param pluginURLMap
+   *          a map to store correspondence between storage plugins and
+   *          bootstrap files in which they are defined. Used for logging
+   * @throws IOException
+   *           if failed to retrieve a plugin from a bootstrap file
+   */
+  private void loadFormatPlugins(URL url, StoragePlugins bootstrapPlugins,
+      Map<String, URL> pluginURLMap) throws IOException {
+    StoragePlugins plugins = getPluginsFromResource(url);
+    for (Entry<String, StoragePluginConfig> sourceEntry : plugins) {
+      String pluginName = sourceEntry.getKey();
+      StoragePluginConfig sourcePlugin = sourceEntry.getValue();
+      if (!(sourcePlugin instanceof FileSystemConfig)) {
+        logger.warn("Formats are only supported by File System plugins. Source name '{}' is of type '{}'.",
+            pluginName, sourcePlugin.getClass().getName());
+        continue;
+      }
+      StoragePluginConfig targetPlugin = bootstrapPlugins.getConfig(pluginName);
+      if (targetPlugin == null) {
+        logger.warn("No boostrap storage plugin matches the name '{}'", pluginName);
+        continue;
+      }
+      if (!(targetPlugin instanceof FileSystemConfig)) {
+        logger.warn("Formats are only supported by File System plugins. Source name '{}' " +
+            "is of type '{}' but the bootstrap plugin of that name is of type '{}.",
+            pluginName, sourcePlugin.getClass().getName(),
+            targetPlugin.getClass().getName());
+        continue;
+      }
+      FileSystemConfig targetFsConfig = (FileSystemConfig) targetPlugin;
+      FileSystemConfig sourceFsConfig = (FileSystemConfig) sourcePlugin;
+      sourceFsConfig.getFormats().forEach((formatName, formatValue) -> {
+        FormatPluginConfig oldPluginConfig = targetFsConfig.getFormats().putIfAbsent(formatName, formatValue);
+        if (oldPluginConfig != null) {
+          logger.warn("Duplicate format instance '{}' defined in '{}' and '{}', ignoring the later one.",
+              formatName, pluginURLMap.get(pluginName), url);
+        }
+      });
+    }
+  }
+
+  private StoragePlugins getPluginsFromResource(URL resource) throws IOException {
+    String pluginsData = Resources.toString(resource, Charsets.UTF_8);
+    return context.mapper().readValue(pluginsData, StoragePlugins.class);
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginHandle.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginHandle.java
new file mode 100644
index 0000000..4a80e66
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginHandle.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Represents a storage plugin, defined by a (name, config) pair. The config
+ * implies a connector definition, including a way to create the plugin
+ * instance ({@code StoragePlugin}). Storage plugins are created lazily to avoid
+ * long Drillbit start times. Plugin creation is synchronized as is closing.
+ * <p>
+ * A handle has a type used to determine which operations are allowed on
+ * the handle. For example, intrinsic (system) plugins cannot be deleted or
+ * disabled.
+ *
+ * <h4>Caveats</h4>
+ *
+ * Note that race conditions are still possible:
+ * <ul>
+ * <li>User 1 submits a query that refers to plugin p. The registry creates
+ * an instance of plugin p and returns it. The planner proceeds to use it.</li>
+ * <li>User 2 disables p, causing its entry to be updated in persistent storage
+ * (but not yet in this entry)</li>
+ * <li>User 3 submits a query that refers to plugin p. The registry notices that
+ * the plugin is now disabled, removes it from the plugin list and closes the
+ * plugin.</li>
+ * <li>User 1 suffers a failure when the planner references the now-closed
+ * plugin p.</li>
+ * </ul>
+ * This issue has existed for some time and cannot be fixed here. The right
+ * solution is to introduce a reference count: each query which uses the plugin
+ * should hold a reference count until the completion of the plan or fragment.
+ * If we implement such a reference count, this is the place to maintain the count,
+ * and to close the plugin when the count goes to zero.
+ */
+public class PluginHandle {
+  private static final Logger logger = LoggerFactory.getLogger(PluginHandle.class);
+
+  enum PluginType {
+
+    /**
+     * System or similar plugin that uses a single config
+     * created along with the plugin implementation instance
+     * at Drillbit startup time. Not stored in the persistent
+     * store. The handle always contains an instance of the
+     * plugin which lives for the lifetime of the Drillbit.
+     */
+    INTRINSIC,
+
+    /**
+     * Normal plugin defined by a (name, config) pair, stored in
+     * persistent storage. Plugin instance created on demand on
+     * first use (not first access, since some accesses only need
+     * the name and config.) Lifetime is the lifetime of the Drillbit,
+     * or sooner if the plugin config is changed, in which case the
+     * plugin becomes {@code EPHEMERAL}.
+     */
+    STORED,
+
+    /**
+     * Plugin which was either a) {@code STORED} at some point, but
+     * the user then changed the config so that the old config (plus
+     * its plugin) became {@code EPHEMERAL}, or b) created in response
+     * to a table function that defined an ad-hoc, single-query plugin.
+     * In either case, the config defines a plugin shared across fragments
+     * when running a query. Ephemeral plugins eventually timeout. Or,
+     * if the user changes a config back to match an ephemeral plugin,
+     * the plugin moves back to the {@code STORED} state.
+     */
+    EPHEMERAL }
+
+  private final String name;
+  private final StoragePluginConfig config;
+  private final ConnectorHandle connector;
+  private final PluginType type;
+  private StoragePlugin plugin;
+
+  public PluginHandle(String name, StoragePluginConfig config,
+      ConnectorHandle connector) {
+    this.name = name;
+    this.config = config;
+    this.connector = connector;
+    this.type = connector.isIntrinsic() ? PluginType.INTRINSIC : PluginType.STORED;
+  }
+
+  public PluginHandle(String name, StoragePluginConfig config,
+      ConnectorHandle connector, PluginType type) {
+    this.name = name;
+    this.config = config;
+    this.connector = connector;
+    this.type = type;
+  }
+
+  public PluginHandle(StoragePlugin plugin, ConnectorHandle connector, PluginType type) {
+    this.name = plugin.getName();
+    this.config = plugin.getConfig();
+    this.connector = connector;
+    this.plugin = plugin;
+    this.type = type;
+  }
+
+  public String name() { return name; }
+  public StoragePluginConfig config() { return config; }
+  public boolean isStored() { return type == PluginType.STORED; }
+  public boolean isIntrinsic() { return type == PluginType.INTRINSIC; }
+
+  /**
+   * Retrieve the storage plugin instance, creating it if needed. Creation can take
+   * time if the plugin creates a connection to another system, especially if that system
+   * suffers timeouts.
+   *
+   * @return the initialized storage plugin
+   * @throws UserException if the storage plugin creation failed due to class errors
+   * (unlikely) or external system errors (more likely)
+   */
+  public synchronized StoragePlugin plugin() {
+    if (plugin != null) {
+      return plugin;
+    }
+    logger.info("Creating storage plugin for {}", name);
+    try {
+      plugin = connector.newInstance(name, config);
+    } catch (Exception e) {
+      throw UserException.internalError(e)
+        .addContext("Plugin name", name)
+        .addContext("Plugin class", connector.connectorClass().getName())
+        .build(logger);
+    }
+    try {
+      plugin.start();
+    } catch (UserException e) {
+      plugin = null;
+      throw e;
+    } catch (Exception e) {
+      plugin = null;
+      throw UserException.dataReadError(e)
+        .addContext("Failed to start storage plugin")
+        .addContext("Plugin name", name)
+        .addContext("Plugin class", connector.connectorClass().getName())
+        .build(logger);
+    }
+    return plugin;
+  }
+
+  @VisibleForTesting
+  public synchronized boolean hasInstance() { return plugin != null; }
+
+  /**
+   * Close the plugin. Can occur when the handle is evicted from the loading
+   * cache where we must not throw an exception. Also called on shutdown.
+   */
+  public synchronized void close() {
+    try {
+      if (plugin != null) {
+        plugin.close();
+      }
+    } catch (Exception e) {
+      logger.warn("Exception while shutting down storage plugin: {}",
+          name == null ? "ephemeral" : config.getClass().getSimpleName(), e);
+    } finally {
+      plugin = null;
+    }
+  }
+
+  /**
+   * Atomically transfer the plugin instance, if any, to a new handle
+   * of the given type. Avoids race conditions when transferring a plugin
+   * from/to ephemeral storage and the plugin cache, since those two
+   * caches are not synchronized as a whole. Ensures that only one of the
+   * threads in a race condition will transfer the actual plugin instance.
+   */
+  public synchronized PluginHandle transfer(PluginType type) {
+    if (plugin == null) {
+      return new PluginHandle(name, config, connector, type);
+    } else {
+      PluginHandle newHandle = new PluginHandle(plugin, connector, type);
+      plugin = null;
+      return newHandle;
+    }
+  }
+
+  @Override
+  public String toString() {
+    StringBuilder buf = new StringBuilder()
+        .append(getClass().getSimpleName())
+        .append("[")
+        .append("name=")
+        .append(name)
+        .append(", config=")
+        .append(config.toString())
+        .append(", provider=")
+        .append(connector.getClass().getSimpleName())
+        .append(", plugin=");
+    if (plugin == null) {
+      buf.append("null");
+    } else {
+      buf.append(plugin.getClass().getSimpleName())
+         .append(" (")
+         .append(System.identityHashCode(plugin) % 1000)
+         .append(")");
+    }
+    return buf.append("]").toString();
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRegistryContext.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRegistryContext.java
new file mode 100644
index 0000000..3c94bf3
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PluginRegistryContext.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.server.DrillbitContext;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+/**
+ * Provides a loose coupling of the plugin registry to the resources it needs
+ * from elsewhere. Allows binding the registry via the {@code DrillbitContext}
+ * in production, and to ad-hoc versions in tests.
+ */
+public interface PluginRegistryContext {
+  DrillConfig config();
+  ObjectMapper mapper();
+  ScanResult classpathScan();
+
+  // TODO: Remove this here and from StoragePlugin constructors.
+  // DrillbitContext is too complex and intimate to expose to
+  // extensions
+  DrillbitContext drillbitContext();
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/BatchExceededException.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PrivatePlugin.java
similarity index 65%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/store/BatchExceededException.java
rename to exec/java-exec/src/main/java/org/apache/drill/exec/store/PrivatePlugin.java
index e3119df..6a25570 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/BatchExceededException.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/PrivatePlugin.java
@@ -17,10 +17,17 @@
  */
 package org.apache.drill.exec.store;
 
-import org.apache.drill.common.exceptions.DrillRuntimeException;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
 
-public class BatchExceededException extends DrillRuntimeException {
-    public BatchExceededException(int capacity, int attempted) {
-        super("Batch exceeded in size. Capacity: " + capacity + ", Attempted: " + attempted);
-    }
+/**
+ * Indicates private plugins which will be excluded from automatic plugin
+ * discovery. Used for test classes which should only be used in the tests
+ * that need them.
+ */
+@Retention(RetentionPolicy.RUNTIME)
+@Target({ElementType.TYPE})
+public @interface PrivatePlugin {
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordDataType.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordDataType.java
index 1f5b938..b556467 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordDataType.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/RecordDataType.java
@@ -26,7 +26,7 @@
 import org.apache.calcite.sql.type.SqlTypeName;
 
 /**
- * RecordDataType defines names and data types of columns in a static drill table.
+ * Defines names and data types of columns in a static drill table.
  */
 public abstract class RecordDataType {
 
@@ -66,5 +66,4 @@
     }
     return factory.createStructType(fields, names);
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
index 04e2f08..6e3f44c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ResourceInputStream.java
@@ -23,11 +23,12 @@
 
 import org.apache.hadoop.fs.PositionedReadable;
 import org.apache.hadoop.fs.Seekable;
-
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 
 public class ResourceInputStream extends ByteArrayInputStream implements Seekable, PositionedReadable {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(ResourceInputStream.class);
+  static final Logger logger = LoggerFactory.getLogger(ResourceInputStream.class);
 
   public ResourceInputStream(byte[] bytes) {
     super(bytes);
@@ -41,6 +42,7 @@
     }
   }
 
+  @Override
   public int read(long position, byte b[], int off, int len) {
     Preconditions.checkNotNull(b);
     Preconditions.checkPositionIndexes(off, off + len, b.length);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
index e243f7f..2e46770 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaConfig.java
@@ -25,7 +25,8 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Strings;
 
 /**
- * Contains information needed by {@link org.apache.drill.exec.store.AbstractSchema} implementations.
+ * Contains information needed by
+ * {@link org.apache.drill.exec.store.AbstractSchema} implementations.
  */
 public class SchemaConfig {
   private final String userName;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java
index 4766e96..ea25e05 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaFactory.java
@@ -22,7 +22,7 @@
 import java.io.IOException;
 
 /**
- * StoragePlugins implements this interface to register the schemas they provide.
+ * Storage plugins implement this interface to register the schemas they provide.
  */
 public interface SchemaFactory {
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaPartitionExplorer.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaPartitionExplorer.java
index 28663aa..2530f16 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaPartitionExplorer.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaPartitionExplorer.java
@@ -21,7 +21,7 @@
 
 /**
  * Exposes partition information for a particular schema.
- *
+ * <p>
  * For a more explanation of the current use of this interface see
  * the documentation in {@link PartitionExplorer}.
  */
@@ -32,7 +32,7 @@
    * specified by partition columns and values. Individual storage
    * plugins will assign specific meaning to the parameters and return
    * values.
-   *
+   * <p>
    * For more info see docs in {@link PartitionExplorer}.
    *
    * @param partitionColumns a list of partitions to match
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
index a924fb5..dd00537 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SchemaTreeProvider.java
@@ -33,13 +33,15 @@
 import org.apache.drill.exec.util.ImpersonationUtil;
 
 import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
- * Class which creates new schema trees. It keeps track of newly created schema trees and closes them safely as
- * part of {@link #close()}.
+ * Creates new schema trees. It keeps track of newly created schema trees and
+ * closes them safely as part of {@link #close()}.
  */
 public class SchemaTreeProvider implements AutoCloseable {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(SchemaTreeProvider.class);
+  private static final Logger logger = LoggerFactory.getLogger(SchemaTreeProvider.class);
 
   private final DrillbitContext dContext;
   private final List<SchemaPlus> schemaTreesToClose;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePlugin.java
index 1613537..e589a47 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePlugin.java
@@ -38,16 +38,33 @@
  */
 public interface StoragePlugin extends SchemaFactory, AutoCloseable {
 
-  /** Indicates if Drill can read the table from this format.
+  String getName();
+
+  /**
+   * Initialize the storage plugin. The storage plugin will not be used until this method is called.
+   */
+  void start() throws IOException;
+
+  /**
+   * Indicates if Drill can read the table from this format.
   */
   boolean supportsRead();
 
-  /** Indicates if Drill can write a table to this format (e.g. as JSON, csv, etc.).
+  /**
+   * Indicates if Drill can write a table to this format (e.g. as JSON, csv, etc.).
    */
   boolean supportsWrite();
 
-  /** An implementation of this method will return one or more specialized rules that Drill query
-   *  optimizer can leverage in <i>physical</i> space. Otherwise, it should return an empty set.
+  /**
+   * Method returns a Jackson serializable object that extends a StoragePluginConfig.
+   *
+   * @return an extension of StoragePluginConfig
+  */
+  StoragePluginConfig getConfig();
+
+  /**
+   * An implementation of this method will return one or more specialized rules that Drill query
+   * optimizer can leverage in <i>physical</i> space. Otherwise, it should return an empty set.
    * @return an empty set or a set of plugin specific physical optimizer rules.
    */
   @Deprecated
@@ -70,7 +87,8 @@
    * @param options (optional) session options
    * @return The physical scan operator for the particular GroupScan (read) node.
    */
-  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, SessionOptionManager options) throws IOException;
+  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      SessionOptionManager options) throws IOException;
 
   /**
    * Get the physical scan operator for the particular GroupScan (read) node.
@@ -81,7 +99,8 @@
    * @param providerManager manager for handling metadata providers
    * @return The physical scan operator for the particular GroupScan (read) node.
    */
-  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, SessionOptionManager options, MetadataProviderManager providerManager) throws IOException;
+  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      SessionOptionManager options, MetadataProviderManager providerManager) throws IOException;
 
   /**
    * Get the physical scan operator for the particular GroupScan (read) node.
@@ -91,7 +110,8 @@
    * @param columns (optional) The list of column names to scan from the data source.
    * @return The physical scan operator for the particular GroupScan (read) node.
   */
-  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns) throws IOException;
+  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      List<SchemaPath> columns) throws IOException;
 
   /**
    * Get the physical scan operator for the particular GroupScan (read) node.
@@ -102,7 +122,8 @@
    * @param options (optional) session options
    * @return The physical scan operator for the particular GroupScan (read) node.
    */
-  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns, SessionOptionManager options) throws IOException;
+  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      List<SchemaPath> columns, SessionOptionManager options) throws IOException;
 
   /**
    * Get the physical scan operator for the particular GroupScan (read) node.
@@ -114,19 +135,8 @@
    * @param providerManager manager for handling metadata providers
    * @return The physical scan operator for the particular GroupScan (read) node.
    */
-  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns, SessionOptionManager options, MetadataProviderManager providerManager) throws IOException;
-
-  /**
-   * Method returns a Jackson serializable object that extends a StoragePluginConfig.
-   *
-   * @return an extension of StoragePluginConfig
-  */
-  StoragePluginConfig getConfig();
-
-  /**
-   * Initialize the storage plugin. The storage plugin will not be used until this method is called.
-   */
-  void start() throws IOException;
+  AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      List<SchemaPath> columns, SessionOptionManager options, MetadataProviderManager providerManager) throws IOException;
 
   /**
    * Allows to get the format plugin for current storage plugin based on appropriate format plugin config usage.
@@ -136,6 +146,4 @@
    * @throws UnsupportedOperationException, if storage plugin doesn't support format plugins.
    */
   FormatPlugin getFormatPlugin(FormatPluginConfig config);
-
-  String getName();
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginMap.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginMap.java
index 47f0975..65b1e96 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginMap.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginMap.java
@@ -18,98 +18,217 @@
 package org.apache.drill.exec.store;
 
 import java.util.Collection;
+import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map;
-import java.util.Map.Entry;
 import java.util.Set;
 
-import org.apache.drill.common.AutoCloseables;
+import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.logical.StoragePluginConfig;
-
-import org.apache.drill.shaded.guava.com.google.common.collect.LinkedListMultimap;
-import org.apache.drill.shaded.guava.com.google.common.collect.Multimap;
-import org.apache.drill.shaded.guava.com.google.common.collect.Multimaps;
 import org.apache.drill.common.map.CaseInsensitiveMap;
+import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 /**
- * Holds maps to storage plugins. Supports name => plugin and config => plugin mappings.
- *
+ * Holds maps to storage plugins. Supports name => plugin and config => plugin
+ * mappings. Name map is case insensitive. Assumes a unique config => plugin
+ * mapping. This map holds only enabled plugins; those which are disabled appear
+ * only in the persistent store.
+ * <p>
+ * The two maps are synchronized by this class, allowing the maps themselves
+ * to not be of the concurrent variety.
+ * <p>
  * This is inspired by ConcurrentMap but provides a secondary key mapping that allows an alternative lookup mechanism.
  * The class is responsible for internally managing consistency between the two maps. This class is threadsafe.
  * Name map is case insensitive.
+ *
+ * <h4>Concurrency</h4>
+ *
+ * All map access is protected to avoid race conditions across the two maps.
+ * Callers should generally remove/replace methods which take the old value
+ * as a form of optimistic concurrency: the change is made only if the value
+ * found in the map is that which is expected.
+ * <p>
+ * Plugin open and close is <b>not</b> done in this map as both operations
+ * could take considerable time and must not hold locks. The caller is
+ * responsible for checking return statuses and performing the needed
+ * close. (The one exception is final close, which is done here.)
  */
-class StoragePluginMap implements Iterable<Entry<String, StoragePlugin>>, AutoCloseable {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StoragePluginMap.class);
+class StoragePluginMap implements Iterable<PluginHandle>, AutoCloseable {
+  private static final Logger logger = LoggerFactory.getLogger(StoragePluginMap.class);
 
-  private final Map<String, StoragePlugin> nameMap = CaseInsensitiveMap.newConcurrentMap();
+  private final Map<String, PluginHandle> nameMap = CaseInsensitiveMap.newHashMap();
+  private final Map<StoragePluginConfig, PluginHandle> configMap = new HashMap<>();
 
-  @SuppressWarnings("unchecked")
-  private final Multimap<StoragePluginConfig, StoragePlugin> configMap =
-      (Multimap<StoragePluginConfig, StoragePlugin>) (Object)
-      Multimaps.synchronizedListMultimap(LinkedListMultimap.create());
-
-  public void putAll(Map<String, StoragePlugin> mapOfPlugins) {
-    for (Entry<String, StoragePlugin> entry : mapOfPlugins.entrySet()) {
-      StoragePlugin plugin = entry.getValue();
-      nameMap.put(entry.getKey(), plugin);
-      // this possibly overwrites items in a map.
-      configMap.put(plugin.getConfig(), plugin);
-    }
-  }
-
-  public boolean replace(String name, StoragePlugin oldPlugin, StoragePlugin newPlugin) {
-    boolean ok = nameMap.replace(name, oldPlugin, newPlugin);
-    if (ok) {
-      configMap.put(newPlugin.getConfig(), newPlugin);
-      configMap.remove(oldPlugin.getConfig(), oldPlugin);
-    }
-
-    return ok;
-  }
-
-  public boolean remove(String name, StoragePlugin oldPlugin) {
-    boolean ok = nameMap.remove(name, oldPlugin);
-    if (ok) {
-      configMap.remove(oldPlugin.getConfig(), oldPlugin);
-    }
-    return ok;
-  }
-
-  public void put(String name, StoragePlugin plugin) {
-    StoragePlugin oldPlugin = nameMap.put(name, plugin);
-    configMap.put(plugin.getConfig(), plugin);
+  /**
+   * Put a plugin. Replaces any existing plugin of the same name. Safe for
+   * putting the same plugin twice, or a different plugin under the same name.
+   *
+   * @return the replaced entry, if any, which the caller should close
+   */
+  public synchronized PluginHandle put(PluginHandle plugin) {
+    PluginHandle oldPlugin = nameMap.put(plugin.name(), plugin);
     if (oldPlugin != null) {
-      try {
-        oldPlugin.close();
-      } catch (Exception e) {
-        logger.warn("Failure while closing plugin replaced by injection.", e);
+      if (oldPlugin == plugin || oldPlugin.config().equals(plugin.config())) {
+        return null;
       }
+      if (oldPlugin.isIntrinsic()) {
+        // Put the old one back
+        nameMap.put(oldPlugin.name(), oldPlugin);
+        throw UserException.permissionError()
+          .message("Attempt to replace a system plugin.")
+          .addContext("Plugin name", oldPlugin.name())
+          .addContext("Intrinsic plugin class", oldPlugin.config().getClass().getName())
+          .addContext("Attempted replacement", plugin.config().getClass().getName())
+          .build(logger);
+      }
+      configMap.remove(oldPlugin.config());
     }
-  }
-
-  public StoragePlugin putIfAbsent(String name, StoragePlugin plugin) {
-    StoragePlugin oldPlugin = nameMap.putIfAbsent(name, plugin);
-    if (oldPlugin == null) {
-      configMap.put(plugin.getConfig(), plugin);
-    }
+    configMap.put(plugin.config(), plugin);
     return oldPlugin;
   }
 
-  public StoragePlugin remove(String name) {
-    StoragePlugin plugin = nameMap.remove(name);
-    if (plugin != null) {
-      configMap.remove(plugin.getConfig(), plugin);
+  /**
+   * Put the given plugin, but only if no plugin already exists for the
+   * name.
+   * @param plugin the new plugin
+   * @return the resulting entry, the old one that already existed,
+   * or the new one
+   */
+  public synchronized PluginHandle putIfAbsent(PluginHandle plugin) {
+    PluginHandle oldPlugin = nameMap.putIfAbsent(plugin.name(), plugin);
+    if (oldPlugin != null) {
+      return oldPlugin;
+    } else {
+      configMap.put(plugin.config(), plugin);
+      return plugin;
     }
-    return plugin;
   }
 
-  public StoragePlugin get(String name) {
+  public synchronized PluginHandle get(String name) {
     return nameMap.get(name);
   }
 
+  /**
+   * Retrieve a plugin by config. Configs are compared by value: two instances
+   * with the same values compare as identical (assuming the plugin config
+   * implementation is correct.)
+   */
+  public synchronized PluginHandle get(StoragePluginConfig config) {
+    return configMap.get(config);
+  }
+
+  /**
+   * Replaces one plugin with another, but only if the map contains the old
+   * one.
+   *
+   * @param oldPlugin the expected old plugin to be replaced
+   * @param newPlugin the new plugin to insert
+   * @return true if the new plugin was inserted, false if not because
+   * the old plugin was not found in the map
+   */
+  public synchronized boolean replace(PluginHandle oldPlugin, PluginHandle newPlugin) {
+    Preconditions.checkArgument(oldPlugin != null);
+    Preconditions.checkArgument(newPlugin != null);
+    Preconditions.checkArgument(oldPlugin.name().equalsIgnoreCase(newPlugin.name()));
+    Preconditions.checkArgument(oldPlugin != newPlugin);
+    if (oldPlugin.isIntrinsic()) {
+      throw UserException.permissionError()
+        .message("Attempt to replace a system plugin.")
+        .addContext("Plugin name", oldPlugin.name())
+        .addContext("Intrinsic plugin class", oldPlugin.config().getClass().getName())
+        .addContext("Attempted replacement", newPlugin.config().getClass().getName())
+        .build(logger);
+    }
+    boolean ok = nameMap.replace(oldPlugin.name(), oldPlugin, newPlugin);
+    if (ok) {
+      configMap.remove(oldPlugin.config(), oldPlugin);
+      configMap.put(newPlugin.config(), newPlugin);
+    }
+    return ok;
+  }
+
+  /**
+   * Removes and returns a plugin by name; the caller closes it. This form is not
+   * concurrency-safe: another user could have deleted and recreated the
+   * plugin between the time the current user viewed the plugin and decided
+   * to delete it.
+   *
+   * @return the doomed plugin if the plugin was removed, null if there was
+   * no entry by the given name
+   * @see #remove(PluginHandle)
+   */
+  public synchronized PluginHandle remove(String name) {
+    PluginHandle plugin = get(name);
+    if (plugin == null) {
+      return null;
+    }
+    if (plugin.isIntrinsic()) {
+      throw UserException.permissionError()
+        .message("Attempt to remove a system plugin.")
+        .addContext("Plugin name", plugin.name())
+        .addContext("Intrinsic plugin class", plugin.config().getClass().getName())
+        .build(logger);
+    }
+    nameMap.remove(name);
+    configMap.remove(plugin.config(), plugin);
+    return plugin;
+  }
+
+  /**
+   * Removes the plugin, but only if it is in the map. That is,
+   * resolves the name and removes the plugin only if it resolves
+   * to the given plugin.
+   *
+   * @return true if the plugin was removed and the caller
+   * should close it, false otherwise
+   */
+  public synchronized boolean remove(PluginHandle oldPlugin) {
+    if (oldPlugin.isIntrinsic()) {
+      throw UserException.permissionError()
+        .message("Attempt to remove a system plugin.")
+        .addContext("Plugin name", oldPlugin.name())
+        .addContext("Intrinsic plugin class", oldPlugin.config().getClass().getName())
+        .build(logger);
+    }
+    boolean ok = nameMap.remove(oldPlugin.name(), oldPlugin);
+    if (ok) {
+      configMap.remove(oldPlugin.config(), oldPlugin);
+    }
+    return ok;
+  }
+
+  /**
+   * Given a name and a config (which is presumed to have become disabled),
+   * remove and return any existing plugin. Only matches if the name is found and the
+   * named plugin has the same config as the one to remove to enforce
+   * optimistic concurrency.
+   *
+   * @param name plugin name
+   * @param oldConfig expected config of the doomed plugin
+   * @return the removed plugin entry, or null if the name or config did not match
+   */
+  public synchronized PluginHandle remove(String name, StoragePluginConfig oldConfig) {
+    PluginHandle oldEntry = nameMap.get(name);
+    if (oldEntry == null || !oldEntry.config().equals(oldConfig)) {
+      return null;
+    }
+    if (oldEntry.isIntrinsic()) {
+      throw UserException.permissionError()
+        .message("Attempt to remove a system plugin.")
+        .addContext("Plugin name", oldEntry.name())
+        .addContext("Intrinsic plugin class", oldEntry.config().getClass().getName())
+        .build(logger);
+    }
+    nameMap.remove(oldEntry.name());
+    configMap.remove(oldEntry.config());
+    return oldEntry;
+  }
+
   @Override
-  public Iterator<Entry<String, StoragePlugin>> iterator() {
-    return nameMap.entrySet().iterator();
+  public synchronized Iterator<PluginHandle> iterator() {
+    return nameMap.values().iterator();
   }
 
   /**
@@ -117,28 +236,29 @@
    *
    * @return plugin names
    */
-  public Set<String> getNames() {
+  public synchronized Set<String> getNames() {
     return nameMap.keySet();
   }
 
-  public StoragePlugin get(StoragePluginConfig config) {
-    Collection<StoragePlugin> plugins = configMap.get(config);
-    if (plugins == null || plugins.isEmpty()) {
-      return null;
-    } else {
-      // return first one since it doesn't matter which plugin we use for ephemeral purposes (since they are all the
-      // same, they just have different names.
-      return plugins.iterator().next();
-    }
-  }
-
-  public Iterable<StoragePlugin> plugins() {
+  public synchronized Collection<PluginHandle> plugins() {
     return nameMap.values();
   }
 
-  @Override
-  public void close() throws Exception {
-    AutoCloseables.close(configMap.values());
+  public synchronized Set<StoragePluginConfig> configs() {
+    return configMap.keySet();
   }
 
+  // Closes all plugins. Can take some time if plugins are slow to close
+  // (Suffer network timeouts, for example.) Not synchronized as should
+  // only be done during final Drillbit shutdown.
+  @Override
+  public void close() {
+    // Plugin handles do not derive from AutoCloseable. Handles must handle
+    // any errors on close so that things work when the loading cache decides
+    // to evict a plugin. So, we just use a simple per-handle iteration here.
+    plugins().stream()
+      .forEach(e -> e.close());
+    configMap.clear();
+    nameMap.clear();
+  }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginOptimizerRule.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginOptimizerRule.java
index f81c783..0eae15e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginOptimizerRule.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginOptimizerRule.java
@@ -26,5 +26,4 @@
   public StoragePluginOptimizerRule(RelOptRuleOperand operand, String description) {
     super(operand, DrillRelFactories.LOGICAL_BUILDER, description);
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
index 036187a..26f1b7d 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistry.java
@@ -22,40 +22,25 @@
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.exec.exception.DrillbitStartupException;
 import org.apache.drill.exec.store.dfs.FormatPlugin;
-import org.apache.drill.exec.store.sys.PersistentStore;
 
 public interface StoragePluginRegistry extends Iterable<Map.Entry<String, StoragePlugin>>, AutoCloseable {
-
-  String STORAGE_PLUGIN_REGISTRY_IMPL = "drill.exec.storage.registry";
-  String ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE = "drill.exec.storage.action_on_plugins_override_file";
   String PSTORE_NAME = "sys.storage_plugins";
 
   /**
    * Initialize the storage plugin registry. Must be called before the registry is used.
-   *
-   * @throws DrillbitStartupException if drillbit startup fails
    */
-  void init() throws DrillbitStartupException;
+  void init();
 
   /**
-   * Delete a plugin by name
-   *
-   * @param name The name of the storage plugin to delete.
-   */
-  void deletePlugin(String name);
-
-  /**
-   * Create a plugin by name and configuration. If the plugin already exists, update the plugin
+   * Store a plugin by name and configuration. If the plugin already exists, update the plugin
    *
    * @param name The name of the plugin
    * @param config The plugin configuration
-   * @param persist Whether to persist the plugin for later use or treat it as ephemeral.
    * @return The StoragePlugin instance.
    * @throws ExecutionSetupException if plugin cannot be created
    */
-  StoragePlugin createOrUpdate(String name, StoragePluginConfig config, boolean persist) throws ExecutionSetupException;
+  void put(String name, StoragePluginConfig config) throws ExecutionSetupException;
 
   /**
    * Get a plugin by name. Create it based on the PStore saved definition if it doesn't exist.
@@ -76,12 +61,34 @@
   StoragePlugin getPlugin(StoragePluginConfig config) throws ExecutionSetupException;
 
   /**
-   * Add a plugin to the registry using the provided name.
+   * Retrieve a plugin configuration by name.
    *
-   * @param name The name of the plugin
-   * @param plugin The StoragePlugin instance
+   * @param name the plugin name
+   * @return the plugin configuration, or null if the plugin is not found
    */
-  void addEnabledPlugin(String name, StoragePlugin plugin);
+  StoragePluginConfig getConfig(String name);
+
+  /**
+   * Remove a plugin by name
+   *
+   * @param name The name of the storage plugin to remove
+   */
+  void remove(String name);
+
+  /**
+   * Returns a copy of the set of all stored plugin configurations,
+   * directly from the persistent store.
+   * @return map of stored plugin configurations
+   */
+  Map<String, StoragePluginConfig> storedConfigs();
+
+  /**
+   * Returns a copy of the set of enabled stored plugin configurations.
+   * The registry is refreshed against the persistent store prior
+   * to building the map.
+   * @return map of enabled, stored plugin configurations
+   */
+  Map<String, StoragePluginConfig> enabledConfigs();
 
   /**
    * Get the Format plugin for the FileSystemPlugin associated with the provided storage config and format config.
@@ -94,17 +101,9 @@
   FormatPlugin getFormatPlugin(StoragePluginConfig storageConfig, FormatPluginConfig formatConfig) throws ExecutionSetupException;
 
   /**
-   * Get the PStore for this StoragePluginRegistry. (Used in the management layer.)
-   *
-   * @return PStore for StoragePlugin configuration objects.
-   */
-  PersistentStore<StoragePluginConfig> getStore();
-
-  /**
    * Get the Schema factory associated with this storage plugin registry.
    *
    * @return A SchemaFactory that can register the schemas associated with this plugin registry.
    */
   SchemaFactory getSchemaFactory();
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistryImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistryImpl.java
index 2bf6bc5..58e0cf5 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistryImpl.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginRegistryImpl.java
@@ -17,17 +17,11 @@
  */
 package org.apache.drill.exec.store;
 
-import static org.apache.drill.shaded.guava.com.google.common.base.Preconditions.checkNotNull;
-
 import java.io.IOException;
-import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
-import java.net.URL;
 import java.util.ArrayList;
 import java.util.Collection;
-import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
+import java.util.IdentityHashMap;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
@@ -36,180 +30,559 @@
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
 
-import org.apache.drill.exec.store.dfs.FileSystemConfig;
-import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
 import org.apache.calcite.schema.SchemaPlus;
-import org.apache.drill.common.config.LogicalPlanPersistence;
-import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.collections.ImmutableEntry;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
+import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.common.map.CaseInsensitiveMap;
-import org.apache.drill.common.scanner.ClassPathScanner;
-import org.apache.drill.common.scanner.persistence.AnnotatedClassDescriptor;
-import org.apache.drill.common.scanner.persistence.ScanResult;
-import org.apache.drill.exec.ExecConstants;
-import org.apache.drill.exec.exception.StoreException;
 import org.apache.drill.exec.planner.logical.StoragePlugins;
 import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.PluginHandle.PluginType;
 import org.apache.drill.exec.store.dfs.FormatPlugin;
-import org.apache.drill.exec.store.sys.CaseInsensitivePersistentStore;
-import org.apache.drill.exec.store.sys.PersistentStore;
-import org.apache.drill.exec.store.sys.PersistentStoreConfig;
-
-import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
-import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
-import org.apache.drill.shaded.guava.com.google.common.base.Stopwatch;
 import org.apache.drill.shaded.guava.com.google.common.cache.CacheBuilder;
 import org.apache.drill.shaded.guava.com.google.common.cache.CacheLoader;
 import org.apache.drill.shaded.guava.com.google.common.cache.LoadingCache;
 import org.apache.drill.shaded.guava.com.google.common.cache.RemovalListener;
-import org.apache.drill.shaded.guava.com.google.common.io.Resources;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
+/**
+ * Plugin registry. Caches plugin instances which correspond to configurations
+ * stored in persistent storage. Synchronizes the instances and storage.
+ * <p>
+ * Allows multiple "locators" to provide plugin classes such as the "classic"
+ * version for classes in the same class loader, the "system" version for
+ * system-defined plugins.
+ * <p>
+ * Provides multiple layers of abstraction:
+ * <ul>
+ * <li>A plugin config/implementation pair (called a "connector" here)
+ * is located by</li>
+ * <li>A connector locator, which also provides bootstrap plugins and can
+ * create a plugin instance from a configuration, which are cached in</li>
+ * <li>The plugin cache, which holds stored, system and ad-hoc plugins. The
+ * stored plugins are backed by</li>
+ * <li>A persistent store: the file system for tests and embedded, ZK for
+ * a distributed server, or</li>
+ * <li>An ephemeral cache for unnamed configs, such as those created by
+ * a table function.</li>
+ * </ul>
+ * <p>
+ * The idea is to push most functionality into the above abstractions,
+ * leaving overall coordination here.
+ * <p>
+ * Plugins themselves have multiple levels of definitions:
+ * <ul>
+ * <li>The config and plugin classes, provided by the locator.</li>
+ * <li>The {@link ConnectorHandle} which defines the config class and
+ * the locator which can create instances of that class.</li>
+ * <li>A config instance which is typically deserialized from JSON
+ * independent of the implementation class.</li>
+ * <li>A {@link PluginHandle} which pairs the config with a name as
+ * the unit that the user thinks of as a "plugin." The plugin entry
+ * links to the {@code ConnectorEntry} to create the instance lazily
+ * when first requested.</li>
+ * <li>The plugin class instance, which provides long-term state and
+ * which provides the logic for the plugin.</li>
+ * </ul>
+ *
+ * <h4>Concurrency</h4>
+ *
+ * Drill is a concurrent system; multiple users can attempt to add, remove
+ * and update plugin configurations at the same time. The only good
+ * solution would be to version the plugin configs. Instead, we rely on
+ * the fact that configs change infrequently.
+ * <p>
+ * The code syncs the in-memory cache with the persistent store on each
+ * access (which is actually inefficient and should be reviewed.)
+ * <p>
+ * During refresh, it could be that another thread is doing exactly
+ * the same thing, or even fighting us by changing the config. It is
+ * impossible to ensure a totally consistent answer. The goal is to
+ * make sure that the cache ends up agreeing with the persistent store
+ * as it was at some point in time.
+ * <p>
+ * The {@link PluginsMap} class provides in-memory synchronization of the
+ * name and config maps. Careful coding is needed when handling refresh
+ * since another thread could make the same changes.
+ * <p>
+ * Once the planner obtains a plugin, another user could come along and
+ * change the config for that plugin. Drill treats that change as another
+ * plugin: the original one continues to be used by the planner (but see
+ * below), while new queries use the new version.
+ * <p>
+ * Since the config on remote servers may have changed relative to the one
+ * this Foreman used for planning, the plan includes the plugin config
+ * itself (not just a reference to the config.) This works because the
+ * config is usually small.
+ *
+ * <h4>Ephemeral Plugins</h4>
+ *
+ * An ephemeral plugin handles table functions which create a temporary,
+ * unnamed configuration that is needed only for the execution of a
+ * single query, but which may be used across many threads. If the same
+ * table function is used multiple times, then the same ephemeral plugin
+ * will be used across queries. Ephemeral plugins are are based on the
+ * same connectors as stored plugins, but are not visible to the planner.
+ * They will expire after some time or number.
+ * <p>
+ * The ephemeral store also acts as a graveyard for deleted or changed
+ * plugins. When removing a plugin, the old plugin is moved to ephemeral
+ * storage to allow running queries to locate it. Similarly, when a
+ * new configuration is stored, the corresponding plugin is retrieved
+ * from ephemeral storage, if it exists. This avoids odd cases where
+ * the same plugin exists in both normal and ephemeral storage.
+ *
+ * <h4>Caveats</h4>
+ *
+ * The main problem with synchronization at present is that plugins
+ * provide a {@link close()} method that, if used, could render the
+ * plugin unusable. Suppose a Cassandra plugin, say, maintains a connection
+ * to a server used across multiple queries and threads. Any change to
+ * the config immediately calls {@code close()} on the plugin, even though
+ * it may be in use in planning a query on another thread. Random failures
+ * will result.
+ * <p>
+ * The same issue can affect ephemeral plugins: if the number in the cache
+ * reaches the limit, the registry will start closing old ones, without
+ * knowing if that plugin is actually in use.
+ * <p>
+ * The workaround is to not actually honor the {@code close()} call. Longer
+ * term, a reference count is needed.
+ *
+ * <h4>Error Handling</h4>
+ *
+ * Error handling needs review. Those problems that result from user actions
+ * should be raised as a {@code UserException}. Those that violate invariants
+ * as other forms of exception.
+ */
 public class StoragePluginRegistryImpl implements StoragePluginRegistry {
+  private static final Logger logger = LoggerFactory.getLogger(StoragePluginRegistryImpl.class);
 
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StoragePluginRegistryImpl.class);
+  private final PluginRegistryContext context;
 
-  private final StoragePluginMap enabledPlugins;
+  /**
+   * Cache of enabled, stored plugins, as well as system and ad-hoc
+   * plugins. Plugins live in the cache until Drillbit exit, or
+   * (except for system plugins) explicitly removed.
+   */
+  private final StoragePluginMap pluginCache;
   private final DrillSchemaFactory schemaFactory;
-  private final DrillbitContext context;
-  private final LogicalPlanPersistence lpPersistence;
-  private final ScanResult classpathScan;
-  private final PersistentStore<StoragePluginConfig> pluginSystemTable;
-  private final LoadingCache<StoragePluginConfig, StoragePlugin> ephemeralPlugins;
+  private final StoragePluginStore pluginStore;
 
-  private Map<Object, Constructor<? extends StoragePlugin>> availablePlugins = Collections.emptyMap();
-  private Map<String, StoragePlugin> systemPlugins = Collections.emptyMap();
+  /**
+   * Cache of unnamed plugins typically resulting from table functions.
+   * Ephemeral plugins timeout after some time, or some max number of
+   * plugins.
+   */
+  private final LoadingCache<StoragePluginConfig, PluginHandle> ephemeralPlugins;
+
+  /**
+   * Set of locators which provide connector implementations.
+   */
+  private final List<ConnectorLocator> locators = new ArrayList<>();
+
+  /**
+   * Map of config (as deserialized from the persistent store or UI)
+   * to the connector which can instantiate a connector for that config.
+   */
+  private final Map<Class<? extends StoragePluginConfig>, ConnectorHandle> connectors =
+      new IdentityHashMap<>();
 
   public StoragePluginRegistryImpl(DrillbitContext context) {
-    this.enabledPlugins = new StoragePluginMap();
-    this.schemaFactory = new DrillSchemaFactory(null);
-    this.context = checkNotNull(context);
-    this.lpPersistence = checkNotNull(context.getLpPersistence());
-    this.classpathScan = checkNotNull(context.getClasspathScan());
-    this.pluginSystemTable = initPluginsSystemTable(context, lpPersistence);
+    this.context = new DrillbitPluginRegistryContext(context);
+    this.pluginCache = new StoragePluginMap();
+    this.schemaFactory = new DrillSchemaFactory(null, this);
+    locators.add(new ClassicConnectorLocator(this.context));
+    locators.add(new SystemPluginLocator(this.context));
+    this.pluginStore = new StoragePluginStoreImpl(context);
     this.ephemeralPlugins = CacheBuilder.newBuilder()
         .expireAfterAccess(24, TimeUnit.HOURS)
         .maximumSize(250)
         .removalListener(
-            (RemovalListener<StoragePluginConfig, StoragePlugin>) notification -> closePlugin(notification.getValue()))
-        .build(new CacheLoader<StoragePluginConfig, StoragePlugin>() {
+            (RemovalListener<StoragePluginConfig, PluginHandle>) notification -> notification.getValue().close())
+        .build(new CacheLoader<StoragePluginConfig, PluginHandle>() {
           @Override
-          public StoragePlugin load(StoragePluginConfig config) throws Exception {
-            return create(null, config);
+          public PluginHandle load(StoragePluginConfig config) throws Exception {
+            return createPluginEntry("$$ephemeral$$", config, PluginType.EPHEMERAL);
           }
         });
   }
 
   @Override
   public void init() {
-    availablePlugins = findAvailablePlugins(classpathScan);
-    systemPlugins = initSystemPlugins(classpathScan, context);
+    locators.stream().forEach(loc -> loc.init());
+    loadIntrinsicPlugins();
+    defineConnectors();
+    prepareStore();
+  }
+
+  private void loadIntrinsicPlugins() {
+    for (ConnectorLocator locator : locators) {
+      Collection<StoragePlugin> intrinsicPlugins = locator.intrinsicPlugins();
+      if (intrinsicPlugins == null) {
+        continue;
+      }
+      for (StoragePlugin sysPlugin : intrinsicPlugins) {
+        ConnectorHandle connector = ConnectorHandle.intrinsicConnector(locator, sysPlugin);
+        defineConnector(connector);
+        pluginCache.put(new PluginHandle(sysPlugin, connector, PluginType.INTRINSIC));
+      }
+    }
+  }
+
+  private void defineConnector(ConnectorHandle connector) {
+    ConnectorHandle prev = connectors.put(connector.configClass(), connector);
+    if (prev != null) {
+      String msg = String.format("Two connectors defined for the same config: " +
+          "%s -> %s and %s -> %s",
+          connector.configClass().getName(), connector.locator().getClass().getName(),
+          prev.configClass().getName(), prev.locator().getClass().getName());
+      logger.error(msg);
+      throw new IllegalStateException(msg);
+    }
+  }
+
+  private void defineConnectors() {
+    for (ConnectorLocator locator : locators) {
+      Set<Class<? extends StoragePluginConfig>> nonIntrinsicConfigs = locator.configClasses();
+      if (nonIntrinsicConfigs == null) {
+        continue;
+      }
+      for (Class<? extends StoragePluginConfig> configClass : nonIntrinsicConfigs) {
+        defineConnector(ConnectorHandle.configuredConnector(locator, configClass));
+      }
+    }
+  }
+
+  private void prepareStore() {
+    if (loadEnabledPlugins()) {
+      upgradeStore();
+    } else {
+      initStore();
+    }
+  }
+
+  private void initStore() {
+    logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
+    StoragePlugins bootstrapPlugins = new StoragePlugins();
     try {
-      StoragePlugins bootstrapPlugins = pluginSystemTable.getAll().hasNext() ? null : loadBootstrapPlugins(lpPersistence);
-
-      StoragePluginsHandler storagePluginsHandler = new StoragePluginsHandlerService(context);
-      storagePluginsHandler.loadPlugins(pluginSystemTable, bootstrapPlugins);
-
-      defineEnabledPlugins();
+      for (ConnectorLocator locator : locators) {
+        StoragePlugins locatorPlugins = locator.bootstrapPlugins();
+        if (locatorPlugins != null) {
+          bootstrapPlugins.putAll(locatorPlugins);
+        }
+      }
     } catch (IOException e) {
-      logger.error("Failure setting up storage enabledPlugins.  Drillbit exiting.", e);
-      throw new IllegalStateException(e);
+      throw new IllegalStateException(
+          "Failure initializing the plugin store. Drillbit exiting.", e);
+    }
+    pluginStore.putAll(bootstrapPlugins);
+    locators.stream().forEach(loc -> loc.onUpgrade());
+  }
+
+  /**
+   * Upgrades an existing persistent plugin config store with
+   * updates available from each locator. Preserves the admin's
+   * enabled/disabled choice for plugins that already exist in the
+   * store (see {@link #copyPluginStatus}).
+   */
+  private void upgradeStore() {
+    StoragePlugins upgraded = new StoragePlugins();
+    for (ConnectorLocator locator : locators) {
+      StoragePlugins locatorPlugins = locator.updatedPlugins();
+      // Fix: the null check must guard locatorPlugins (a locator with
+      // no updates returns null); "upgraded" was just constructed and
+      // can never be null, so the old test allowed an NPE in putAll().
+      if (locatorPlugins != null) {
+        upgraded.putAll(locatorPlugins);
+      }
+    }
+    if (upgraded.isEmpty()) {
+      return;
+    }
+    for (Map.Entry<String, StoragePluginConfig> newPlugin : upgraded) {
+      StoragePluginConfig oldPluginConfig = pluginStore.get(newPlugin.getKey());
+      if (oldPluginConfig != null) {
+        copyPluginStatus(oldPluginConfig, newPlugin.getValue());
+      }
+      pluginStore.put(newPlugin.getKey(), newPlugin.getValue());
     }
   }
 
-  @Override
-  public void deletePlugin(String name) {
-    StoragePlugin plugin = enabledPlugins.remove(name);
-    closePlugin(plugin);
-    pluginSystemTable.delete(name);
+  /**
+   * Determines the enabled status for an updated storage plugin
+   * config. If the status is absent in the updated config, the status
+   * is carried over from the config being replaced, so an upgrade
+   * does not silently change a plugin's enabled state.
+   *
+   * @param oldPluginConfig
+   *          current storage plugin config from Persistent Store or bootstrap
+   *          config file
+   * @param newPluginConfig
+   *          new storage plugin config
+   */
+  protected static void copyPluginStatus(
+      StoragePluginConfig oldPluginConfig,
+      StoragePluginConfig newPluginConfig) {
+    if (!newPluginConfig.isEnabledStatusPresent()) {
+      // Default to disabled when no prior config exists.
+      boolean newStatus = oldPluginConfig != null && oldPluginConfig.isEnabled();
+      newPluginConfig.setEnabled(newStatus);
+    }
   }
 
-  @Override
-  public StoragePlugin createOrUpdate(String name, StoragePluginConfig config, boolean persist) throws ExecutionSetupException {
-    for (;;) {
-      StoragePlugin oldPlugin = enabledPlugins.get(name);
-      StoragePlugin newPlugin = create(name, config);
-      boolean done = false;
+  /**
+   * Initializes {@link #pluginCache} with currently enabled plugins
+   * defined in the persistent store.
+   *
+   * @return {@code true} if the persistent store contained plugins
+   * (and thus was initialized, and should perhaps be upgraded), or
+   * {@code false} if no plugins were found and this is a new store
+   * which should be initialized. Avoids the need to check persistent
+   * store contents twice
+   */
+  private boolean loadEnabledPlugins() {
+    Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginStore.load();
+    if (!allPlugins.hasNext()) {
+      // Nothing found, this is (likely) a new install and should be
+      // initialized
+      return false;
+    }
+    while (allPlugins.hasNext()) {
+      Entry<String, StoragePluginConfig> plugin = allPlugins.next();
+      String name = plugin.getKey();
+      StoragePluginConfig config = plugin.getValue();
+      if (! config.isEnabled()) {
+        continue;
+      }
       try {
-        if (oldPlugin != null) {
-          done = newPlugin == null
-              ? enabledPlugins.remove(name, oldPlugin)
-              : enabledPlugins.replace(name, oldPlugin, newPlugin);
-        } else if (newPlugin != null) {
-          done = (null == enabledPlugins.putIfAbsent(name, newPlugin));
-        } else {
-          done = true;
-        }
-      } finally {
-        StoragePlugin pluginToClose = done ? oldPlugin : newPlugin;
-        closePlugin(pluginToClose);
+        pluginCache.put(createPluginEntry(name, config, PluginType.STORED));
+      } catch (Exception e) {
+        logger.error("Failure while setting up StoragePlugin with name: '{}', disabling.", name, e);
+        config.setEnabled(false);
+        pluginStore.put(name, config);
       }
+    }
+    // Found at least one entry, so this is an existing registry.
+    return true;
+  }
 
-      if (done) {
-        if (persist) {
-          pluginSystemTable.put(name, config);
-        }
+  @Override
+  public void put(String name, StoragePluginConfig config) throws ExecutionSetupException {
 
-        return newPlugin;
-      }
+    // Do not allow overwriting system plugins
+    // This same check is done later. However, we want to do this check
+    // before writing to the persistent store, which we must do before
+    // putting the plugin into the cache (where the second check is done.)
+    PluginHandle oldEntry = pluginCache.get(name);
+    if (oldEntry != null && oldEntry.isIntrinsic()) {
+      throw UserException.permissionError()
+        .message("Attempt to replace a system plugin.")
+        .addContext("Plugin name", name)
+        .addContext("Intrinsic plugin class", oldEntry.config().getClass().getName())
+        .addContext("Attempted replacement", config.getClass().getName())
+        .build(logger);
+    }
+
+    // Write to the store first. We are now in a race to see which
+    // thread will update the cache: might be us or might be another
+    // thread.
+    pluginStore.put(name, config);
+
+    // Will fail on an attempt to update a system plugin. Update
+    // will be rejected if another thread beats us to it.
+    if (config.isEnabled()) {
+      PluginHandle newHandle = restoreFromEphemeral(name, config);
+      oldEntry = pluginCache.put(newHandle);
+    } else {
+      oldEntry = pluginCache.remove(name, config);
+    }
+
+    // Let's optimistically assume that running queries may still use
+    // the old config, so transfer the possibly-created instance to the
+    // ephemeral store.
+    moveToEphemeral(oldEntry);
+  }
+
+  /**
+   * If there is an ephemeral plugin for this (name, config) pair,
+   * transfer that plugin out of ephemeral storage for reuse. Else
+   * create a new handle.
+   *
+   * @param name plugin name
+   * @param config plugin config
+   * @return a handle for the plugin which may have been retrieved from
+   * ephemeral storage
+   */
+  private PluginHandle restoreFromEphemeral(String name,
+      StoragePluginConfig config) {
+
+    // Benign race condition between check and invalidate.
+    // Plugin names are compared case-insensitively, consistent with
+    // the registry's case-insensitive name handling.
+    PluginHandle ephemeralEntry = ephemeralPlugins.getIfPresent(config);
+    if (ephemeralEntry == null || !name.equalsIgnoreCase(ephemeralEntry.name())) {
+      return createPluginEntry(name, config, PluginType.STORED);
+    } else {
+
+      // Transfer the instance to a new handle, then invalidate the
+      // cache entry. The transfer ensures that the invalidate will
+      // not close the plugin instance
+      PluginHandle newHandle = ephemeralEntry.transfer(PluginType.STORED);
+      ephemeralPlugins.invalidate(config);
+      return newHandle;
     }
   }
 
   @Override
+  // Returns the named plugin's config without instantiating the
+  // plugin; null if the plugin is unknown or its stored config is
+  // disabled (see getEntry/refresh).
+  public StoragePluginConfig getConfig(String name) {
+    PluginHandle entry = getEntry(name);
+    return entry == null ? null : entry.config();
+  }
+
+  // Gets a plugin with the named configuration
+  @Override
   public StoragePlugin getPlugin(String name) throws ExecutionSetupException {
-    StoragePlugin plugin = enabledPlugins.get(name);
-    if (systemPlugins.get(name) != null) {
+    PluginHandle entry = getEntry(name);
+
+    // Lazy instantiation: the first call to plugin() creates the
+    // actual plugin instance.
+    return entry == null ? null : entry.plugin();
+  }
+
+  /**
+   * Looks up the cache entry for the named plugin, re-syncing the
+   * cache against the persistent store when needed. Intrinsic
+   * (system) plugins are returned as-is: they are not held in the
+   * persistent store.
+   *
+   * @param name plugin name
+   * @return the current cache entry, or null if the plugin is unknown
+   * or its stored config is disabled
+   */
+  private PluginHandle getEntry(String name) {
+    PluginHandle plugin = pluginCache.get(name);
+    if (plugin != null && plugin.isIntrinsic()) {
       return plugin;
     }
+    StoragePluginConfig config = pluginStore.get(name);
+    if (plugin == null) {
+      return refresh(name, config);
+    } else {
+      return refresh(plugin, config);
+    }
+  }
 
-    // since we lazily manage the list of plugins per server, we need to update this once we know that it is time.
-    StoragePluginConfig config = pluginSystemTable.get(name);
-    if (config == null) {
-      if (plugin != null) {
-        enabledPlugins.remove(name);
-      }
+  // Lazy refresh for a plugin not known on this server.
+  private PluginHandle refresh(String name, StoragePluginConfig config) {
+    if (config == null || !config.isEnabled()) {
       return null;
     } else {
-      if (plugin == null
-          || !plugin.getConfig().equals(config)
-          || plugin.getConfig().isEnabled() != config.isEnabled()) {
-        plugin = createOrUpdate(name, config, false);
+
+      // Handles race conditions: some other thread may have just done what
+      // we're trying to do. Note: no need to close the new entry if
+      // there is a conflict: the plugin instance is created on demand
+      // and we've not done so.
+      return pluginCache.putIfAbsent(restoreFromEphemeral(name, config));
+    }
+  }
+
+  // Lazy refresh of a plugin we think we know about.
+  private PluginHandle refresh(PluginHandle entry, StoragePluginConfig config) {
+
+    // Deleted or disabled in persistent storage?
+    if (config == null || !config.isEnabled()) {
+
+      // Move the old config to the ephemeral store.
+      if (pluginCache.remove(entry)) {
+        moveToEphemeral(entry);
       }
-      return plugin;
+      return null;
+    }
+    // Unchanged?
+    if (entry.config().equals(config)) {
+      return entry;
+    }
+
+    // Plugin changed. Handle race condition on replacement.
+    PluginHandle newEntry = restoreFromEphemeral(entry.name(), config);
+    if (pluginCache.replace(entry, newEntry)) {
+      moveToEphemeral(entry);
+      return newEntry;
+    } else {
+      return pluginCache.get(entry.name());
+    }
+  }
+
+  /**
+   * Reconciles the in-memory plugin cache with the persistent store:
+   * adds plugins created elsewhere and refreshes any whose stored
+   * configuration has changed.
+   */
+  private void refresh() {
+    // Iterate through the plugin instances in the persistent store adding
+    // any new ones and refreshing those whose configuration has changed
+    Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginStore.load();
+    while (allPlugins.hasNext()) {
+      Entry<String, StoragePluginConfig> plugin = allPlugins.next();
+      refresh(plugin.getKey(), plugin.getValue());
     }
   }
 
   @Override
   public StoragePlugin getPlugin(StoragePluginConfig config) throws ExecutionSetupException {
-    if (config instanceof NamedStoragePluginConfig) {
-      return getPlugin(((NamedStoragePluginConfig) config).getName());
-    } else {
-      // try to lookup plugin by configuration
-      StoragePlugin plugin = enabledPlugins.get(config);
-      if (plugin != null) {
-        return plugin;
-      }
+    // Try to lookup plugin by configuration
+    PluginHandle plugin = pluginCache.get(config);
+    if (plugin != null) {
+      return plugin.plugin();
+    }
 
-      // no named plugin matches the desired configuration, let's create an
-      // ephemeral storage plugin (or get one from the cache)
-      try {
-        return ephemeralPlugins.get(config);
-      } catch (ExecutionException e) {
-        Throwable cause = e.getCause();
-        if (cause instanceof ExecutionSetupException) {
-          throw (ExecutionSetupException) cause;
-        } else {
-          // this shouldn't happen. here for completeness.
-          throw new ExecutionSetupException("Failure while trying to create ephemeral plugin.", cause);
-        }
+    // No named plugin matches the desired configuration, let's create an
+    // ephemeral storage plugin (or get one from the cache)
+    try {
+      return ephemeralPlugins.get(config).plugin();
+    } catch (ExecutionException e) {
+      Throwable cause = e.getCause();
+      if (cause instanceof ExecutionSetupException) {
+        throw (ExecutionSetupException) cause;
+      } else {
+        // this shouldn't happen. here for completeness.
+        throw new ExecutionSetupException(
+            "Failure while trying to create ephemeral plugin.", cause);
       }
     }
   }
 
+  // This method is not thread-safe: there is no guarantee that the plugin
+  // deleted is the same one the user requested: someone else could have deleted
+  // the old one and added a new one of the same name.
+  // TODO: Fix this
   @Override
-  public void addEnabledPlugin(String name, StoragePlugin plugin) {
-    enabledPlugins.put(name, plugin);
+  public void remove(String name) {
+    PluginHandle entry  = pluginCache.remove(name);
+    if (entry != null) {
+      // Retire rather than close: queries in flight may still be
+      // using the plugin instance.
+      moveToEphemeral(entry);
+    }
+
+    // Must tell store to delete even if not known locally because
+    // the store might hold a disabled version
+    pluginStore.delete(name);
+  }
+
+  /**
+   * Retires a plugin handle to the ephemeral cache so that queries
+   * still using the (possibly already-created) plugin instance can
+   * continue after its named entry is removed or replaced.
+   *
+   * @param handle the handle to retire; may be null, in which case
+   * this method does nothing
+   */
+  private void moveToEphemeral(PluginHandle handle) {
+    if (handle == null) {
+      return;
+    }
+
+    // If already in the ephemeral store, don't replace.
+    // Race condition is benign: two threads both doing the put
+    // will cause the first handle to be closed when the second hits.
+    if (ephemeralPlugins.getIfPresent(handle.config()) == null) {
+      ephemeralPlugins.put(handle.config(), handle.transfer(PluginType.EPHEMERAL));
+    } else {
+      handle.close();
+    }
+  }
+
+  @Override
+  public Map<String, StoragePluginConfig> storedConfigs() {
+    // Snapshot of every config in the persistent store, whether
+    // enabled or disabled.
+    Map<String, StoragePluginConfig> result = new HashMap<>();
+    Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginStore.load();
+    while (allPlugins.hasNext()) {
+      Entry<String, StoragePluginConfig> plugin = allPlugins.next();
+      result.put(plugin.getKey(), plugin.getValue());
+    }
+    return result;
+  }
+
+  @Override
+  public Map<String, StoragePluginConfig> enabledConfigs() {
+    // Sync the cache against the persistent store first so the
+    // result reflects changes made elsewhere.
+    refresh();
+    Map<String, StoragePluginConfig> result = new HashMap<>();
+    for (PluginHandle entry : pluginCache) {
+      // Report only stored plugins; intrinsic (system) plugins are
+      // not part of the persistent configuration.
+      if (entry.isStored()) {
+        result.put(entry.name(), entry.config());
+      }
+    }
+    return result;
   }
 
   @Override
@@ -219,425 +592,88 @@
   }
 
   @Override
-  public PersistentStore<StoragePluginConfig> getStore() {
-    return pluginSystemTable;
-  }
-
-  @Override
   public SchemaFactory getSchemaFactory() {
     return schemaFactory;
   }
 
+  // TODO: Remove this: it will force plugins to be instantiated
+  // unnecessarily
+  /**
+   * Adapts an iterator of plugin handles to the (name, plugin) entry
+   * iterator exposed by the registry. Each call to next() obtains the
+   * plugin instance from the handle.
+   */
+  private static class PluginIterator implements Iterator<Entry<String, StoragePlugin>> {
+    private final Iterator<PluginHandle> delegate;
+
+    public PluginIterator(Iterator<PluginHandle> delegate) {
+      this.delegate = delegate;
+    }
+
+    @Override
+    public boolean hasNext() {
+      return delegate.hasNext();
+    }
+
+    @Override
+    public Entry<String, StoragePlugin> next() {
+      PluginHandle handle = delegate.next();
+      return new ImmutableEntry<>(handle.name(), handle.plugin());
+    }
+  }
+
   @Override
   public Iterator<Entry<String, StoragePlugin>> iterator() {
-    return enabledPlugins.iterator();
+    return new PluginIterator(pluginCache.iterator());
   }
 
   @Override
   public synchronized void close() throws Exception {
     ephemeralPlugins.invalidateAll();
-    enabledPlugins.close();
-    pluginSystemTable.close();
+    pluginCache.close();
+    pluginStore.close();
+    locators.stream().forEach(loc -> loc.close());
   }
 
   /**
-   * Add a plugin and configuration. Assumes neither exists. Primarily for testing.
-   *
-   * @param config plugin config
-   * @param plugin plugin implementation
-   */
-  @VisibleForTesting
-  public void addPluginToPersistentStoreIfAbsent(String name, StoragePluginConfig config, StoragePlugin plugin) {
-    addEnabledPlugin(name, plugin);
-    pluginSystemTable.putIfAbsent(name, config);
-  }
-
-  /**
-   * <ol>
-   *   <li>Initializes persistent store for storage plugins.</li>
-   *   <li>Since storage plugins names are case-insensitive in Drill, to ensure backward compatibility,
-   *   re-writes those not stored in lower case with lower case names, for duplicates issues warning. </li>
-   *   <li>Wraps plugin system table into case insensitive wrapper.</li>
-   * </ol>
-   *
-   * @param context drillbit context
-   * @param lpPersistence deserialization mapper provider
-   * @return persistent store for storage plugins
-   */
-  private PersistentStore<StoragePluginConfig> initPluginsSystemTable(DrillbitContext context, LogicalPlanPersistence lpPersistence) {
-
-    try {
-      PersistentStore<StoragePluginConfig> pluginSystemTable = context
-          .getStoreProvider()
-          .getOrCreateStore(PersistentStoreConfig
-              .newJacksonBuilder(lpPersistence.getMapper(), StoragePluginConfig.class)
-              .name(PSTORE_NAME)
-              .build());
-
-      Iterator<Entry<String, StoragePluginConfig>> storedPlugins = pluginSystemTable.getAll();
-      while (storedPlugins.hasNext()) {
-        Entry<String, StoragePluginConfig> entry = storedPlugins.next();
-        String pluginName = entry.getKey();
-        if (!pluginName.equals(pluginName.toLowerCase())) {
-          logger.debug("Replacing plugin name {} with its lower case equivalent.", pluginName);
-          pluginSystemTable.delete(pluginName);
-          if (!pluginSystemTable.putIfAbsent(pluginName.toLowerCase(), entry.getValue())) {
-            logger.warn("Duplicated storage plugin name [{}] is found. Duplicate is deleted from persistent storage.", pluginName);
-          }
-        }
-      }
-
-      return new CaseInsensitivePersistentStore<>(pluginSystemTable);
-    } catch (StoreException e) {
-      logger.error("Failure while loading storage plugin registry.", e);
-      throw new DrillRuntimeException("Failure while reading and loading storage plugin configuration.", e);
-    }
-  }
-
-  /**
-   * Read bootstrap storage plugins {@link ExecConstants#BOOTSTRAP_STORAGE_PLUGINS_FILE}
-   * and format plugins {@link ExecConstants#BOOTSTRAP_FORMAT_PLUGINS_FILE} files for the first fresh
-   * instantiating of Drill
-   *
-   * @param lpPersistence deserialization mapper provider
-   * @return bootstrap storage plugins
-   * @throws IOException if a read error occurs
-   */
-  private StoragePlugins loadBootstrapPlugins(LogicalPlanPersistence lpPersistence) throws IOException {
-    // bootstrap load the config since no plugins are stored.
-    logger.info("No storage plugin instances configured in persistent store, loading bootstrap configuration.");
-    Set<URL> storageUrls = ClassPathScanner.forResource(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, false);
-    Set<URL> formatUrls = ClassPathScanner.forResource(ExecConstants.BOOTSTRAP_FORMAT_PLUGINS_FILE, false);
-    if (storageUrls != null && !storageUrls.isEmpty()) {
-      logger.info("Loading the storage plugin configs from URLs {}.", storageUrls);
-      StoragePlugins bootstrapPlugins = new StoragePlugins(new HashMap<>());
-      Map<String, URL> pluginURLMap = new HashMap<>();
-      for (URL url : storageUrls) {
-        loadStoragePlugins(url, bootstrapPlugins, pluginURLMap, lpPersistence);
-      }
-      if (formatUrls != null && !formatUrls.isEmpty()) {
-        logger.info("Loading the format plugin configs from URLs {}.", formatUrls);
-        for (URL url : formatUrls) {
-          loadFormatPlugins(url, bootstrapPlugins, pluginURLMap, lpPersistence);
-        }
-      }
-      return bootstrapPlugins;
-    } else {
-      throw new IOException("Failure finding " + ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE);
-    }
-  }
-
-  /**
-   * Loads storage plugins from the given URL
-   *
-   * @param url URL to the storage plugins bootstrap file
-   * @param bootstrapPlugins a collection where the plugins should be loaded to
-   * @param pluginURLMap a map to store correspondence between storage plugins and bootstrap files in which they are defined. Used for logging
-   * @param lpPersistence need to get an object mapper for the bootstrap files
-   * @throws IOException if failed to retrieve a plugin from a bootstrap file
-   */
-  private void loadStoragePlugins(URL url, StoragePlugins bootstrapPlugins, Map<String, URL> pluginURLMap, LogicalPlanPersistence lpPersistence) throws IOException {
-    StoragePlugins plugins = getPluginsFromResource(url, lpPersistence);
-    plugins.forEach(plugin -> {
-      StoragePluginConfig oldPluginConfig = bootstrapPlugins.putIfAbsent(plugin.getKey(), plugin.getValue());
-      if (oldPluginConfig != null) {
-        logger.warn("Duplicate plugin instance '[{}]' defined in [{}, {}], ignoring the later one.",
-            plugin.getKey(), pluginURLMap.get(plugin.getKey()), url);
-      } else {
-        pluginURLMap.put(plugin.getKey(), url);
-      }
-    });
-  }
-
-  /**
-   * Loads format plugins from the given URL and adds the formats to the specified storage plugins
-   *
-   * @param url URL to the format plugins bootstrap file
-   * @param bootstrapPlugins a collection with loaded storage plugins. New formats will be added to them
-   * @param pluginURLMap a map to store correspondence between storage plugins and bootstrap files in which they are defined. Used for logging
-   * @param lpPersistence need to get an object mapper for the bootstrap files
-   * @throws IOException if failed to retrieve a plugin from a bootstrap file
-   */
-  private void loadFormatPlugins(URL url, StoragePlugins bootstrapPlugins, Map<String, URL> pluginURLMap, LogicalPlanPersistence lpPersistence) throws IOException {
-    StoragePlugins plugins = getPluginsFromResource(url, lpPersistence);
-    plugins.forEach(formatPlugin -> {
-      String targetStoragePluginName = formatPlugin.getKey();
-      StoragePluginConfig storagePlugin = bootstrapPlugins.getConfig(targetStoragePluginName);
-      StoragePluginConfig formatPluginValue = formatPlugin.getValue();
-      if (storagePlugin == null) {
-        logger.warn("No storage plugins with the given name are registered: '[{}]'", targetStoragePluginName);
-      } else if (storagePlugin instanceof FileSystemConfig && formatPluginValue instanceof FileSystemConfig) {
-        FileSystemConfig targetPlugin = (FileSystemConfig) storagePlugin;
-        ((FileSystemConfig) formatPluginValue).getFormats().forEach((formatName, formatValue) -> {
-          FormatPluginConfig oldPluginConfig = targetPlugin.getFormats().putIfAbsent(formatName, formatValue);
-          if (oldPluginConfig != null) {
-            logger.warn("Duplicate format instance '[{}]' defined in [{}, {}], ignoring the later one.",
-                formatName, pluginURLMap.get(targetStoragePluginName), url);
-          }
-        });
-      } else {
-        logger.warn("Formats are only supported by File System plugin type: '[{}]'", targetStoragePluginName);
-      }
-    });
-  }
-
-  private StoragePlugins getPluginsFromResource(URL resource, LogicalPlanPersistence lpPersistence) throws IOException {
-    String pluginsData = Resources.toString(resource, Charsets.UTF_8);
-    return lpPersistence.getMapper().readValue(pluginsData, StoragePlugins.class);
-  }
-
-  /**
-   * Dynamically loads system plugins annotated with {@link SystemPlugin}.
-   * Will skip plugin initialization if no matching constructor, incorrect class implementation, name absence are detected.
-   *
-   * @param classpathScan classpath scan result
-   * @param context drillbit context
-   * @return map with system plugins stored by name
-   */
-  private Map<String, StoragePlugin> initSystemPlugins(ScanResult classpathScan, DrillbitContext context) {
-    Map<String, StoragePlugin> plugins = CaseInsensitiveMap.newHashMap();
-    List<AnnotatedClassDescriptor> annotatedClasses = classpathScan.getAnnotatedClasses(SystemPlugin.class.getName());
-    logger.trace("Found {} annotated classes with SystemPlugin annotation: {}.", annotatedClasses.size(), annotatedClasses);
-
-    for (AnnotatedClassDescriptor annotatedClass : annotatedClasses) {
-      try {
-        Class<?> aClass = Class.forName(annotatedClass.getClassName());
-        boolean isPluginInitialized = false;
-
-        for (Constructor<?> constructor : aClass.getConstructors()) {
-          Class<?>[] parameterTypes = constructor.getParameterTypes();
-
-          if (parameterTypes.length != 1 || parameterTypes[0] != DrillbitContext.class) {
-            logger.trace("Not matching constructor for {}. Expecting constructor with one parameter for DrillbitContext class.",
-                annotatedClass.getClassName());
-            continue;
-          }
-
-          Object instance = constructor.newInstance(context);
-          if (!(instance instanceof StoragePlugin)) {
-            logger.debug("Created instance of {} does not implement StoragePlugin interface.", annotatedClass.getClassName());
-            continue;
-          }
-
-          StoragePlugin storagePlugin = (StoragePlugin) instance;
-          String name = storagePlugin.getName();
-          if (name == null) {
-            logger.debug("Storage plugin name {} is not defined. Skipping plugin initialization.", annotatedClass.getClassName());
-            continue;
-          }
-          storagePlugin.getConfig().setEnabled(true);
-          plugins.put(name, storagePlugin);
-          isPluginInitialized = true;
-
-        }
-        if (!isPluginInitialized) {
-          logger.debug("Skipping plugin registration, did not find matching constructor or initialized object of wrong type.");
-        }
-      } catch (ReflectiveOperationException e) {
-        logger.warn("Error during system plugin {} initialization. Plugin initialization will be skipped.", annotatedClass.getClassName(), e);
-      }
-    }
-    logger.trace("The following system plugins have been initialized: {}.", plugins.keySet());
-    return plugins;
-  }
-
-  /**
-   * Get a list of all available storage plugin class constructors.
-   * @param classpathScan A classpath scan to use.
-   * @return A Map of StoragePluginConfig => StoragePlugin.<init>() constructors.
-   */
-  @SuppressWarnings("unchecked")
-  private Map<Object, Constructor<? extends StoragePlugin>> findAvailablePlugins(final ScanResult classpathScan) {
-    Map<Object, Constructor<? extends StoragePlugin>> availablePlugins = new HashMap<>();
-    final Collection<Class<? extends StoragePlugin>> pluginClasses =
-        classpathScan.getImplementations(StoragePlugin.class);
-    final String lineBrokenList =
-        pluginClasses.size() == 0
-            ? "" : "\n\t- " + Joiner.on("\n\t- ").join(pluginClasses);
-    logger.debug("Found {} storage plugin configuration classes: {}.",
-        pluginClasses.size(), lineBrokenList);
-    for (Class<? extends StoragePlugin> plugin : pluginClasses) {
-      int i = 0;
-      for (Constructor<?> c : plugin.getConstructors()) {
-        Class<?>[] params = c.getParameterTypes();
-        if (params.length != 3
-            || params[1] != DrillbitContext.class
-            || !StoragePluginConfig.class.isAssignableFrom(params[0])
-            || params[2] != String.class) {
-          logger.debug("Skipping StoragePlugin constructor {} for plugin class {} since it doesn't implement a "
-              + "[constructor(StoragePluginConfig, DrillbitContext, String)]", c, plugin);
-          continue;
-        }
-        availablePlugins.put(params[0], (Constructor<? extends StoragePlugin>) c);
-        i++;
-      }
-      if (i == 0) {
-        logger.debug("Skipping registration of StoragePlugin {} as it doesn't have a constructor with the parameters "
-            + "of (StoragePluginConfig, Config)", plugin.getCanonicalName());
-      }
-    }
-    return availablePlugins;
-  }
-
-  /**
-   * It initializes {@link #enabledPlugins} with currently enabled plugins
-   */
-  private void defineEnabledPlugins() {
-    Map<String, StoragePlugin> activePlugins = new HashMap<>();
-    Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginSystemTable.getAll();
-    while (allPlugins.hasNext()) {
-      Entry<String, StoragePluginConfig> plugin = allPlugins.next();
-      String name = plugin.getKey();
-      StoragePluginConfig config = plugin.getValue();
-      if (config.isEnabled()) {
-        try {
-          StoragePlugin storagePlugin = create(name, config);
-          activePlugins.put(name, storagePlugin);
-        } catch (ExecutionSetupException e) {
-          logger.error("Failure while setting up StoragePlugin with name: '{}', disabling.", name, e);
-          config.setEnabled(false);
-          pluginSystemTable.put(name, config);
-        }
-      }
-    }
-
-    activePlugins.putAll(systemPlugins);
-    enabledPlugins.putAll(activePlugins);
-  }
-
-  /**
-   * Creates plugin instance with the given {@code name} and configuration {@code pluginConfig}.
-   * The plugin need to be present in a list of available plugins and be enabled in the configuration
+   * Creates plugin entry with the given {@code name} and configuration {@code pluginConfig}.
+   * Validation for existence, disabled, etc. should have been done by the caller.
+   * <p>
+   * Uses the config to find the connector, then lets the connector create the plugin
+   * entry. Creation of the plugin instance is deferred until first requested.
+   * This should speed up Drillbit start, as long as other code only asks for the
+   * plugin instance when it is actually needed to plan or execute a query (not just
+   * to provide a schema.)
    *
    * @param name name of the plugin
    * @param pluginConfig plugin configuration
-   * @return plugin client or {@code null} if plugin is disabled
+   * @return handle to the plugin with metadata and deferred access to
+   * the plugin instance
    */
-  private StoragePlugin create(String name, StoragePluginConfig pluginConfig) throws ExecutionSetupException {
-    if (!pluginConfig.isEnabled()) {
-      return null;
+  private PluginHandle createPluginEntry(String name, StoragePluginConfig pluginConfig, PluginType type) {
+    ConnectorHandle connector = connectors.get(pluginConfig.getClass());
+    if (connector == null) {
+      throw UserException.internalError()
+        .message("No connector known for plugin configuration")
+        .addContext("Plugin name", name)
+        .addContext("Config class", pluginConfig.getClass().getName())
+        .build(logger);
     }
-
-    StoragePlugin plugin;
-    Constructor<? extends StoragePlugin> constructor = availablePlugins.get(pluginConfig.getClass());
-    if (constructor == null) {
-      throw new ExecutionSetupException(String.format("Failure finding StoragePlugin constructor for config %s",
-          pluginConfig));
-    }
-    try {
-      plugin = constructor.newInstance(pluginConfig, context, name);
-      plugin.start();
-      return plugin;
-    } catch (ReflectiveOperationException | IOException e) {
-      Throwable t = e instanceof InvocationTargetException ? ((InvocationTargetException) e).getTargetException() : e;
-      if (t instanceof ExecutionSetupException) {
-        throw ((ExecutionSetupException) t);
-      }
-      throw new ExecutionSetupException(String.format("Failure setting up new storage plugin configuration for config %s", pluginConfig), t);
-    }
+    return connector.pluginEntryFor(name, pluginConfig, type);
   }
 
-  private void closePlugin(StoragePlugin plugin) {
-    if (plugin == null) {
-      return;
-    }
+  // TODO: Replace this. Inefficient to obtain schemas we don't need.
+  protected void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) {
+    // Refresh against the persistent store.
+    // TODO: This will hammer the system if queries come in rapidly.
+    // Need some better solution: grace period, alert from ZK that there
+    // is something new, etc. Even better, don't register all the schemas.
+    refresh();
 
-    try {
-      plugin.close();
-    } catch (Exception e) {
-      logger.warn("Exception while shutting down storage plugin.");
-    }
-  }
-
-  public class DrillSchemaFactory extends AbstractSchemaFactory {
-
-    public DrillSchemaFactory(String name) {
-      super(name);
-    }
-
-    @Override
-    public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent) throws IOException {
-      Stopwatch watch = Stopwatch.createStarted();
-
+    // Register schemas with the refreshed plugins
+    // TODO: this code requires instantiating all plugins, even though
+    // the query won't use them. Need a way to do deferred registration.
+    for (PluginHandle plugin : pluginCache.plugins()) {
       try {
-        Set<String> currentPluginNames = new HashSet<>(enabledPlugins.getNames());
-        // iterate through the plugin instances in the persistent store adding
-        // any new ones and refreshing those whose configuration has changed
-        Iterator<Entry<String, StoragePluginConfig>> allPlugins = pluginSystemTable.getAll();
-        while (allPlugins.hasNext()) {
-          Entry<String, StoragePluginConfig> plugin = allPlugins.next();
-          if (plugin.getValue().isEnabled()) {
-            getPlugin(plugin.getKey());
-            currentPluginNames.remove(plugin.getKey());
-          }
-        }
-        // remove those which are no longer in the registry
-        for (String pluginName : currentPluginNames) {
-          if (systemPlugins.get(pluginName) != null) {
-            continue;
-          }
-          enabledPlugins.remove(pluginName);
-        }
-
-        // finally register schemas with the refreshed plugins
-        for (StoragePlugin plugin : enabledPlugins.plugins()) {
-          try {
-            plugin.registerSchemas(schemaConfig, parent);
-          } catch (Exception e) {
-            logger.warn("Error during `{}` schema initialization: {}", plugin.getName(), e.getMessage(), e.getCause());
-          }
-        }
-      } catch (ExecutionSetupException e) {
-        throw new DrillRuntimeException("Failure while updating storage plugins", e);
+        plugin.plugin().registerSchemas(schemaConfig, parent);
+      } catch (Exception e) {
+        logger.warn("Error during `{}` schema initialization: {}", plugin.name(), e.getMessage(), e.getCause());
       }
-
-      // Add second level schema as top level schema with name qualified with parent schema name
-      // Ex: "dfs" schema has "default" and "tmp" as sub schemas. Add following extra schemas "dfs.default" and
-      // "dfs.tmp" under root schema.
-      //
-      // Before change, schema tree looks like below:
-      // "root"
-      // -- "dfs"
-      // -- "default"
-      // -- "tmp"
-      // -- "hive"
-      // -- "default"
-      // -- "hivedb1"
-      //
-      // After the change, the schema tree looks like below:
-      // "root"
-      // -- "dfs"
-      // -- "default"
-      // -- "tmp"
-      // -- "dfs.default"
-      // -- "dfs.tmp"
-      // -- "hive"
-      // -- "default"
-      // -- "hivedb1"
-      // -- "hive.default"
-      // -- "hive.hivedb1"
-      List<SchemaPlus> secondLevelSchemas = new ArrayList<>();
-      for (String firstLevelSchemaName : parent.getSubSchemaNames()) {
-        SchemaPlus firstLevelSchema = parent.getSubSchema(firstLevelSchemaName);
-        for (String secondLevelSchemaName : firstLevelSchema.getSubSchemaNames()) {
-          secondLevelSchemas.add(firstLevelSchema.getSubSchema(secondLevelSchemaName));
-        }
-      }
-
-      for (SchemaPlus schema : secondLevelSchemas) {
-        AbstractSchema drillSchema;
-        try {
-          drillSchema = schema.unwrap(AbstractSchema.class);
-        } catch (ClassCastException e) {
-          throw new RuntimeException(String.format("Schema '%s' is not expected under root schema", schema.getName()));
-        }
-        SubSchemaWrapper wrapper = new SubSchemaWrapper(drillSchema);
-        parent.add(wrapper.getName(), wrapper);
-      }
-
-      logger.debug("Took {} ms to register schemas.", watch.elapsed(TimeUnit.MILLISECONDS));
     }
-
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginsHandler.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginStore.java
similarity index 61%
rename from exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginsHandler.java
rename to exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginStore.java
index 25b813c..40170834 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginsHandler.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginStore.java
@@ -17,23 +17,24 @@
  */
 package org.apache.drill.exec.store;
 
+import java.util.Iterator;
+import java.util.Map.Entry;
+
 import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.exec.planner.logical.StoragePlugins;
 import org.apache.drill.exec.store.sys.PersistentStore;
 
-
 /**
- * Storage plugins handler is an additional service for updating storage plugins configs from the file
+ * Interface to the storage mechanism used to store storage plugin
+ * configurations, typically in JSON format.
  */
-public interface StoragePluginsHandler {
-
-  /**
-   * Update incoming storage plugins configs from persistence store if present, otherwise bootstrap plugins configs.
-   *
-   * @param persistentStore the last storage plugins configs from persistence store
-   * @param bootstrapPlugins bootstrap storage plugins, which are used in case of first Drill start up
-   * @return all storage plugins, which should be loaded into persistence store
-   */
-  void loadPlugins(PersistentStore<StoragePluginConfig> persistentStore, StoragePlugins bootstrapPlugins);
-
+public interface StoragePluginStore {
+  boolean isInitialized();
+  void delete(String name);
+  Iterator<Entry<String, StoragePluginConfig>> load();
+  void put(String name, StoragePluginConfig config);
+  void putAll(StoragePlugins plugins);
+  StoragePluginConfig get(String name);
+  PersistentStore<StoragePluginConfig> getStore();
+  void close();
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginStoreImpl.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginStoreImpl.java
new file mode 100644
index 0000000..a185638
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginStoreImpl.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.apache.drill.shaded.guava.com.google.common.base.Preconditions.checkNotNull;
+
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.exceptions.DrillRuntimeException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.exception.StoreException;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.exec.store.sys.CaseInsensitivePersistentStore;
+import org.apache.drill.exec.store.sys.PersistentStore;
+import org.apache.drill.exec.store.sys.PersistentStoreConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Concrete storage plugin (configuration) store based on the
+ * {@link PersistentStore} abstraction.
+ */
+public class StoragePluginStoreImpl implements StoragePluginStore {
+  private static final Logger logger = LoggerFactory.getLogger(StoragePluginStoreImpl.class);
+
+  private final PersistentStore<StoragePluginConfig> pluginSystemTable;
+
+  public StoragePluginStoreImpl(DrillbitContext context) {
+    this.pluginSystemTable = initPluginsSystemTable(context,
+        checkNotNull(context.getLpPersistence()));
+  }
+
+  /**
+   * <ol>
+   *   <li>Initializes persistent store for storage plugins.</li>
+   *   <li>Since storage plugin names are case-insensitive in Drill, to ensure backward compatibility,
+   *   re-writes those not stored in lower case with lower-case names; issues a warning for duplicates.</li>
+   *   <li>Wraps plugin system table into case insensitive wrapper.</li>
+   * </ol>
+   *
+   * @param context drillbit context
+   * @param lpPersistence deserialization mapper provider
+   * @return persistent store for storage plugins
+   */
+  private PersistentStore<StoragePluginConfig> initPluginsSystemTable(
+      DrillbitContext context, LogicalPlanPersistence lpPersistence) {
+    try {
+      PersistentStore<StoragePluginConfig> pluginSystemTable = context
+          .getStoreProvider()
+          .getOrCreateStore(PersistentStoreConfig
+              .newJacksonBuilder(lpPersistence.getMapper(), StoragePluginConfig.class)
+              .name(StoragePluginRegistryImpl.PSTORE_NAME)
+              .build());
+
+      Iterator<Entry<String, StoragePluginConfig>> storedPlugins = pluginSystemTable.getAll();
+      while (storedPlugins.hasNext()) {
+        Entry<String, StoragePluginConfig> entry = storedPlugins.next();
+        String pluginName = entry.getKey();
+        if (!pluginName.equals(pluginName.toLowerCase())) {
+          logger.debug("Replacing plugin name {} with its lower case equivalent.", pluginName);
+          pluginSystemTable.delete(pluginName);
+          if (!pluginSystemTable.putIfAbsent(pluginName.toLowerCase(), entry.getValue())) {
+            logger.warn("Duplicated storage plugin name [{}] is found. Duplicate is deleted from persistent storage.", pluginName);
+          }
+        }
+      }
+
+      return new CaseInsensitivePersistentStore<>(pluginSystemTable);
+    } catch (StoreException e) {
+      throw new DrillRuntimeException(
+          "Failure while reading and loading storage plugin configuration.");
+    }
+  }
+
+  @Override
+  public boolean isInitialized() {
+
+    // TODO: This is not the best way to check: it will deserialize the
+    // first entry. What we really want to know is: are there any
+    // entries at all? (This version is better than the previous,
+    // which deserialized all entries, then discarded them.)
+    return pluginSystemTable.getRange(0, 1).hasNext();
+  }
+
+  @Override
+  public StoragePluginConfig get(String name) {
+    return pluginSystemTable.get(name);
+  }
+
+  @Override
+  public void put(String name, StoragePluginConfig config) {
+    pluginSystemTable.put(name, config);
+  }
+
+  @Override
+  public void delete(String name) {
+    pluginSystemTable.delete(name);
+  }
+
+  @Override
+  public Iterator<Entry<String, StoragePluginConfig>> load() {
+     return pluginSystemTable.getAll();
+  }
+
+  @Override
+  public void putAll(StoragePlugins plugins) {
+    for (Map.Entry<String, StoragePluginConfig> plugin : plugins) {
+      put(plugin.getKey(), plugin.getValue());
+    }
+  }
+
+  // TODO: Can this be removed? Avoid exposing implementation?
+  @Override
+  public PersistentStore<StoragePluginConfig> getStore() {
+    return pluginSystemTable;
+  }
+
+  @Override
+  public void close() {
+    try {
+      pluginSystemTable.close();
+    } catch (Exception e) {
+      logger.warn("Error closing the storage plugin store", e);
+      // Ignore since we're shutting down the Drillbit
+    }
+  }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginsHandlerService.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginsHandlerService.java
deleted file mode 100644
index 3551b67..0000000
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StoragePluginsHandlerService.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
-import org.apache.drill.shaded.guava.com.google.common.io.Resources;
-import com.jasonclawson.jackson.dataformat.hocon.HoconFactory;
-import org.apache.drill.common.config.ConfigConstants;
-import org.apache.drill.common.config.LogicalPlanPersistence;
-import org.apache.drill.common.exceptions.DrillRuntimeException;
-import org.apache.drill.common.logical.StoragePluginConfig;
-import org.apache.drill.common.scanner.ClassPathScanner;
-import org.apache.drill.exec.planner.logical.StoragePlugins;
-import org.apache.drill.exec.server.DrillbitContext;
-import org.apache.drill.exec.store.sys.PersistentStore;
-import org.apache.drill.exec.util.ActionOnFile;
-
-import javax.annotation.Nullable;
-import javax.validation.constraints.NotNull;
-import java.io.IOException;
-import java.net.URL;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-
-import static org.apache.drill.exec.store.StoragePluginRegistry.ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE;
-
-/**
- * Drill plugins handler, which allows to update storage plugins configs from the
- * {@link ConfigConstants#STORAGE_PLUGINS_OVERRIDE_CONF} conf file
- *
- * TODO: DRILL-6564: It can be improved with configs versioning and service of creating
- * {@link ConfigConstants#STORAGE_PLUGINS_OVERRIDE_CONF}
- */
-public class StoragePluginsHandlerService implements StoragePluginsHandler {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StoragePluginsHandlerService.class);
-
-  private final LogicalPlanPersistence lpPersistence;
-  private final DrillbitContext context;
-  private URL pluginsOverrideFileUrl;
-
-  public StoragePluginsHandlerService(DrillbitContext context) {
-    this.context = context;
-    this.lpPersistence = new LogicalPlanPersistence(context.getConfig(), context.getClasspathScan(),
-        new ObjectMapper(new HoconFactory()));
-  }
-
-  @Override
-  public void loadPlugins(@NotNull PersistentStore<StoragePluginConfig> persistentStore,
-                          @Nullable StoragePlugins bootstrapPlugins) {
-    // if bootstrapPlugins is not null -- fresh Drill set up
-    StoragePlugins pluginsForPersistentStore;
-
-    StoragePlugins newPlugins = getNewStoragePlugins();
-
-    if (newPlugins != null) {
-      pluginsForPersistentStore = new StoragePlugins(new HashMap<>());
-      Optional.ofNullable(bootstrapPlugins)
-          .ifPresent(pluginsForPersistentStore::putAll);
-
-      for (Map.Entry<String, StoragePluginConfig> newPlugin : newPlugins) {
-        String pluginName = newPlugin.getKey();
-        StoragePluginConfig oldPluginConfig = Optional.ofNullable(bootstrapPlugins)
-            .map(plugins -> plugins.getConfig(pluginName))
-            .orElse(persistentStore.get(pluginName));
-        StoragePluginConfig updatedStatusPluginConfig = updatePluginStatus(oldPluginConfig, newPlugin.getValue());
-        pluginsForPersistentStore.put(pluginName, updatedStatusPluginConfig);
-      }
-    } else {
-      pluginsForPersistentStore = bootstrapPlugins;
-    }
-
-    // load pluginsForPersistentStore to Persistent Store
-    Optional.ofNullable(pluginsForPersistentStore)
-        .ifPresent(plugins -> plugins.forEach(plugin -> persistentStore.put(plugin.getKey(), plugin.getValue())));
-
-    if (newPlugins != null) {
-      String fileAction = context.getConfig().getString(ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE);
-      Optional<ActionOnFile> actionOnFile = Arrays.stream(ActionOnFile.values())
-          .filter(action -> action.name().equalsIgnoreCase(fileAction))
-          .findFirst();
-      actionOnFile.ifPresent(action -> action.action(pluginsOverrideFileUrl));
-      // TODO: replace with ifPresentOrElse() once the project will be on Java9
-      if (!actionOnFile.isPresent()) {
-        logger.error("Unknown value {} for {} boot option. Nothing will be done with file.",
-            fileAction, ACTION_ON_STORAGE_PLUGINS_OVERRIDE_FILE);
-      }
-    }
-  }
-
-  /**
-   * Helper method to identify the enabled status for new storage plugins config. If this status is absent in the updater
-   * file, the status is kept from the configs, which are going to be updated
-   *
-   * @param oldPluginConfig current storage plugin config from Persistent Store or bootstrap config file
-   * @param newPluginConfig new storage plugin config
-   * @return new storage plugin config with updated enabled status
-   */
-  private StoragePluginConfig updatePluginStatus(@Nullable StoragePluginConfig oldPluginConfig,
-                                                 @NotNull StoragePluginConfig newPluginConfig) {
-    if (!newPluginConfig.isEnabledStatusPresent()) {
-      boolean newStatus = oldPluginConfig != null && oldPluginConfig.isEnabled();
-      newPluginConfig.setEnabled(newStatus);
-    }
-    return newPluginConfig;
-  }
-
-  /**
-   * Get the new storage plugins from the {@link ConfigConstants#STORAGE_PLUGINS_OVERRIDE_CONF} file if it exists,
-   * null otherwise
-   *
-   * @return storage plugins
-   */
-  private StoragePlugins getNewStoragePlugins() {
-    Set<URL> urlSet = ClassPathScanner.forResource(ConfigConstants.STORAGE_PLUGINS_OVERRIDE_CONF, false);
-    if (!urlSet.isEmpty()) {
-      if (urlSet.size() != 1) {
-        DrillRuntimeException.format("More than one %s file is placed in Drill's classpath: %s",
-            ConfigConstants.STORAGE_PLUGINS_OVERRIDE_CONF, urlSet);
-      }
-      pluginsOverrideFileUrl = urlSet.iterator().next();
-      try {
-        String newPluginsData = Resources.toString(pluginsOverrideFileUrl, Charsets.UTF_8);
-        return lpPersistence.getMapper().readValue(newPluginsData, StoragePlugins.class);
-      } catch (IOException e) {
-        logger.error("Failures are obtained while loading %s file. Proceed without update",
-            ConfigConstants.STORAGE_PLUGINS_OVERRIDE_CONF, e);
-      }
-    }
-    logger.trace("The {} file is absent. Proceed without updating of the storage plugins configs",
-        ConfigConstants.STORAGE_PLUGINS_OVERRIDE_CONF);
-    return null;
-  }
-}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageStrategy.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageStrategy.java
index 31c0103..0d9f999 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageStrategy.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/StorageStrategy.java
@@ -24,12 +24,15 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.util.List;
 
 /** Contains list of parameters that will be used to store path / files on file system. */
 public class StorageStrategy {
+  private static final Logger logger = LoggerFactory.getLogger(StorageStrategy.class);
 
   /**
    * For directories: drwxrwxr-x (owner and group have full access, others can read and execute).
@@ -46,8 +49,6 @@
    */
   public static final StorageStrategy TEMPORARY = new StorageStrategy("077", true);
 
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StorageStrategy.class);
-
   private final String umask;
   private final boolean deleteOnExit;
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java
index 2539c64..3735229 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SubSchemaWrapper.java
@@ -52,9 +52,8 @@
     if (defaultSchema instanceof AbstractSchema) {
       return ((AbstractSchema) defaultSchema).getSubPartitions(table, partitionColumns, partitionValues);
     } else {
-      return Collections.EMPTY_LIST;
+      return Collections.emptyList();
     }
-
   }
 
   @Override
@@ -121,5 +120,4 @@
   public String getTypeName() {
     return innerSchema.getTypeName();
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPlugin.java
index 85236af..466e1df 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPlugin.java
@@ -23,7 +23,8 @@
 import java.lang.annotation.Target;
 
 /**
- * Is used to indicated system plugins which will be dynamically initialized during storage plugin registry init stage.
+ * Indicates system plugins which will be dynamically initialized during storage
+ * plugin registry init stage.
  */
 @Retention(RetentionPolicy.RUNTIME)
 @Target({ElementType.TYPE})
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPluginLocator.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPluginLocator.java
new file mode 100644
index 0000000..b583ff7
--- /dev/null
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/SystemPluginLocator.java
@@ -0,0 +1,171 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.map.CaseInsensitiveMap;
+import org.apache.drill.common.scanner.persistence.AnnotatedClassDescriptor;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Locates system storage plugins. These are special in that they take
+ * no configuration: the configuration and connector names are the same so
+ * that the configuration name can be resolved directly to the connector.
+ * <p>
+ * System plugins are defined in the Drill core, reside on the default class
+ * path, and are annotated with {@code @SystemPlugin}.
+ */
+public class SystemPluginLocator implements ConnectorLocator {
+  private static final Logger logger = LoggerFactory.getLogger(SystemPluginLocator.class);
+
+  private final PluginRegistryContext context;
+  private final Map<String, StoragePlugin> plugins = CaseInsensitiveMap.newHashMap();
+
+  public SystemPluginLocator(PluginRegistryContext context) {
+    this.context = context;
+  }
+
+  /**
+   * Dynamically loads system plugins annotated with {@link SystemPlugin}.
+   * Skips initialization of any plugin for which no matching
+   * single-argument {@code DrillbitContext} constructor is found, whose
+   * class does not implement {@code StoragePlugin}, or whose plugin
+   * name is absent.
+   */
+  @Override
+  public void init() {
+    List<AnnotatedClassDescriptor> annotatedClasses =
+        context.classpathScan().getAnnotatedClasses(SystemPlugin.class.getName());
+    logger.trace("Found {} annotated classes with SystemPlugin annotation: {}.",
+        annotatedClasses.size(), annotatedClasses);
+
+    for (AnnotatedClassDescriptor annotatedClass : annotatedClasses) {
+      try {
+        loadPlugin(annotatedClass);
+      } catch (ReflectiveOperationException e) {
+        logger.warn("Error during system plugin {} initialization. Plugin initialization will be skipped.",
+            annotatedClass.getClassName(), e);
+      }
+    }
+    logger.trace("The following system plugins have been initialized: {}.", plugins.keySet());
+  }
+
+  private void loadPlugin(AnnotatedClassDescriptor annotatedClass) throws ReflectiveOperationException {
+    Class<?> aClass = Class.forName(annotatedClass.getClassName());
+
+    for (Constructor<?> constructor : aClass.getConstructors()) {
+      Class<?>[] parameterTypes = constructor.getParameterTypes();
+
+      if (parameterTypes.length != 1 || parameterTypes[0] != DrillbitContext.class) {
+        logger.trace("Not matching constructor for {}. Expecting constructor with one parameter for DrillbitContext class.",
+            annotatedClass.getClassName());
+        continue;
+      }
+
+      Object instance = constructor.newInstance(context.drillbitContext());
+      if (!(instance instanceof StoragePlugin)) {
+        logger.debug("Created instance of {} does not implement StoragePlugin interface.", annotatedClass.getClassName());
+        continue;
+      }
+
+      StoragePlugin storagePlugin = (StoragePlugin) instance;
+      String name = storagePlugin.getName();
+      if (name == null) {
+        logger.debug("Storage plugin name {} is not defined. Skipping plugin initialization.", annotatedClass.getClassName());
+        continue;
+      }
+      storagePlugin.getConfig().setEnabled(true);
+      plugins.put(name, storagePlugin);
+      return;
+    }
+    logger.debug("Skipping plugin registration for {}, did not find matching constructor or initialized object of wrong type.",
+        aClass.getName());
+  }
+
+  @Override
+  public StoragePlugins bootstrapPlugins() throws IOException {
+    // System plugins are not stored, so no bootstrap
+    return null;
+  }
+
+  @Override
+  public StoragePlugins updatedPlugins() {
+    // ... and no upgrades
+    return null;
+  }
+
+  @Override
+  public void onUpgrade() { }
+
+  @Override
+  public StoragePlugin get(String name) {
+    return plugins.get(name);
+  }
+
+  @Override
+  public Collection<StoragePlugin> intrinsicPlugins() {
+    return plugins.values();
+  }
+
+  @Override
+  public StoragePlugin create(String name, StoragePluginConfig pluginConfig) {
+    throw new IllegalStateException("Should not create instances of system plugins");
+  }
+
+  @Override
+  public Set<Class<? extends StoragePluginConfig>> configClasses() {
+    return null;
+  }
+
+  @Override
+  public boolean storable() {
+    // System plugins take no configuration and are never written to
+    // the persistent store.
+    return false;
+  }
+
+  @Override
+  public Class<? extends StoragePlugin> connectorClassFor(
+      Class<? extends StoragePluginConfig> configClass) {
+
+    // Not very efficient, but this method is generally for testing
+    // and there are only a few system plugins. Not worth adding a map.
+    for (StoragePlugin plugin : plugins.values()) {
+      if (configClass.isInstance(plugin.getConfig())) {
+        return plugin.getClass();
+      }
+    }
+    return null;
+  }
+
+  @Override
+  public void close() { }
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedCallable.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedCallable.java
index 9c90f08..09a0a8e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedCallable.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/TimedCallable.java
@@ -42,9 +42,13 @@
 import org.apache.drill.shaded.guava.com.google.common.util.concurrent.ThreadFactoryBuilder;
 
 /**
- * Class used to allow parallel executions of tasks in a simplified way. Also maintains and reports timings of task completion.
+ * Allows parallel executions of tasks in a simplified way. Also maintains and
+ * reports timings of task completion.
+ * <p>
  * TODO: look at switching to fork join.
- * @param <V> The time value that will be returned when the task is executed.
+ *
+ * @param <V>
+ *          The time value that will be returned when the task is executed.
  */
 public abstract class TimedCallable<V> implements Callable<V> {
   private static final Logger logger = LoggerFactory.getLogger(TimedCallable.class);
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
index 58f69a4..8f011ce 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemConfig.java
@@ -17,12 +17,16 @@
  */
 package org.apache.drill.exec.store.dfs;
 
+import java.util.HashMap;
 import java.util.LinkedHashMap;
 import java.util.Map;
+import java.util.Objects;
 import java.util.Optional;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+
+import org.apache.drill.common.PlanStringBuilder;
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.common.logical.StoragePluginConfig;
 
@@ -73,14 +77,13 @@
   }
 
   @Override
+  public String getValue(String key) {
+    return config == null ? null : config.get(key);
+  }
+
+  @Override
   public int hashCode() {
-    final int prime = 31;
-    int result = 1;
-    result = prime * result + ((config == null) ? 0 : config.hashCode());
-    result = prime * result + ((connection == null) ? 0 : connection.hashCode());
-    result = prime * result + ((formats == null) ? 0 : formats.hashCode());
-    result = prime * result + ((workspaces == null) ? 0 : workspaces.hashCode());
-    return result;
+    return Objects.hash(connection, config, formats, workspaces);
   }
 
   @Override
@@ -88,49 +91,55 @@
     if (this == obj) {
       return true;
     }
-    if (obj == null) {
-      return false;
-    }
-    if (getClass() != obj.getClass()) {
+    if (obj == null || getClass() != obj.getClass()) {
       return false;
     }
     FileSystemConfig other = (FileSystemConfig) obj;
-    if (connection == null) {
-      if (other.connection != null) {
-        return false;
-      }
-    } else if (!connection.equals(other.connection)) {
-      return false;
-    }
-    if (formats == null) {
-      if (other.formats != null) {
-        return false;
-      }
-    } else if (!formats.equals(other.formats)) {
-      return false;
-    }
-    if (workspaces == null) {
-      if (other.workspaces != null) {
-        return false;
-      }
-    } else if (!workspaces.equals(other.workspaces)) {
-      return false;
-    }
-    if (config == null) {
-      if (other.config != null) {
-        return false;
-      }
-    } else if (!config.equals(other.config)) {
-      return false;
-    }
-    return true;
+    return Objects.equals(connection, other.connection) &&
+           Objects.equals(config, other.config) &&
+           Objects.equals(formats, other.formats) &&
+           Objects.equals(workspaces, other.workspaces);
   }
 
   @Override
-  public String getValue(String key) {
-    if (config != null) {
-      return config.get(key);
+  public String toString() {
+    return new PlanStringBuilder(this)
+        .field("connection", connection)
+        .field("config", config)
+        .field("formats", formats)
+        .field("workspaces", workspaces)
+        .toString();
+  }
+
+  /**
+   * Copy the file system configuration. This <b>must</b> be done prior
+   * to modifying a config already stored in the registry. The registry
+   * maintains a key based on config value.
+   * @return a copy of this config which may be modified
+   */
+  public FileSystemConfig copy() {
+    return copyWithFormats(null);
+  }
+
+  /**
+   * Copy this file system config with the set of new/replaced formats.
+   * This <b>must</b> be done if the file system config is already stored
+   * in the plugin registry.
+   * @param newFormats optional new formats to add
+   * @return copy with the new formats
+   */
+  public FileSystemConfig copyWithFormats(Map<String, FormatPluginConfig> newFormats) {
+    // Must make copies of structures. Turns out that the constructor already
+    // copies workspaces, so we need not copy it here.
+    Map<String, String> configCopy = config == null ? null : new HashMap<>(config);
+    Map<String, FormatPluginConfig> formatsCopy =
+        formats == null ? null : new LinkedHashMap<>(formats);
+    if (newFormats != null) {
+      formatsCopy = formatsCopy == null ? new LinkedHashMap<>() : formatsCopy;
+      formatsCopy.putAll(newFormats);
     }
-    return null;
+    FileSystemConfig newConfig = new FileSystemConfig(connection, configCopy, workspaces, formatsCopy);
+    newConfig.setEnabled(isEnabled());
+    return newConfig;
   }
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
index 8a97701..4360571 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FileSystemPlugin.java
@@ -64,8 +64,9 @@
   private static final Logger logger = LoggerFactory.getLogger(FileSystemPlugin.class);
 
   /**
-   * org.apache.hadoop.io.compress library supports such codecs as Gzip and Bzip2 out of box.
-   * This list stores only codecs that are missing in Hadoop library.
+   * The {@code org.apache.hadoop.io.compress} library supports such codecs as
+   * Gzip and Bzip2 out of the box. This list stores only codecs that are
+   * missing in the Hadoop library.
    */
   private static final List<String> ADDITIONAL_CODECS = Collections.singletonList(
     ZipCodec.class.getCanonicalName());
@@ -109,16 +110,19 @@
       List<WorkspaceSchemaFactory> factories = new ArrayList<>();
       if (!noWorkspace) {
         for (Map.Entry<String, WorkspaceConfig> space : config.getWorkspaces().entrySet()) {
-          factories.add(new WorkspaceSchemaFactory(this, space.getKey(), name, space.getValue(), matchers, context.getLpPersistence(), context.getClasspathScan()));
+          factories.add(new WorkspaceSchemaFactory(
+              this, space.getKey(), name, space.getValue(), matchers,
+              context.getLpPersistence(), context.getClasspathScan()));
         }
       }
 
       // if the "default" workspace is not given add one.
       if (noWorkspace || !config.getWorkspaces().containsKey(DEFAULT_WS_NAME)) {
-        factories.add(new WorkspaceSchemaFactory(this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence(), context.getClasspathScan()));
+        factories.add(new WorkspaceSchemaFactory(
+            this, DEFAULT_WS_NAME, name, WorkspaceConfig.DEFAULT, matchers, context.getLpPersistence(), context.getClasspathScan()));
       }
 
-      this.schemaFactory = new FileSystemSchemaFactory(name, factories);
+      schemaFactory = new FileSystemSchemaFactory(name, factories);
     } catch (IOException e) {
       throw new ExecutionSetupException("Failure setting up file system plugin.", e);
     }
@@ -194,25 +198,33 @@
   }
 
   @Override
-  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, SessionOptionManager options) throws IOException {
-    return getPhysicalScan(userName, selection, AbstractGroupScan.ALL_COLUMNS, options, null);
+  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      SessionOptionManager options) throws IOException {
+    return getPhysicalScan(userName, selection, AbstractGroupScan.ALL_COLUMNS,
+        options, null);
   }
 
   @Override
-  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, SessionOptionManager options, MetadataProviderManager metadataProviderManager) throws IOException {
-    return getPhysicalScan(userName, selection, AbstractGroupScan.ALL_COLUMNS, options, metadataProviderManager);
+  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      SessionOptionManager options, MetadataProviderManager metadataProviderManager) throws IOException {
+    return getPhysicalScan(userName, selection, AbstractGroupScan.ALL_COLUMNS,
+        options, metadataProviderManager);
   }
 
   @Override
-  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns) throws IOException {
+  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      List<SchemaPath> columns) throws IOException {
     return getPhysicalScan(userName, selection, columns, null, null);
   }
 
   @Override
-  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns, SessionOptionManager options, MetadataProviderManager metadataProviderManager) throws IOException {
-    FormatSelection formatSelection = selection.getWith(lpPersistance, FormatSelection.class);
+  public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection,
+      List<SchemaPath> columns, SessionOptionManager options,
+      MetadataProviderManager metadataProviderManager) throws IOException {
+    FormatSelection formatSelection = selection.getWith(lpPersistance.getMapper(), FormatSelection.class);
     FormatPlugin plugin = getFormatPlugin(formatSelection.getFormat());
-    return plugin.getGroupScan(userName, formatSelection.getSelection(), columns, options, metadataProviderManager);
+    return plugin.getGroupScan(userName, formatSelection.getSelection(), columns,
+        options, metadataProviderManager);
   }
 
   @Override
@@ -225,8 +237,10 @@
   }
 
   /**
-   * If format plugin configuration is for named format plugin, will return format plugin from pre-loaded list by name.
-   * For other cases will try to find format plugin by its configuration, if not present will attempt to create one.
+   * If the format plugin configuration is for a named format plugin, returns
+   * the format plugin from the pre-loaded list by name. Otherwise tries to
+   * find the format plugin by its configuration and, if not present, attempts
+   * to create one.
    *
    * @param config format plugin configuration
    * @return format plugin for given configuration if found, null otherwise
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java
index 447403f..8df96dd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/FormatPluginOptionExtractor.java
@@ -27,33 +27,35 @@
 import java.util.Map;
 import java.util.Set;
 
+import org.apache.drill.common.config.LogicalPlanPersistence;
 import org.apache.drill.common.exceptions.UserException;
 import org.apache.drill.common.logical.FormatPluginConfig;
-import org.apache.drill.common.logical.FormatPluginConfigBase;
 import org.apache.drill.common.scanner.persistence.ScanResult;
 import org.apache.drill.exec.store.dfs.WorkspaceSchemaFactory.TableInstance;
 import org.apache.drill.exec.store.table.function.TableParamDef;
 import org.apache.drill.exec.store.table.function.TableSignature;
 import org.slf4j.Logger;
-
+import org.slf4j.LoggerFactory;
 import org.apache.drill.shaded.guava.com.google.common.annotations.VisibleForTesting;
 
 /**
  * Manages format plugins options to define table macros.
  */
 final class FormatPluginOptionExtractor {
-  private static final Logger logger = org.slf4j.LoggerFactory.getLogger(FormatPluginOptionExtractor.class);
+  private static final Logger logger = LoggerFactory.getLogger(FormatPluginOptionExtractor.class);
 
   private final Map<String, FormatPluginOptionsDescriptor> optionsByTypeName;
 
   /**
-   * Extracts the format plugin options based on the scanned implementations of {@link FormatPluginConfig}.
+   * Extracts the format plugin options based on the scanned implementations of
+   * {@link FormatPluginConfig}.
    *
    * @param scanResult scan result of the classpath
    */
   FormatPluginOptionExtractor(ScanResult scanResult) {
     Map<String, FormatPluginOptionsDescriptor> result = new HashMap<>();
-    Set<Class<? extends FormatPluginConfig>> pluginConfigClasses = FormatPluginConfigBase.getSubTypes(scanResult);
+    Set<Class<? extends FormatPluginConfig>> pluginConfigClasses =
+        LogicalPlanPersistence.getSubTypes(scanResult, FormatPluginConfig.class);
     for (Class<? extends FormatPluginConfig> pluginConfigClass : pluginConfigClasses) {
       FormatPluginOptionsDescriptor optionsDescriptor = new FormatPluginOptionsDescriptor(pluginConfigClass);
       result.put(optionsDescriptor.typeName.toLowerCase(), optionsDescriptor);
@@ -86,10 +88,13 @@
   }
 
   /**
-   * Given a table function signature and the corresponding parameters
-   * return the corresponding formatPlugin configuration.
+   * Given a table function signature and the corresponding parameters return
+   * the corresponding formatPlugin configuration.
    *
-   * @param t the signature and parameters (it should be one of the signatures returned by {@link FormatPluginOptionExtractor#getTableSignatures(String, List)})
+   * @param t
+   *          the signature and parameters (it should be one of the signatures
+   *          returned by
+   *          {@link FormatPluginOptionExtractor#getTableSignatures(String, List)})
    * @return the config
    */
   FormatPluginConfig createConfigForTable(TableInstance t) {
@@ -117,4 +122,4 @@
     }
     return optionsDescriptor.createConfigForTable(t);
   }
-}
\ No newline at end of file
+}
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
index b68106c..f045b27 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/WorkspaceSchemaFactory.java
@@ -84,6 +84,8 @@
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.security.AccessControlException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.apache.drill.shaded.guava.com.google.common.base.Joiner;
@@ -93,7 +95,7 @@
 import org.apache.drill.shaded.guava.com.google.common.collect.Sets;
 
 public class WorkspaceSchemaFactory {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
+  private static final Logger logger = LoggerFactory.getLogger(WorkspaceSchemaFactory.class);
 
   private final List<FormatMatcher> fileMatchers;
   private final List<FormatMatcher> dropFileMatchers;
@@ -290,9 +292,9 @@
   public class WorkspaceSchema extends AbstractSchema implements ExpandingConcurrentMap.MapValueFactory<TableInstance, DrillTable> {
     private final ExpandingConcurrentMap<TableInstance, DrillTable> tables = new ExpandingConcurrentMap<>(this);
     private final SchemaConfig schemaConfig;
-    private DrillFileSystem fs;
+    private final DrillFileSystem fs;
     // Drill Process User file-system
-    private DrillFileSystem dpsFs;
+    private final DrillFileSystem dpsFs;
 
     public WorkspaceSchema(List<String> parentSchemaPath, String wsName, SchemaConfig schemaConfig, DrillFileSystem fs) {
       super(parentSchemaPath, wsName);
@@ -425,19 +427,20 @@
 
         for (DotDrillFile f : files) {
           switch (f.getType()) {
-          case VIEW:
-            try {
-              return new DrillViewTable(getView(f), f.getOwner(), schemaConfig.getViewExpansionContext());
-            } catch (AccessControlException e) {
-              if (!schemaConfig.getIgnoreAuthErrors()) {
-                logger.debug(e.getMessage());
-                throw UserException.permissionError(e)
-                  .message("Not authorized to read view [%s] in schema [%s]", tableName, getFullSchemaName())
-                  .build(logger);
+            case VIEW:
+              try {
+                return new DrillViewTable(getView(f), f.getOwner(), schemaConfig.getViewExpansionContext());
+              } catch (AccessControlException e) {
+                if (!schemaConfig.getIgnoreAuthErrors()) {
+                  logger.debug(e.getMessage());
+                  throw UserException.permissionError(e)
+                    .message("Not authorized to read view [%s] in schema [%s]", tableName, getFullSchemaName())
+                    .build(logger);
+                }
+              } catch (IOException e) {
+                logger.warn("Failure while trying to load {}.view.drill file in workspace [{}]", tableName, getFullSchemaName(), e);
               }
-            } catch (IOException e) {
-              logger.warn("Failure while trying to load {}.view.drill file in workspace [{}]", tableName, getFullSchemaName(), e);
-            }
+            default:
           }
         }
       } catch (UnsupportedOperationException e) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
index 652a42b..4b4e029 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/dfs/easy/EasyGroupScan.java
@@ -93,12 +93,15 @@
       ) throws IOException, ExecutionSetupException {
     super(ImpersonationUtil.resolveUserName(userName));
     this.selection = FileSelection.create(null, files, selectionRoot);
-    this.formatPlugin = Preconditions.checkNotNull((EasyFormatPlugin<?>) engineRegistry.getFormatPlugin(storageConfig, formatConfig),
+    this.formatPlugin = Preconditions.checkNotNull((EasyFormatPlugin<?>)
+        engineRegistry.getFormatPlugin(storageConfig, formatConfig),
         "Unable to load format plugin for provided format config.");
     this.columns = columns == null ? ALL_COLUMNS : columns;
     this.selectionRoot = selectionRoot;
     SimpleFileTableMetadataProviderBuilder builder =
-        (SimpleFileTableMetadataProviderBuilder) new FileSystemMetadataProviderManager().builder(MetadataProviderManager.MetadataProviderKind.SCHEMA_STATS_ONLY);
+        (SimpleFileTableMetadataProviderBuilder)
+        new FileSystemMetadataProviderManager()
+        .builder(MetadataProviderManager.MetadataProviderKind.SCHEMA_STATS_ONLY);
 
     this.metadataProvider = builder.withLocation(selection.getSelectionRoot())
         .withSchema(schema)
@@ -116,7 +119,8 @@
       ) throws IOException {
     super(userName);
     this.selection = Preconditions.checkNotNull(selection);
-    this.formatPlugin = Preconditions.checkNotNull(formatPlugin, "Unable to load format plugin for provided format config.");
+    this.formatPlugin = Preconditions.checkNotNull(formatPlugin,
+        "Unable to load format plugin for provided format config.");
     this.columns = columns == null ? ALL_COLUMNS : columns;
     this.selectionRoot = selectionRoot;
     if (metadataProviderManager == null) {
@@ -124,7 +128,8 @@
       metadataProviderManager = new FileSystemMetadataProviderManager();
     }
     SimpleFileTableMetadataProviderBuilder builder =
-        (SimpleFileTableMetadataProviderBuilder) metadataProviderManager.builder(MetadataProviderManager.MetadataProviderKind.SCHEMA_STATS_ONLY);
+        (SimpleFileTableMetadataProviderBuilder) metadataProviderManager.builder(
+            MetadataProviderManager.MetadataProviderKind.SCHEMA_STATS_ONLY);
 
     this.metadataProvider = builder.withLocation(selection.getSelectionRoot())
         .build();
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
index c090f98..522fd10 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/TextFormatPlugin.java
@@ -22,6 +22,8 @@
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+
+import org.apache.drill.common.PlanStringBuilder;
 import org.apache.drill.common.exceptions.ChildErrorContext;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.exceptions.UserException;
@@ -65,6 +67,7 @@
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.Objects;
 
 /**
  * Text format plugin for CSV and other delimited text formats.
@@ -78,7 +81,6 @@
  * to allow tight control of the size of produced batches (as well
  * as to support provided schema.)
  */
-
 public class TextFormatPlugin extends EasyFormatPlugin<TextFormatPlugin.TextFormatConfig> {
   private final static String PLUGIN_NAME = "text";
 
@@ -108,14 +110,18 @@
   @JsonInclude(Include.NON_DEFAULT)
   public static class TextFormatConfig implements FormatPluginConfig {
 
+    // TODO: Bad things happen if fields change after creation.
+    // Change all these to be private final, and add constructor.
+    // See DRILL-7612
+
     public List<String> extensions = Collections.emptyList();
     public String lineDelimiter = "\n";
     public char fieldDelimiter = '\n';
     public char quote = '"';
     public char escape = '"';
     public char comment = '#';
-    public boolean skipFirstLine = false;
-    public boolean extractHeader = false;
+    public boolean skipFirstLine;
+    public boolean extractHeader;
 
     public TextFormatConfig() { }
 
@@ -138,17 +144,8 @@
 
     @Override
     public int hashCode() {
-      final int prime = 31;
-      int result = 1;
-      result = prime * result + comment;
-      result = prime * result + escape;
-      result = prime * result + ((extensions == null) ? 0 : extensions.hashCode());
-      result = prime * result + fieldDelimiter;
-      result = prime * result + ((lineDelimiter == null) ? 0 : lineDelimiter.hashCode());
-      result = prime * result + quote;
-      result = prime * result + (skipFirstLine ? 1231 : 1237);
-      result = prime * result + (extractHeader ? 1231 : 1237);
-      return result;
+      return Objects.hash(extensions, lineDelimiter, fieldDelimiter,
+          quote, escape, comment, skipFirstLine, extractHeader);
     }
 
     @Override
@@ -156,46 +153,32 @@
       if (this == obj) {
         return true;
       }
-      if (obj == null) {
-        return false;
-      }
-      if (getClass() != obj.getClass()) {
+      if (obj == null || getClass() != obj.getClass()) {
         return false;
       }
       TextFormatConfig other = (TextFormatConfig) obj;
-      if (comment != other.comment) {
-        return false;
-      }
-      if (escape != other.escape) {
-        return false;
-      }
-      if (extensions == null) {
-        if (other.extensions != null) {
-          return false;
-        }
-      } else if (!extensions.equals(other.extensions)) {
-        return false;
-      }
-      if (fieldDelimiter != other.fieldDelimiter) {
-        return false;
-      }
-      if (lineDelimiter == null) {
-        if (other.lineDelimiter != null) {
-          return false;
-        }
-      } else if (!lineDelimiter.equals(other.lineDelimiter)) {
-        return false;
-      }
-      if (quote != other.quote) {
-        return false;
-      }
-      if (skipFirstLine != other.skipFirstLine) {
-        return false;
-      }
-      if (extractHeader != other.extractHeader) {
-        return false;
-      }
-      return true;
+      return Objects.equals(extensions, other.extensions) &&
+             Objects.equals(lineDelimiter, other.lineDelimiter) &&
+             Objects.equals(fieldDelimiter, other.fieldDelimiter) &&
+             Objects.equals(quote, other.quote) &&
+             Objects.equals(escape, other.escape) &&
+             Objects.equals(comment, other.comment) &&
+             Objects.equals(skipFirstLine, other.skipFirstLine) &&
+             Objects.equals(extractHeader, other.extractHeader);
+    }
+
+    @Override
+    public String toString() {
+      return new PlanStringBuilder(this)
+        .field("extensions", extensions)
+        .field("skipFirstLine", skipFirstLine)
+        .field("extractHeader", extractHeader)
+        .escapedField("fieldDelimiter", fieldDelimiter)
+        .escapedField("lineDelimiter", lineDelimiter)
+        .escapedField("quote", quote)
+        .escapedField("escape", escape)
+        .escapedField("comment", comment)
+        .toString();
     }
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/reader/CompliantTextBatchReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/reader/CompliantTextBatchReader.java
index db2f9e8..f1512a8 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/reader/CompliantTextBatchReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/easy/text/reader/CompliantTextBatchReader.java
@@ -33,6 +33,8 @@
 import org.apache.drill.exec.record.metadata.TupleSchema;
 import org.apache.drill.exec.store.dfs.DrillFileSystem;
 import org.apache.hadoop.mapred.FileSplit;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.univocity.parsers.common.TextParsingException;
 
@@ -42,7 +44,7 @@
  * New text reader, complies with the RFC 4180 standard for text/csv files
  */
 public class CompliantTextBatchReader implements ManagedReader<ColumnsSchemaNegotiator> {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(CompliantTextBatchReader.class);
+  private static final Logger logger = LoggerFactory.getLogger(CompliantTextBatchReader.class);
 
   private static final int MAX_RECORDS_PER_BATCH = 8096;
   private static final int READ_BUFFER = 1024 * 1024;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
index 7fbb3ba..be56e11 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/ischema/InfoSchemaStoragePlugin.java
@@ -63,7 +63,7 @@
 
   @Override
   public InfoSchemaGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns) {
-    InfoSchemaTableType table = selection.getWith(getContext().getLpPersistence(),  InfoSchemaTableType.class);
+    InfoSchemaTableType table = selection.getWith(getContext().getLpPersistence().getMapper(),  InfoSchemaTableType.class);
     return new InfoSchemaGroupScan(table);
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockBreakageStorage.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockBreakageStorage.java
index f2c2d9f..d31b061 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockBreakageStorage.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockBreakageStorage.java
@@ -25,9 +25,21 @@
 
 public class MockBreakageStorage extends MockStorageEngine {
 
+  /**
+   * Each storage plugin requires a unique config class to allow
+   * config --> impl lookups to be unique.
+   */
+  public static class MockBreakageStorageEngineConfig extends MockStorageEngineConfig {
+    public static final MockBreakageStorageEngineConfig INSTANCE = new MockBreakageStorageEngineConfig("mock:///");
+
+    public MockBreakageStorageEngineConfig(String url) {
+      super(url);
+    }
+  }
+
   private boolean breakRegister;
 
-  public MockBreakageStorage(MockStorageEngineConfig configuration, DrillbitContext context, String name) {
+  public MockBreakageStorage(MockBreakageStorageEngineConfig configuration, DrillbitContext context, String name) {
     super(configuration, context, name);
     breakRegister = false;
   }
@@ -43,5 +55,4 @@
     }
     super.registerSchemas(schemaConfig, parent);
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
index 6d18c7e..fb00f83 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngine.java
@@ -50,7 +50,6 @@
 import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 
 public class MockStorageEngine extends AbstractStoragePlugin {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(MockStorageEngine.class);
 
   private final MockStorageEngineConfig configuration;
   private final MockSchema schema;
@@ -116,7 +115,7 @@
 
   private static class MockSchema extends AbstractSchema {
 
-    private MockStorageEngine engine;
+    private final MockStorageEngine engine;
     private final Map<String, Table> tableCache = new WeakHashMap<>();
 
     public MockSchema(MockStorageEngine engine) {
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java
index f20ff45..b58d9ab 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/mock/MockStorageEngineConfig.java
@@ -25,12 +25,11 @@
 
 @JsonTypeName(MockStorageEngineConfig.NAME)
 public class MockStorageEngineConfig extends StoragePluginConfigBase {
-
-  private String url;
-
   public static final String NAME = "mock";
   public static final MockStorageEngineConfig INSTANCE = new MockStorageEngineConfig("mock:///");
 
+  private final String url;
+
   @JsonCreator
   public MockStorageEngineConfig(@JsonProperty("url") String url) {
     this.url = url;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataVersion.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataVersion.java
index 1ffe59d..041130e 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataVersion.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/parquet/metadata/MetadataVersion.java
@@ -42,7 +42,8 @@
   public MetadataVersion(String metadataVersion) {
     Matcher matcher = PATTERN.matcher(metadataVersion);
     if (!matcher.matches()) {
-      DrillRuntimeException.format("Could not parse metadata version '%s' using format '%s'", metadataVersion, FORMAT);
+      throw DrillRuntimeException.create(
+          "Could not parse metadata version '%s' using format '%s'", metadataVersion, FORMAT);
     }
     this.major = Integer.parseInt(matcher.group(1));
     this.minor = matcher.group(3) != null ? Integer.parseInt(matcher.group(3)) : 0;
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/DynamicPojoRecordReader.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/DynamicPojoRecordReader.java
index a9ee538..fc0b4cb 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/DynamicPojoRecordReader.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/pojo/DynamicPojoRecordReader.java
@@ -98,8 +98,7 @@
 
     private final ObjectMapper mapper;
 
-    public Converter(ObjectMapper mapper)
-    {
+    public Converter(ObjectMapper mapper) {
       this.mapper = mapper;
     }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/BasePersistentStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/BasePersistentStore.java
index 9a8e477..0fce59c 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/BasePersistentStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/BasePersistentStore.java
@@ -26,5 +26,4 @@
   public Iterator<Map.Entry<String, V>> getAll() {
     return getRange(0, Integer.MAX_VALUE);
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/CaseInsensitivePersistentStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/CaseInsensitivePersistentStore.java
index 38bd529..053ad33 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/CaseInsensitivePersistentStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/CaseInsensitivePersistentStore.java
@@ -21,8 +21,9 @@
 import java.util.Map;
 
 /**
- * Wrapper around {@link PersistentStore} to ensure all passed keys are converted to lower case and stored this way.
- * This will ensure case-insensitivity during insert, update, deletion or search.
+ * Wrapper around {@link PersistentStore} to ensure all passed keys are
+ * converted to lower case and stored this way. This will ensure
+ * case-insensitivity during insert, update, deletion or search.
  */
 public class CaseInsensitivePersistentStore<V> implements PersistentStore<V> {
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/PersistentStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/PersistentStore.java
index 02959aa..48b3f41 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/PersistentStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/PersistentStore.java
@@ -21,7 +21,7 @@
 import java.util.Map;
 
 /**
- * An abstraction used to store and retrieve instances of given value type.
+ * Stores and retrieves instances of a given value type.
  *
  * @param <V>  value type
  */
@@ -53,5 +53,4 @@
    * Returns an iterator of entries.
    */
   Iterator<Map.Entry<String, V>> getAll();
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
index d282017..7e6223b 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePlugin.java
@@ -72,7 +72,7 @@
 
   @Override
   public AbstractGroupScan getPhysicalScan(String userName, JSONOptions selection, List<SchemaPath> columns) {
-    SystemTable table = selection.getWith(getContext().getLpPersistence(), SystemTable.class);
+    SystemTable table = selection.getWith(getContext().getLpPersistence().getMapper(), SystemTable.class);
     return new SystemTableScan(table, this);
   }
 
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePluginConfig.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePluginConfig.java
index 914fcf0..a7826c0 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePluginConfig.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/SystemTablePluginConfig.java
@@ -28,8 +28,7 @@
 
   public static final SystemTablePluginConfig INSTANCE = new SystemTablePluginConfig();
 
-  private SystemTablePluginConfig() {
-  }
+  private SystemTablePluginConfig() { }
 
   @Override
   public boolean equals(Object o) {
@@ -40,5 +39,4 @@
   public int hashCode() {
     return 1;
   }
-
 }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/LocalPersistentStore.java b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/LocalPersistentStore.java
index e8a4e22..ae6fdbd 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/LocalPersistentStore.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/store/sys/store/LocalPersistentStore.java
@@ -73,7 +73,7 @@
     try {
       fs.mkdirs(basePath);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Failure setting local persistent store path [%s]: %s",
+      throw DrillRuntimeException.create(e, "Failure setting local persistent store path [%s]: %s",
         basePath, e.getMessage());
     }
   }
@@ -113,7 +113,7 @@
     try {
       fileStatuses = DrillFileSystemUtil.listFiles(fs, basePath, false, SYS_FILE_SUFFIX_FILTER);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable to retrieve store data: %s", e.getMessage());
+      throw DrillRuntimeException.create(e, "Unable to retrieve store data: %s", e.getMessage());
     }
 
     if (fileStatuses.isEmpty()) {
@@ -151,7 +151,7 @@
       byte[] bytes = IOUtils.toByteArray(is);
       return deserialize(path, bytes);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable to retrieve store data for the path [%s]: %s",
+      throw DrillRuntimeException.create(e, "Unable to retrieve store data for the path [%s]: %s",
         path, e.getMessage());
     }
   }
@@ -179,7 +179,7 @@
     try {
       fs.delete(path, false);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable to delete store data for the path [%s]: %s",
+      throw DrillRuntimeException.create(e, "Unable to delete store data for the path [%s]: %s",
         path, e.getMessage());
     }
   }
@@ -226,7 +226,7 @@
 
   private Path handleInvalidKey(String key, Throwable throwable, boolean failOnInvalidKey) {
     if (failOnInvalidKey) {
-      throw DrillRuntimeException.format(throwable, "Illegal storage key name: %s", key);
+      throw DrillRuntimeException.create(throwable, "Illegal storage key name: %s", key);
     } else {
       logger.debug("Illegal storage key name: {}", key, throwable);
       return null;
@@ -237,7 +237,7 @@
     try {
       return path != null && fs.exists(path);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable to check store file [%s] existence: %s",
+      throw DrillRuntimeException.create(e, "Unable to check store file [%s] existence: %s",
         path, e.getMessage());
     }
   }
@@ -246,7 +246,7 @@
     try {
       return config.getSerializer().serialize(value);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable serialize value for the store key [%s]: %s",
+      throw DrillRuntimeException.create(e, "Unable serialize value for the store key [%s]: %s",
         path, e.getMessage());
     }
   }
@@ -255,7 +255,7 @@
     try {
       return config.getSerializer().deserialize(bytes);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable deserialize value for the path [%s]: %s",
+      throw DrillRuntimeException.create(e, "Unable deserialize value for the path [%s]: %s",
         path, e.getMessage());
     }
   }
@@ -264,7 +264,7 @@
     try (OutputStream os = fs.create(path)) {
       IOUtils.write(serialize(path, value), os);
     } catch (IOException e) {
-      throw DrillRuntimeException.format(e, "Unable to store data for the path [%s]: %s",
+      throw DrillRuntimeException.create(e, "Unable to store data for the path [%s]: %s",
         path, e.getMessage());
     }
   }
diff --git a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
index 71c7806..f8bd2c4 100644
--- a/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
+++ b/exec/java-exec/src/main/java/org/apache/drill/exec/util/StoragePluginTestUtils.java
@@ -28,16 +28,15 @@
 import org.apache.drill.common.logical.FormatPluginConfig;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.dfs.FileSystemConfig;
-import org.apache.drill.exec.store.dfs.FileSystemPlugin;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
 
 import org.apache.drill.exec.store.easy.sequencefile.SequenceFileFormatConfig;
 import org.apache.drill.exec.store.easy.text.TextFormatPlugin;
 
 /**
- * This class contains utility methods to speed up tests. Some of the production code currently calls this method
- * when the production code is executed as part of the test runs. That's the reason why this code has to be in
- * production module.
+ * Utility methods to speed up tests. Some of the production code currently
+ * calls these methods when executed as part of the test runs. That's the
+ * reason why this code has to be in the production module.
  */
 public class StoragePluginTestUtils {
   public static final String CP_PLUGIN_NAME = "cp";
@@ -66,8 +65,7 @@
                                           final StoragePluginRegistry pluginRegistry,
                                           final File tmpDirPath,
                                           String... schemas) throws ExecutionSetupException {
-    final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin(pluginName);
-    final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
+    final FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getConfig(pluginName);
 
     Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>();
     Optional.ofNullable(pluginConfig.getWorkspaces())
@@ -90,7 +88,7 @@
         newWorkspaces,
         pluginConfig.getFormats());
     newPluginConfig.setEnabled(pluginConfig.isEnabled());
-    pluginRegistry.createOrUpdate(pluginName, newPluginConfig, true);
+    pluginRegistry.put(pluginName, newPluginConfig);
   }
 
   public static void configureFormatPlugins(StoragePluginRegistry pluginRegistry) throws ExecutionSetupException {
@@ -99,8 +97,7 @@
   }
 
   public static void configureFormatPlugins(StoragePluginRegistry pluginRegistry, String storagePlugin) throws ExecutionSetupException {
-    FileSystemPlugin fileSystemPlugin = (FileSystemPlugin) pluginRegistry.getPlugin(storagePlugin);
-    FileSystemConfig fileSystemConfig = (FileSystemConfig) fileSystemPlugin.getConfig();
+    FileSystemConfig fileSystemConfig = (FileSystemConfig) pluginRegistry.getConfig(storagePlugin);
 
     Map<String, FormatPluginConfig> newFormats = new HashMap<>();
     Optional.ofNullable(fileSystemConfig.getFormats())
@@ -139,6 +136,6 @@
         newFormats);
     newFileSystemConfig.setEnabled(fileSystemConfig.isEnabled());
 
-    pluginRegistry.createOrUpdate(storagePlugin, newFileSystemConfig, true);
+    pluginRegistry.put(storagePlugin, newFileSystemConfig);
   }
 }
diff --git a/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json b/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
index 8c0428d..a8df53e 100644
--- a/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
+++ b/exec/java-exec/src/main/resources/bootstrap-storage-plugins.json
@@ -110,7 +110,13 @@
           "type" : "json",
           "extensions" : [ "json" ]
         },
-        "avro" : {
+        "pcap" : {
+          "type" : "pcap"
+        },
+        "pcapng" : {
+          "type" : "pcapng"
+        },
+        "avro" : {
           "type" : "avro"
         },
         "sequencefile" : {
@@ -144,6 +150,12 @@
           "type" : "json",
           "extensions" : [ "json" ]
         },
+        "pcap" : {
+          "type" : "pcap"
+        },
+        "pcapng" : {
+          "type" : "pcapng"
+        },
         "parquet" : {
           "type" : "parquet"
         },
diff --git a/exec/java-exec/src/main/resources/drill-module.conf b/exec/java-exec/src/main/resources/drill-module.conf
index 471f0f1..a0846af 100644
--- a/exec/java-exec/src/main/resources/drill-module.conf
+++ b/exec/java-exec/src/main/resources/drill-module.conf
@@ -113,6 +113,19 @@
         batch.size: 4000
       }
     },
+    # The name of the file to scan for "classic" storage plugins
+    # Configured here for ease of testing. Users should NEVER change
+    # this setting.
+    bootstrap: {
+      storage: "bootstrap-storage-plugins.json",
+      # Not actually supported
+      format: "bootstrap-format-plugins.json"
+    },
+    # The name of the upgrade (override) file for changes to an
+    # existing system
+    upgrade: {
+      storage: "storage-plugins-override.conf"
+    },
     # The action on the storage-plugins-override.conf after it's use.
     # Possible values are "none" (default), "rename", "remove"
     action_on_plugins_override_file: "none"
@@ -393,7 +406,7 @@
 
       // Path to local udf directory, always created on local file system.
       // Root for these directory is generated at runtime unless Drill temporary directory is set.
-      local: ${drill.exec.udf.directory.base}"/udf/local",
+      local: ${drill.exec.udf.directory.base}"/local",
 
       // Set this property if custom file system should be used to create remote directories, ex: fs: "file:///".
       // fs: "",
@@ -461,7 +474,6 @@
 # Users should use ALTER SYSTEM and ALTER SESSION to set the options.
 
 drill.exec.options: {
-    bootstrap-storage-plugins.json: .sys.drill,
     debug.validate_iterators: false,
     debug.validate_vectors: false,
     drill.exec.functions.cast_empty_string_to_null: false,
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
index e309fc0..d211485 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/BaseTestImpersonation.java
@@ -132,7 +132,7 @@
 
     FileSystemConfig miniDfsPluginConfig = new FileSystemConfig(connection, null, workspaces, lfsPluginConfig.getFormats());
     miniDfsPluginConfig.setEnabled(true);
-    pluginRegistry.createOrUpdate(MINI_DFS_STORAGE_PLUGIN_NAME, miniDfsPluginConfig, true);
+    pluginRegistry.put(MINI_DFS_STORAGE_PLUGIN_NAME, miniDfsPluginConfig);
   }
 
   protected static void createAndAddWorkspace(String name, String path, short permissions, String owner,
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
index 7ab082e..10f5d49 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationDisabledWithMiniDFS.java
@@ -96,7 +96,7 @@
 
   @AfterClass
   public static void removeMiniDfsBasedStorage() throws Exception {
-    getDrillbitContext().getStorage().deletePlugin(MINI_DFS_STORAGE_PLUGIN_NAME);
+    getDrillbitContext().getStorage().remove(MINI_DFS_STORAGE_PLUGIN_NAME);
     stopMiniDfsCluster();
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
index 0e9c0e0..ce879cb 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationMetadata.java
@@ -407,7 +407,7 @@
 
   @AfterClass
   public static void removeMiniDfsBasedStorage() {
-    getDrillbitContext().getStorage().deletePlugin(MINI_DFS_STORAGE_PLUGIN_NAME);
+    getDrillbitContext().getStorage().remove(MINI_DFS_STORAGE_PLUGIN_NAME);
     stopMiniDfsCluster();
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
index ce03232..b6af23f 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/impersonation/TestImpersonationQueries.java
@@ -305,7 +305,7 @@
 
   @AfterClass
   public static void removeMiniDfsBasedStorage() {
-    getDrillbitContext().getStorage().deletePlugin(MINI_DFS_STORAGE_PLUGIN_NAME);
+    getDrillbitContext().getStorage().remove(MINI_DFS_STORAGE_PLUGIN_NAME);
     stopMiniDfsCluster();
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
index 869ba3d..f160569 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/sql/TestCTTAS.java
@@ -67,7 +67,7 @@
         newWorkspaces,
         pluginConfig.getFormats());
     newPluginConfig.setEnabled(pluginConfig.isEnabled());
-    pluginRegistry.createOrUpdate(DFS_PLUGIN_NAME, newPluginConfig, true);
+    pluginRegistry.put(DFS_PLUGIN_NAME, newPluginConfig);
   }
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/BasePluginRegistryTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/BasePluginRegistryTest.java
new file mode 100644
index 0000000..d41bdf7
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/BasePluginRegistryTest.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
+import java.io.IOException;
+import java.lang.reflect.Constructor;
+import java.util.Collection;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+
+import org.apache.calcite.schema.SchemaPlus;
+import org.apache.drill.common.config.DrillConfig;
+import org.apache.drill.common.config.LogicalPlanPersistence;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.scanner.ClassPathScanner;
+import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.server.DrillbitContext;
+import org.apache.drill.test.BaseDirTestWatcher;
+import org.apache.drill.test.BaseTest;
+import org.apache.drill.test.OperatorFixture;
+import org.junit.ClassRule;
+
+import com.fasterxml.jackson.annotation.JsonCreator;
+import com.fasterxml.jackson.annotation.JsonProperty;
+import com.fasterxml.jackson.databind.ObjectMapper;
+
+public class BasePluginRegistryTest extends BaseTest {
+
+  @ClassRule
+  public static final BaseDirTestWatcher dirTestWatcher = new BaseDirTestWatcher();
+
+  protected static final String RESOURCE_BASE = "plugins/";
+
+  protected class PluginRegistryContextFixture implements PluginRegistryContext {
+
+    private final DrillConfig drillConfig;
+    private final ScanResult classpathScan;
+    private final ObjectMapper mapper;
+
+    public PluginRegistryContextFixture(OperatorFixture opFixture) {
+      drillConfig = opFixture.config();
+      classpathScan = ClassPathScanner.fromPrescan(drillConfig);
+      LogicalPlanPersistence lpPersistence = new LogicalPlanPersistence(drillConfig, classpathScan);
+
+      mapper = lpPersistence.getMapper();
+    }
+    @Override
+    public DrillConfig config() { return drillConfig; }
+
+    @Override
+    public ObjectMapper mapper() { return mapper; }
+
+    @Override
+    public ScanResult classpathScan() { return classpathScan; }
+
+    // Not ideal, but we don't want to start the entire Drillbit
+    // for these tests.
+    @Override
+    public DrillbitContext drillbitContext() { return null; }
+  }
+
+  public static class StoragePluginFixtureConfig extends StoragePluginConfig {
+
+    private final String mode;
+
+    @JsonCreator
+    public StoragePluginFixtureConfig(@JsonProperty("mode") String mode) {
+      this.mode = mode;
+    }
+
+    @JsonProperty("mode")
+    public String mode() { return mode; }
+
+    @Override
+    public boolean equals(Object o) {
+      if (o == this) {
+        return true;
+      }
+      if (o == null || o.getClass() != getClass()) {
+        return false;
+      }
+      StoragePluginFixtureConfig other = (StoragePluginFixtureConfig) o;
+      return Objects.equals(mode, other.mode);
+    }
+
+    @Override
+    public int hashCode() {
+      return Objects.hash(mode);
+    }
+
+  }
+  @PrivatePlugin
+  public static class StoragePluginFixture extends AbstractStoragePlugin {
+
+    private final StoragePluginFixtureConfig config;
+    private int closeCount;
+
+    public StoragePluginFixture(StoragePluginFixtureConfig config, DrillbitContext inContext, String inName) {
+      super(inContext, inName);
+      this.config = config;
+      if (config.mode().equals("crash-ctor")) {
+        throw new IllegalStateException();
+      }
+    }
+
+    @Override
+    public void start() {
+      if (config.mode().equals("crash-start")) {
+        throw new IllegalStateException();
+      }
+    }
+
+    @Override
+    public StoragePluginConfig getConfig() {
+      return config;
+    }
+
+    @Override
+    public void registerSchemas(SchemaConfig schemaConfig, SchemaPlus parent)
+        throws IOException {
+      assert false;
+    }
+
+    @Override
+    public void close() {
+      closeCount++;
+      if (config.mode().equals("crash-close")) {
+        throw new IllegalStateException();
+      }
+    }
+
+    public int closeCount() { return closeCount; }
+  }
+
+  protected static class LocatorFixture implements ConnectorLocator {
+
+    private final Constructor<? extends StoragePlugin> ctor;
+
+    public LocatorFixture() {
+      Map<Class<? extends StoragePluginConfig>, Constructor<? extends StoragePlugin>> ctors =
+          ClassicConnectorLocator.constuctorsFor(StoragePluginFixture.class);
+      assertEquals(1, ctors.size());
+      assertTrue(ctors.containsKey(StoragePluginFixtureConfig.class));
+      ctor = ctors.get(StoragePluginFixtureConfig.class);
+    }
+
+    @Override
+    public void init() { }
+
+    @Override
+    public StoragePlugins bootstrapPlugins() throws IOException {
+      return null;
+    }
+
+    @Override
+    public StoragePlugins updatedPlugins() { return null; }
+
+    @Override
+    public void onUpgrade() { }
+
+    @Override
+    public Collection<StoragePlugin> intrinsicPlugins() {
+      return null;
+    }
+
+    @Override
+    public StoragePlugin get(String name) { return null; }
+
+    @Override
+    public Set<Class<? extends StoragePluginConfig>> configClasses() {
+      return null;
+    }
+
+    @Override
+    public StoragePlugin create(String name, StoragePluginConfig config)
+        throws Exception {
+      return ctor.newInstance(config, null, name);
+    }
+
+    @Override
+    public boolean storable() { return false; }
+
+    @Override
+    public Class<? extends StoragePlugin> connectorClassFor(
+        Class<? extends StoragePluginConfig> configClass) {
+      return ctor.getDeclaringClass();
+    }
+
+    @Override
+    public void close() { }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/FormatPluginSerDeTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/FormatPluginSerDeTest.java
index 4aca1a8..ebcb300 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/FormatPluginSerDeTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/FormatPluginSerDeTest.java
@@ -135,6 +135,4 @@
       PlanTestBase.testPhysicalPlanExecutionBasedOnQuery(query);
     }
   }
-
 }
-
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestBootstrapLoader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestBootstrapLoader.java
new file mode 100644
index 0000000..3344ab3
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestBootstrapLoader.java
@@ -0,0 +1,227 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.IOException;
+import java.net.URL;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.store.dfs.FileSystemConfig;
+import org.apache.drill.test.OperatorFixture;
+import org.junit.Test;
+
+/**
+ * Tests {@code PluginBootstrapLoader} and its implementation
+ * {@code PluginBootstrapLoaderImpl}.
+ */
+public class TestBootstrapLoader extends BasePluginRegistryTest {
+
+  @Test
+  public void testBootstrapLoader() throws Exception {
+    try (OperatorFixture fixture = OperatorFixture.standardFixture(dirTestWatcher)) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoaderImpl loader = new PluginBootstrapLoaderImpl(context);
+      Map<String, URL> pluginURLMap = new HashMap<>();
+      StoragePlugins plugins = loader.loadBootstrapPlugins(pluginURLMap);
+
+      // Sanity test. Change this if the bootstrap file changes.
+      // No need to test contents; here we assume serialization works.
+      // See FormatPluginSerDeTest
+      assertNotNull(plugins.getConfig("dfs"));
+      assertNotNull(plugins.getConfig("s3"));
+      assertNotNull(plugins.getConfig("cp"));
+
+      // Cannot test contrib plugins here: they are not yet
+      // available when this test is run. We'll trust the
+      // classpath scanner.
+    }
+  }
+
+  @Test
+  public void testMissingBootstrapFile() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    // Note: file does not actually exist, which is intentional.
+    String bootstrapFile = RESOURCE_BASE + "missing-bootstrap.json";
+    builder.configBuilder().put(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoaderImpl loader = new PluginBootstrapLoaderImpl(context);
+      try {
+        loader.loadBootstrapPlugins(new HashMap<>());
+        fail();
+      } catch (IOException e) {
+        assertTrue(e.getMessage().contains("Cannot find"));
+        assertTrue(e.getMessage().contains(bootstrapFile));
+      }
+    }
+  }
+
+  /**
+   * Few things are as frustrating as tracking down plugin errors. Here we ensure
+   * that the bootstrap loader explains where to look by naming the failed file
+   * if bootstrap fails.
+   */
+  @Test
+  public void testFailedBootstrapLoad() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    String bootstrapFile = RESOURCE_BASE + "bogus-bootstrap.json";
+    builder.configBuilder().put(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoaderImpl loader = new PluginBootstrapLoaderImpl(context);
+      try {
+        loader.loadBootstrapPlugins(new HashMap<>());
+        fail();
+      } catch (IOException e) {
+        // Resource URL
+        assertTrue(e.getMessage().contains(bootstrapFile));
+        // Jackson-provided bad key
+        assertTrue(e.getCause().getMessage().contains("imABadBoy"));
+      }
+    }
+  }
+
+  @Test
+  public void testDuplicateBootstrapEntries() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    String bootstrapFile = RESOURCE_BASE + "dup-bootstrap.json";
+    builder.configBuilder().put(ExecConstants.BOOTSTRAP_STORAGE_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoaderImpl loader = new PluginBootstrapLoaderImpl(context);
+      StoragePlugins plugins = loader.loadBootstrapPlugins(new HashMap<>());
+
+      // Duplicates noted in log; last one wins.
+
+      StoragePluginConfig pluginConfig = plugins.getConfig("cp");
+      assertNotNull(pluginConfig);
+      assertTrue(pluginConfig instanceof FileSystemConfig);
+      FileSystemConfig cpConfig = (FileSystemConfig) pluginConfig;
+      assertNotNull(cpConfig.getFormats().get("tsv"));
+    }
+  }
+
+  @Test
+  public void testMissingBootstrapUpgrades() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    // Note: file does not actually exist, which is intentional.
+    String bootstrapFile = RESOURCE_BASE + "missing-plugin-upgrade.json";
+    builder.configBuilder().put(ExecConstants.UPGRADE_STORAGE_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoader loader = new PluginBootstrapLoaderImpl(context);
+      StoragePlugins plugins = loader.updatedPlugins();
+      assertNull(plugins);
+    }
+  }
+
+  @Test
+  public void testBootstrapUpgrades() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    String bootstrapFile = RESOURCE_BASE + "mock-plugin-upgrade.json";
+    builder.configBuilder().put(ExecConstants.UPGRADE_STORAGE_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoader loader = new PluginBootstrapLoaderImpl(context);
+      StoragePlugins plugins = loader.updatedPlugins();
+      assertNotNull(plugins);
+
+      // dfs: removed psv
+      StoragePluginConfig pluginConfig = plugins.getConfig("dfs");
+      assertNotNull(pluginConfig);
+      FileSystemConfig dfs = (FileSystemConfig) pluginConfig;
+      assertNull(dfs.getFormats().get("psv"));
+      assertNotNull(dfs.getFormats().get("csv"));
+
+      // local added
+      assertNotNull(plugins.getConfig("local"));
+
+      // S3, bsv added
+      pluginConfig = plugins.getConfig("s3");
+      assertNotNull(pluginConfig);
+      FileSystemConfig s3 = (FileSystemConfig) pluginConfig;
+      assertNotNull(s3.getFormats().get("bsv"));
+
+      // cp, left unchanged (not in upgrade file)
+      assertNull(plugins.getConfig("cp"));
+    }
+  }
+
+  @Test
+  public void testBootstrapLoaderWithUpgrades() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    String bootstrapFile = RESOURCE_BASE + "mock-plugin-upgrade.json";
+    builder.configBuilder().put(ExecConstants.UPGRADE_STORAGE_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoader loader = new PluginBootstrapLoaderImpl(context);
+      StoragePlugins plugins = loader.bootstrapPlugins();
+
+      // dfs: removed psv
+      StoragePluginConfig pluginConfig = plugins.getConfig("dfs");
+      assertNotNull(pluginConfig);
+      FileSystemConfig dfs = (FileSystemConfig) pluginConfig;
+      assertNull(dfs.getFormats().get("psv"));
+      assertNotNull(dfs.getFormats().get("csv"));
+
+      // local added
+      assertNotNull(plugins.getConfig("local"));
+
+      // S3, bsv added
+      pluginConfig = plugins.getConfig("s3");
+      assertNotNull(pluginConfig);
+      FileSystemConfig s3 = (FileSystemConfig) pluginConfig;
+      assertNotNull(s3.getFormats().get("bsv"));
+
+      // cp, left unchanged (not in upgrade file)
+      assertNotNull(plugins.getConfig("cp"));
+    }
+  }
+
+  /**
+   * Test a format bootstrap with a mock file. Can't use a real
+   * file because those appear in modules not yet available when
+   * this test runs.
+   */
+  @Test
+  public void testBootstrapLoaderWithFormats() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    String bootstrapFile = RESOURCE_BASE + "mock-format-bootstrap.json";
+    builder.configBuilder().put(ExecConstants.BOOTSTRAP_FORMAT_PLUGINS_FILE, bootstrapFile);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      PluginBootstrapLoader loader = new PluginBootstrapLoaderImpl(context);
+      StoragePlugins plugins = loader.bootstrapPlugins();
+
+      // bsv added to dfs
+      StoragePluginConfig pluginConfig = plugins.getConfig("dfs");
+      assertNotNull(pluginConfig);
+      FileSystemConfig dfs = (FileSystemConfig) pluginConfig;
+      assertNotNull(dfs.getFormats().get("bsv"));
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestClassicLocator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestClassicLocator.java
new file mode 100644
index 0000000..ea095ab
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestClassicLocator.java
@@ -0,0 +1,136 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Collections;
+import java.util.Set;
+
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.logical.StoragePluginConfigBase;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.planner.logical.StoragePlugins;
+import org.apache.drill.exec.store.dfs.FileSystemConfig;
+import org.apache.drill.exec.store.dfs.FileSystemPlugin;
+import org.apache.drill.exec.store.sys.SystemTablePluginConfig;
+import org.apache.drill.exec.util.StoragePluginTestUtils;
+import org.apache.drill.test.OperatorFixture;
+import org.junit.Test;
+
+/**
+ * Tests the "classic" connector locator: the locator that finds
+ * storage plugin config and connector classes via a class-path scan,
+ * and that loads the bootstrap plugin definitions.
+ */
+public class TestClassicLocator extends BasePluginRegistryTest {
+
+  /**
+   * Verify which config classes the class-path scan finds: concrete,
+   * non-system, public plugin configs appear; abstract classes, system
+   * plugins, and private (unregistered) plugin classes do not.
+   */
+  @Test
+  public void testClassList() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+
+    // This test uses a cached class path. If you change the plugins
+    // on the class path, and run this test without a full build, it may
+    // fail. Uncomment the following line to (slowly) rebuild the class
+    // path scan on each run.
+    // builder.configBuilder().put(ClassPathScanner.IMPLEMENTATIONS_SCAN_CACHE, false);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      ConnectorLocator locator = new ClassicConnectorLocator(context);
+      locator.init();
+      Set<Class<? extends StoragePluginConfig>> result = locator.configClasses();
+
+      // Sanity check of known good class
+
+      assertTrue(result.contains(FileSystemConfig.class));
+
+      // System plugins do not appear
+      assertFalse(result.contains(SystemTablePluginConfig.class));
+
+      // Abstract classes do not appear
+      assertFalse(result.contains(StoragePluginConfig.class));
+      assertFalse(result.contains(StoragePluginConfigBase.class));
+
+      // The private plugin class does not appear
+      assertFalse(result.contains(StoragePluginFixtureConfig.class));
+
+      // No intrinsic plugins
+      assertNull(locator.get("dfs"));
+      assertNull(locator.intrinsicPlugins());
+
+      // Storable
+      assertTrue(locator.storable());
+
+      // Lookup by config
+      assertSame(FileSystemPlugin.class, locator.connectorClassFor(FileSystemConfig.class));
+      assertNull(locator.connectorClassFor(StoragePluginFixtureConfig.class));
+
+      // No-op
+      locator.close();
+    }
+  }
+
+  /**
+   * Verify that a "private" connector (one not registered for the
+   * class-path scan) becomes visible when listed explicitly in the
+   * private-connectors config, and that the locator can instantiate it.
+   */
+  @Test
+  public void testPrivateConnector() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+
+    // See note above.
+    // builder.configBuilder().put(ClassPathScanner.IMPLEMENTATIONS_SCAN_CACHE, false);
+    builder.configBuilder().put(ExecConstants.PRIVATE_CONNECTORS,
+        Collections.singletonList(StoragePluginFixture.class.getName()));
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      ConnectorLocator locator = new ClassicConnectorLocator(context);
+      locator.init();
+      Set<Class<? extends StoragePluginConfig>> result = locator.configClasses();
+
+      // Now the private connector does appear.
+      assertTrue(result.contains(StoragePluginFixtureConfig.class));
+
+      // Create a plugin instance given a config
+
+      StoragePluginFixtureConfig config = new StoragePluginFixtureConfig("some-mode");
+      StoragePlugin plugin = locator.create("myplugin", config);
+      assertNotNull(plugin);
+      assertTrue(plugin instanceof StoragePluginFixture);
+
+      // No-op
+      locator.close();
+    }
+  }
+
+  /**
+   * Sanity check of the real bootstrap file: the standard plugin
+   * names must be present after bootstrap.
+   */
+  @Test
+  public void testBootstrap() throws Exception {
+    try (OperatorFixture fixture = OperatorFixture.standardFixture(dirTestWatcher)) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      ConnectorLocator locator = new ClassicConnectorLocator(context);
+      locator.init();
+      StoragePlugins plugins = locator.bootstrapPlugins();
+
+      // Sanity test. Change this if the bootstrap file changes.
+      // No need to test contents; here we assume serialization works.
+      // See FormatPluginSerDeTest
+
+      assertNotNull(plugins.getConfig("dfs"));
+      assertNotNull(plugins.getConfig("s3"));
+      assertNotNull(plugins.getConfig(StoragePluginTestUtils.CP_PLUGIN_NAME));
+
+      // No-op
+      locator.close();
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
index 50f9786..923f12d 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestImplicitFileColumns.java
@@ -46,9 +46,11 @@
   public static final Path PARQUET_CHANGE_TBL = Paths.get("multilevel", "parquetWithSchemaChange");
   public static final Path CSV_TBL = Paths.get("multilevel", "csv");  // 1990/Q1/orders_1990_q1.csv, ..
 
+  @SuppressWarnings("serial")
   private static final JsonStringArrayList<Text> mainColumnValues = new JsonStringArrayList<Text>() {{
     add(new Text(MAIN));
   }};
+  @SuppressWarnings("serial")
   private static final JsonStringArrayList<Text> nestedColumnValues = new JsonStringArrayList<Text>() {{
     add(new Text(NESTED));
   }};
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java
index 2454b21..cc08e86 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestOutputMutator.java
@@ -17,14 +17,11 @@
  */
 package org.apache.drill.exec.store;
 
-import org.apache.drill.shaded.guava.com.google.common.collect.Lists;
 import io.netty.buffer.DrillBuf;
 
 import java.util.Iterator;
-import java.util.List;
 import java.util.Map;
 
-import org.apache.drill.common.expression.SchemaPath;
 import org.apache.drill.exec.exception.SchemaChangeException;
 import org.apache.drill.exec.expr.TypeHelper;
 import org.apache.drill.exec.memory.BufferAllocator;
@@ -36,9 +33,11 @@
 import org.apache.drill.exec.vector.ValueVector;
 
 import org.apache.drill.shaded.guava.com.google.common.collect.Maps;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 public class TestOutputMutator implements OutputMutator, Iterable<VectorWrapper<?>> {
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(TestOutputMutator.class);
+  static final Logger logger = LoggerFactory.getLogger(TestOutputMutator.class);
 
   private final VectorContainer container = new VectorContainer();
   private final Map<MaterializedField, ValueVector> fieldVectorMap = Maps.newHashMap();
@@ -62,24 +61,12 @@
     fieldVectorMap.put(vector.getField(), vector);
   }
 
-  private void replace(ValueVector newVector, SchemaPath schemaPath) {
-    List<ValueVector> vectors = Lists.newArrayList();
-    for (VectorWrapper w : container) {
-      ValueVector vector = w.getValueVector();
-      if (vector.getField().getName().equals(schemaPath.getRootSegmentPath())) {
-        vectors.add(newVector);
-      } else {
-        vectors.add(w.getValueVector());
-      }
-      container.remove(vector);
-    }
-    container.addCollection(vectors);
-  }
-
+  @Override
   public Iterator<VectorWrapper<?>> iterator() {
     return container.iterator();
   }
 
+  @Override
   public void clear() {
 
   }
@@ -94,6 +81,7 @@
     return;
   }
 
+  @SuppressWarnings("unchecked")
   @Override
   public <T extends ValueVector> T addField(MaterializedField field, Class<T> clazz) throws SchemaChangeException {
     ValueVector v = TypeHelper.getNewVector(field, allocator);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestPluginRegistry.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestPluginRegistry.java
new file mode 100644
index 0000000..27f8ef9
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestPluginRegistry.java
@@ -0,0 +1,278 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.commons.io.FileUtils;
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.ExecConstants;
+import org.apache.drill.exec.store.dfs.FileSystemConfig;
+import org.apache.drill.exec.store.dfs.FileSystemPlugin;
+import org.apache.drill.exec.util.StoragePluginTestUtils;
+import org.apache.drill.test.ClusterFixture;
+import org.apache.drill.test.ClusterFixtureBuilder;
+import org.junit.After;
+import org.junit.Test;
+
+/**
+ * Tests the storage plugin registry. Plugins are (at present)
+ * tightly coupled to the Drillbit context so we need to start
+ * a Drillbit per test to ensure each test works from a clean,
+ * known registry.
+ * <p>
+ * These are a few big tests because of the setup cost of
+ * starting the Drillbits in the needed config.
+ */
+public class TestPluginRegistry extends BasePluginRegistryTest {
+
+  // Wipe the persistent store between tests so each test starts
+  // from the bootstrap state rather than leftovers of the prior test.
+  @After
+  public void cleanup() throws Exception {
+    FileUtils.cleanDirectory(dirTestWatcher.getStoreDir());
+  }
+
+  /**
+   * Walks a plugin through its full lifecycle: bootstrap, create,
+   * update, ephemeral retrieval by config, enable/disable, and
+   * delete — including the rules for system ("sys") plugins.
+   */
+  @Test
+  public void testBasicLifecycle() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher);
+    try (ClusterFixture cluster = builder.build();) {
+      StoragePluginRegistry registry = cluster.storageRegistry();
+
+      // Bootstrap file loaded.
+      assertNotNull(registry.getPlugin(StoragePluginTestUtils.CP_PLUGIN_NAME)); // Normal
+      assertNotNull(registry.getPlugin("sys")); // System
+      assertNull(registry.getPlugin("bogus"));
+
+      // Enabled plugins
+      Map<String, StoragePluginConfig> configMap = registry.enabledConfigs();
+      assertTrue(configMap.containsKey(StoragePluginTestUtils.CP_PLUGIN_NAME));
+      assertFalse(configMap.containsKey("s3")); // Disabled, so excluded from the enabled map
+      assertFalse(configMap.containsKey("sys"));
+
+      // All stored plugins, including disabled
+      configMap = registry.storedConfigs();
+      assertTrue(configMap.containsKey(StoragePluginTestUtils.CP_PLUGIN_NAME));
+      assertTrue(configMap.containsKey("s3")); // Disabled, but still appears
+      assertFalse(configMap.containsKey("sys"));
+      int bootstrapCount = configMap.size();
+
+      // Create a new plugin
+      FileSystemConfig pConfig1 = new FileSystemConfig("myConn",
+          new HashMap<>(), new HashMap<>(), new HashMap<>());
+      pConfig1.setEnabled(true);
+      registry.put("myPlugin", pConfig1);
+      StoragePlugin plugin1 = registry.getPlugin("myPlugin");
+      assertNotNull(plugin1);
+      assertSame(plugin1, registry.getPlugin(pConfig1));
+      configMap = registry.storedConfigs();
+
+      // Names converted to lowercase in persistent storage
+      assertTrue(configMap.containsKey("myplugin"));
+      assertEquals(bootstrapCount + 1, configMap.size());
+
+      // Names are case-insensitive
+      assertSame(plugin1, registry.getPlugin("myplugin"));
+      assertSame(plugin1, registry.getPlugin("MYPLUGIN"));
+
+      // Update the plugin
+      Map<String, String> props = new HashMap<>();
+      props.put("foo", "bar");
+      FileSystemConfig pConfig2 = new FileSystemConfig("myConn",
+          props, new HashMap<>(), new HashMap<>());
+      pConfig2.setEnabled(true);
+      registry.put("myPlugin", pConfig2);
+      StoragePlugin plugin2 = registry.getPlugin("myPlugin");
+      assertNotSame(plugin1, plugin2);
+      assertTrue(plugin2 instanceof FileSystemPlugin);
+      FileSystemPlugin fsStorage = (FileSystemPlugin) plugin2;
+      assertSame(pConfig2, fsStorage.getConfig());
+      assertSame(plugin2, registry.getPlugin(pConfig2));
+
+      // Suppose a query was planned with plugin1 and now starts
+      // to execute. Plugin1 has been replaced with plugin2. However
+      // the registry moved the old plugin to ephemeral storage where
+      // it can still be found by configuration.
+      StoragePlugin ePlugin1 = registry.getPlugin(pConfig1);
+      assertSame(plugin1, ePlugin1);
+      assertNotSame(plugin2, ePlugin1);
+
+      // Now, another thread does the same. It gets the same
+      // ephemeral plugin.
+      assertSame(plugin1, registry.getPlugin(pConfig1));
+
+      // Change the stored plugin back to the first config.
+      registry.put("myPlugin", pConfig1);
+
+      // Now, let's suppose thread 3 starts to execute. It sees the original plugin
+      assertSame(plugin1, registry.getPlugin("myPlugin"));
+
+      // But, the ephemeral plugin lives on. Go back to the second
+      // config.
+      registry.put("myPlugin", pConfig2);
+      assertSame(plugin2, registry.getPlugin("myPlugin"));
+
+      // Thread 4, using the first config from planning in thread 3,
+      // still sees the first plugin.
+      assertSame(plugin1, registry.getPlugin(pConfig1));
+
+      // Disable
+      pConfig2.setEnabled(false);
+      assertNull(registry.getPlugin("myPlugin"));
+
+      // Though disabled, a running query will create an ephemeral
+      // plugin for the config.
+      assertSame(plugin2, registry.getPlugin(pConfig2));
+
+      // Disabling an ephemeral plugin neither makes sense
+      // nor will have any effect.
+      ePlugin1.getConfig().setEnabled(false);
+      assertSame(ePlugin1, registry.getPlugin(pConfig1));
+      assertTrue(registry.storedConfigs().containsKey("myplugin"));
+      assertFalse(registry.enabledConfigs().containsKey("myplugin"));
+
+      // Enable. The config is retrieved from the persistent store.
+      // We notice the config is in the ephemeral store and
+      // so we restore it.
+      pConfig2.setEnabled(true);
+      assertSame(plugin2, registry.getPlugin("myPlugin"));
+      assertSame(plugin2, registry.getPlugin(pConfig2));
+      assertTrue(registry.storedConfigs().containsKey("myplugin"));
+      assertTrue(registry.enabledConfigs().containsKey("myplugin"));
+
+      // Delete the plugin
+      registry.remove("myPlugin");
+      assertNull(registry.getPlugin("myPlugin"));
+
+      // Again a running query will retrieve the plugin from ephemeral storage
+      assertSame(plugin1, registry.getPlugin(pConfig1));
+      assertSame(plugin2, registry.getPlugin(pConfig2));
+
+      // Delete again, no-op
+      registry.remove("myPlugin");
+
+      // The retrieve-from-ephemeral does not kick in if we create
+      // a new plugin with the same config but a different name.
+      pConfig1.setEnabled(true);
+      registry.put("alias", pConfig1);
+      StoragePlugin plugin4 = registry.getPlugin("alias");
+      assertNotNull(plugin4);
+      assertNotSame(plugin1, plugin4);
+
+      // Delete the second name. The config is the same as one already
+      // in ephemeral store, so the second is closed. The first will
+      // be returned on subsequent queries.
+      registry.remove("alias");
+      assertNull(registry.getPlugin("alias"));
+      assertSame(plugin1, registry.getPlugin(pConfig1));
+
+      // Try to change a system plugin
+      StoragePlugin sysPlugin = registry.getPlugin("sys");
+      assertNotNull(sysPlugin);
+      FileSystemConfig pConfig3 = new FileSystemConfig("myConn",
+          props, new HashMap<>(), new HashMap<>());
+      pConfig3.setEnabled(true);
+      try {
+        registry.put("sys", pConfig3);
+        fail();
+      } catch (UserException e) {
+        // Expected
+      }
+      pConfig3.setEnabled(false);
+      try {
+        registry.put("sys", pConfig3);
+        fail();
+      } catch (UserException e) {
+        // Expected
+      }
+      assertSame(sysPlugin, registry.getPlugin("sys"));
+
+      // Try to delete a system plugin
+      try {
+        registry.remove("sys");
+        fail();
+      } catch (UserException e) {
+        // Expected
+      }
+
+      // There is no protection for disabling a system plugin because
+      // there is no code that will allow that at present.
+    }
+  }
+
+  /**
+   * Verifies that two Drillbits sharing a persistent store see each
+   * other's plugin changes: create, update and delete on one bit
+   * must be visible on the other.
+   */
+  @Test
+  public void testStoreSync() throws Exception {
+    ClusterFixtureBuilder builder = ClusterFixture.builder(dirTestWatcher)
+        .withBits("bit1", "bit2");
+
+    // We want a non-buffered, local file system store, in a known location
+    // so that the two Drillbits will coordinate roughly the same way they
+    // will when using the ZK store in distributed mode.
+    builder.configBuilder()
+      .put(ExecConstants.SYS_STORE_PROVIDER_LOCAL_ENABLE_WRITE, true)
+      .put(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH,
+          dirTestWatcher.getStoreDir().getAbsolutePath());
+    try (ClusterFixture cluster = builder.build();) {
+      StoragePluginRegistry registry1 = cluster.storageRegistry("bit1");
+      StoragePluginRegistry registry2 = cluster.storageRegistry("bit2");
+
+      // Define a plugin in Drillbit 1
+      FileSystemConfig pConfig1 = new FileSystemConfig("myConn",
+          new HashMap<>(), new HashMap<>(), new HashMap<>());
+      pConfig1.setEnabled(true);
+      registry1.put("myPlugin", pConfig1);
+      StoragePlugin plugin1 = registry1.getPlugin("myPlugin");
+      assertNotNull(plugin1);
+
+      // Should appear in Drillbit 2
+      // Note: assert against plugin2 (the bit2 view); checking plugin1
+      // here would trivially pass without exercising the sync.
+      StoragePlugin plugin2 = registry2.getPlugin("myPlugin");
+      assertNotNull(plugin2);
+      assertEquals(pConfig1, plugin2.getConfig());
+
+      // Change in Drillbit 1
+      Map<String, String> props = new HashMap<>();
+      props.put("foo", "bar");
+      FileSystemConfig pConfig3 = new FileSystemConfig("myConn",
+          props, new HashMap<>(), new HashMap<>());
+      pConfig3.setEnabled(true);
+      registry1.put("myPlugin", pConfig3);
+      plugin1 = registry1.getPlugin("myPlugin");
+      assertSame(pConfig3, plugin1.getConfig());
+
+      // Change should appear in Drillbit 2
+      plugin2 = registry2.getPlugin("myPlugin");
+      assertNotNull(plugin2);
+      assertEquals(pConfig3, plugin2.getConfig());
+
+      // Delete in Drillbit 2
+      registry2.remove("myPlugin");
+
+      // Should not be available in Drillbit 1
+      assertNull(registry1.getPlugin("myPlugin"));
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestPluginsMap.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestPluginsMap.java
new file mode 100644
index 0000000..5413b7c
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestPluginsMap.java
@@ -0,0 +1,406 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.util.Collection;
+import java.util.Set;
+
+import org.apache.drill.common.exceptions.UserException;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.exec.store.PluginHandle.PluginType;
+import org.apache.drill.test.OperatorFixture;
+import org.junit.Test;
+
+public class TestPluginsMap extends BasePluginRegistryTest {
+
+  // Builds a ConnectorHandle for the test-fixture plugin; shared
+  // setup for most tests in this class.
+  private ConnectorHandle fixtureConnector() {
+    return ConnectorHandle.configuredConnector(new LocatorFixture(),
+        StoragePluginFixtureConfig.class);
+  }
+
+  /**
+   * Exercises a single PluginHandle: lazy plugin creation,
+   * instance caching, and idempotent close.
+   */
+  @Test
+  public void testEntry() {
+
+    ConnectorHandle connector = fixtureConnector();
+    assertSame(StoragePluginFixture.class, connector.connectorClass());
+    assertSame(StoragePluginFixtureConfig.class, connector.configClass());
+    assertFalse(connector.isIntrinsic());
+
+    StoragePluginFixtureConfig config = new StoragePluginFixtureConfig("ok1");
+    PluginHandle entry = new PluginHandle("p1", config, connector);
+    assertEquals("p1", entry.name());
+    assertSame(config, entry.config());
+    assertFalse(entry.hasInstance()); // Creation is lazy: no instance yet
+
+    // Create the plugin instance
+
+    StoragePlugin plugin = entry.plugin();
+    assertNotNull(plugin);
+    assertTrue(plugin instanceof StoragePluginFixture);
+    StoragePluginFixture fixture = (StoragePluginFixture) plugin;
+    assertTrue(entry.hasInstance());
+    assertEquals(entry.name(), fixture.getName());
+    assertSame(entry.config(), fixture.getConfig());
+    assertEquals(0, fixture.closeCount());
+
+    // Close the plugin
+
+    entry.close();
+    assertEquals(1, fixture.closeCount());
+    assertNotNull(plugin);
+
+    // OK to close twice
+
+    entry.close();
+    assertEquals(1, fixture.closeCount());
+    assertNotNull(plugin);
+  }
+
+  /**
+   * Verifies PluginHandle behavior when the underlying plugin fails
+   * in its constructor, in start(), or in close(). The fixture config
+   * mode string triggers the corresponding failure.
+   */
+  @Test
+  public void testEntryFailures() {
+    ConnectorHandle connector = fixtureConnector();
+
+    // Failure on constructor
+    StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("crash-ctor");
+    PluginHandle entry1 = new PluginHandle("p1", config1, connector);
+    try {
+      entry1.plugin();
+      fail();
+    } catch (UserException e) {
+      // Expected
+    }
+    assertFalse(entry1.hasInstance());
+    entry1.close(); // No-op
+
+    // Failure on start
+    StoragePluginFixtureConfig config2 = new StoragePluginFixtureConfig("crash-start");
+    PluginHandle entry2 = new PluginHandle("p2", config2, connector);
+    try {
+      entry2.plugin();
+      fail();
+    } catch (UserException e) {
+      // Expected
+    }
+    assertFalse(entry2.hasInstance());
+    entry2.close(); // No-op
+
+    // Failure on close
+    StoragePluginFixtureConfig config3 = new StoragePluginFixtureConfig("crash-close");
+    PluginHandle entry3 = new PluginHandle("p3", config3, connector);
+    StoragePlugin plugin3 = entry3.plugin();
+
+    // Fails silently
+    entry3.close();
+    StoragePluginFixture fixture3 = (StoragePluginFixture) plugin3;
+    assertEquals(1, fixture3.closeCount());
+  }
+
+  /**
+   * Basic StoragePluginMap operations: lookup by name and by config
+   * (value equality), put, duplicate put, accessors, and remove.
+   */
+  @Test
+  public void testBasics() {
+    ConnectorHandle connector = fixtureConnector();
+
+    StoragePluginFixtureConfig config1a = new StoragePluginFixtureConfig("ok1");
+    StoragePluginFixtureConfig config1b = new StoragePluginFixtureConfig("ok1");
+    StoragePluginFixtureConfig config2 = new StoragePluginFixtureConfig("ok2");
+
+    // Sanity check that compare-by-value works for configs
+    assertTrue(config1a.equals(config1b));
+    assertFalse(config1a.equals(config2));
+
+    // Get with empty map
+    StoragePluginMap map = new StoragePluginMap();
+    assertNull(map.get("plugin1"));
+    assertNull(map.get(config1a));
+
+    PluginHandle entry1 = new PluginHandle("plugin1", config1a, connector);
+    assertNull(map.put(entry1));
+    assertSame(entry1, map.get(entry1.name()));
+    assertSame(entry1, map.get(entry1.config()));
+    assertEquals(1, map.configs().size());
+
+    // Put twice, no effect
+    assertNull(map.put(entry1));
+    assertEquals(1, map.configs().size());
+
+    // Config lookup is by value
+    assertSame(entry1, map.get(config1b));
+
+    // Add second entry
+    PluginHandle entry2 = new PluginHandle("plugin2", config2, connector);
+    assertNull(map.put(entry2));
+
+    // Accessors
+    Set<String> names = map.getNames();
+    assertEquals(2, names.size());
+    assertTrue(names.contains(entry1.name()));
+    assertTrue(names.contains(entry2.name()));
+
+    Collection<PluginHandle> plugins = map.plugins();
+    assertEquals(2, plugins.size());
+    assertTrue(plugins.contains(entry1));
+    assertTrue(plugins.contains(entry2));
+
+    Set<StoragePluginConfig> configs = map.configs();
+    assertEquals(2, configs.size());
+    assertTrue(configs.contains(entry1.config()));
+    assertTrue(configs.contains(entry2.config()));
+
+    // Convenience (but not optimistically locked) remove
+    map.remove(entry1);
+    assertNull(map.get(entry1.name()));
+    assertNull(map.get(entry1.config()));
+
+    map.remove(entry2);
+
+    assertTrue(map.getNames().isEmpty());
+    assertTrue(map.plugins().isEmpty());
+    assertTrue(map.configs().isEmpty());
+
+    map.close();
+  }
+
+  /**
+   * Remove-by-name: the map hands back the entry without closing it;
+   * closing is the caller's responsibility.
+   */
+  @Test
+  public void testRemoveByName() {
+    ConnectorHandle connector = fixtureConnector();
+
+    StoragePluginMap map = new StoragePluginMap();
+    StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("ok1");
+    PluginHandle entry1 = new PluginHandle("plugin1", config1, connector);
+    map.put(entry1);
+    StoragePluginFixture fixture1 = (StoragePluginFixture) entry1.plugin();
+    assertEquals(0, fixture1.closeCount());
+
+    // Missing entry
+    assertNull(map.remove("foo"));
+
+    // Entry is present: closed by caller
+    assertSame(entry1, map.remove("plugin1"));
+    assertEquals(0, fixture1.closeCount());
+    // Simulate caller
+    fixture1.close();
+    assertEquals(1, fixture1.closeCount());
+
+    // Entry already removed: map.close() must not close it again
+    map.close();
+    assertEquals(1, fixture1.closeCount());
+  }
+
+  /**
+   * The optimistically-locked put/putIfAbsent/remove(entry) variants:
+   * put returns the displaced entry, putIfAbsent returns whichever
+   * entry ends up in the map, and remove(entry) succeeds only for the
+   * entry currently mapped.
+   */
+  @Test
+  public void testSafePutRemove() {
+    ConnectorHandle connector = fixtureConnector();
+
+    StoragePluginMap map = new StoragePluginMap();
+    StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("ok1");
+    PluginHandle entry1 = new PluginHandle("plugin1", config1, connector);
+    map.put(entry1);
+
+    // Replacing returns original
+    StoragePluginFixtureConfig config2 = new StoragePluginFixtureConfig("ok2");
+    PluginHandle entry2 = new PluginHandle("plugin1", config2, connector);
+    assertSame(entry1, map.put(entry2));
+    assertSame(entry2, map.get(entry1.name()));
+
+    // Put if absent
+    StoragePluginFixtureConfig config3 = new StoragePluginFixtureConfig("ok3");
+    PluginHandle entry3 = new PluginHandle("plugin2", config3, connector);
+    assertSame(entry3, map.putIfAbsent(entry3));
+
+    StoragePluginFixtureConfig config4 = new StoragePluginFixtureConfig("ok4");
+    PluginHandle entry4 = new PluginHandle("plugin2", config4, connector);
+    assertSame(entry3, map.putIfAbsent(entry4));
+
+    // Remove
+    assertFalse(map.remove(entry1)); // Already replaced
+    assertTrue(map.remove(entry2)); // currently in map
+    assertTrue(map.remove(entry3));
+    assertFalse(map.remove(entry4));
+
+    assertTrue(map.getNames().isEmpty());
+    assertTrue(map.plugins().isEmpty());
+    assertTrue(map.configs().isEmpty());
+
+    map.close();
+  }
+
+  /**
+   * Compare-and-swap replace: succeeds only when the expected entry
+   * is the one currently mapped; the config index tracks the swap.
+   */
+  @Test
+  public void testReplace() {
+    ConnectorHandle connector = fixtureConnector();
+
+    StoragePluginMap map = new StoragePluginMap();
+    StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("ok1");
+    PluginHandle entry1 = new PluginHandle("plugin1", config1, connector);
+    map.put(entry1);
+
+    // Replace existing item
+    StoragePluginFixtureConfig config2 = new StoragePluginFixtureConfig("ok2");
+    PluginHandle entry2 = new PluginHandle("plugin1", config2, connector);
+    assertTrue(map.replace(entry1, entry2));
+
+    // Replace non-existing entry
+    StoragePluginFixtureConfig config3 = new StoragePluginFixtureConfig("ok3");
+    PluginHandle entry3 = new PluginHandle("plugin1", config3, connector);
+    assertFalse(map.replace(entry1, entry3));
+    assertSame(entry2, map.get(entry1.name()));
+    assertNull(map.get(entry1.config()));
+    assertSame(entry2, map.get(entry2.config()));
+    assertNull(map.get(entry3.config()));
+
+    assertEquals(1, map.getNames().size());
+    assertEquals(1, map.plugins().size());
+    assertEquals(1, map.configs().size());
+    map.close();
+  }
+
+  /**
+   * Remove by (name, config): both must match the current entry;
+   * the config match is by value, not identity.
+   */
+  @Test
+  public void testSafeRemove() {
+    ConnectorHandle connector = fixtureConnector();
+
+    StoragePluginMap map = new StoragePluginMap();
+    StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("ok1");
+    PluginHandle entry1 = new PluginHandle("plugin1", config1, connector);
+    map.put(entry1);
+
+    // Wrong name, config OK
+    assertNull(map.remove("plugin2", config1));
+
+    // Name OK, wrong config
+    StoragePluginFixtureConfig config2 = new StoragePluginFixtureConfig("ok2");
+    assertNull(map.remove("plugin1", config2));
+    assertEquals(1, map.getNames().size());
+
+    // Name and config match, removed
+    StoragePluginFixtureConfig config3 = new StoragePluginFixtureConfig("ok1");
+    PluginHandle ret = map.remove("plugin1", config3);
+    assertSame(entry1, ret);
+
+    assertTrue(map.getNames().isEmpty());
+
+    map.close();
+  }
+
+  /**
+   * Intrinsic (system) plugins cannot be overwritten, replaced or
+   * removed through the map; such attempts either fail or are ignored.
+   */
+  // Was missing @Test, so JUnit 4 silently skipped this method.
+  @Test
+  public void testIntrinsic() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      ConnectorLocator locator = new SystemPluginLocator(context);
+      locator.init();
+
+      Collection<StoragePlugin> sysPlugins = locator.intrinsicPlugins();
+      assertTrue(!sysPlugins.isEmpty());
+      StoragePlugin sysPlugin = sysPlugins.iterator().next();
+      ConnectorHandle connector = ConnectorHandle.intrinsicConnector(locator, sysPlugin);
+      assertTrue(connector.isIntrinsic());
+
+      StoragePluginMap map = new StoragePluginMap();
+      PluginHandle sysEntry = new PluginHandle(sysPlugin, connector, PluginType.INTRINSIC);
+      assertNull(map.put(sysEntry));
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // Second request to put the same (system) plugin is ignored
+      assertNull(map.put(sysEntry));
+
+      // Attempt to overwrite a system plugin is forcefully denied.
+      // Users can make this mistake, so a UserException is thrown
+      StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("ok1");
+      PluginHandle entry1 = new PluginHandle(sysPlugin.getName(), config1, connector);
+      try {
+        map.put(entry1);
+        fail();
+      } catch (UserException e) {
+        // Expected
+      }
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // putIfAbsent does not replace an existing plugin
+      assertSame(sysEntry, map.putIfAbsent(entry1));
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // Replace fails. Caller should have checked if the entry
+      // is intrinsic.
+      try {
+        map.replace(sysEntry, entry1);
+        fail();
+      } catch (IllegalArgumentException e) {
+        // Expected
+      }
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // Remove by entry fails for the same reasons as above.
+      try {
+        map.remove(sysEntry);
+        fail();
+      } catch (IllegalArgumentException e) {
+        // Expected
+      }
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // Request to remove by name is ignored
+      // Caller can't be expected to know the meaning of the name
+      // Request to remove an intrinsic plugin by name is treated the
+      // same as a request to remove a non-existent plugin
+      assertNull(map.remove(sysPlugin.getName()));
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // Request to remove by name and config fails
+      // as above.
+
+      assertNull(map.remove(sysPlugin.getName(), sysPlugin.getConfig()));
+      assertSame(sysEntry, map.get(sysPlugin.getName()));
+
+      // Close does close intrinsic plugins, but no way to check
+      // it without elaborate mocking
+      map.close();
+    }
+  }
+
+  /**
+   * Closing the map closes every entry that has a plugin instance;
+   * entries never instantiated stay uninstantiated.
+   */
+  // Was missing @Test, so JUnit 4 silently skipped this method.
+  @Test
+  public void testClose() {
+    ConnectorHandle connector = fixtureConnector();
+
+    StoragePluginMap map = new StoragePluginMap();
+    StoragePluginFixtureConfig config1 = new StoragePluginFixtureConfig("ok1");
+    PluginHandle entry1 = new PluginHandle("plugin1", config1, connector);
+    map.put(entry1);
+
+    // Create the plugin instance
+    StoragePlugin plugin1 = entry1.plugin();
+    assertNotNull(plugin1);
+    assertTrue(entry1.hasInstance());
+
+    // Second, no instance
+    StoragePluginFixtureConfig config2 = new StoragePluginFixtureConfig("ok2");
+    PluginHandle entry2 = new PluginHandle("plugin2", config2, connector);
+    map.put(entry2);
+    assertFalse(entry2.hasInstance());
+
+    // Close the map
+    map.close();
+
+    // Everything closed
+    assertFalse(entry1.hasInstance());
+    assertFalse(entry2.hasInstance());
+    StoragePluginFixture fixture1 = (StoragePluginFixture) plugin1;
+    assertEquals(1, fixture1.closeCount());
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestSystemPluginLocator.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestSystemPluginLocator.java
new file mode 100644
index 0000000..2e68fab
--- /dev/null
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/TestSystemPluginLocator.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.drill.exec.store;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertSame;
+import static org.junit.Assert.assertTrue;
+
+import java.util.Collection;
+
+import org.apache.drill.exec.store.sys.SystemTablePlugin;
+import org.apache.drill.exec.store.sys.SystemTablePluginConfig;
+import org.apache.drill.test.OperatorFixture;
+import org.junit.Test;
+
+public class TestSystemPluginLocator extends BasePluginRegistryTest {
+
+  // Uses a null Drillbit context. This is benign for the current
+  // set of system plugins. If this test fails, check if a system
+  // plugin uses the Drillbit context. Eventually, we'll use a different
+  // context for plugins. But, if the crash happens sooner, change this
+  // to a cluster test so a DrillbitContext is available.
+  @Test
+  public void testSystemLocator() throws Exception {
+    OperatorFixture.Builder builder = OperatorFixture.builder(dirTestWatcher);
+    try (OperatorFixture fixture = builder.build()) {
+      PluginRegistryContextFixture context = new PluginRegistryContextFixture(fixture);
+      ConnectorLocator locator = new SystemPluginLocator(context);
+      locator.init();
+
+      // No bootstrap or upgrade configs
+      assertNull(locator.bootstrapPlugins());
+      assertNull(locator.updatedPlugins());
+
+      // No user-creatable configs
+      assertNull(locator.configClasses());
+
+      // Test intrinsic plugins
+      assertNotNull(locator.get("sys"));
+
+      Collection<StoragePlugin> intrinsics = locator.intrinsicPlugins();
+      assertNotNull(intrinsics);
+      assertTrue(intrinsics.contains(locator.get("sys")));
+
+      // System plugins are not storable
+      assertFalse(locator.storable());
+
+      // Map from config to impl class
+      assertSame(SystemTablePlugin.class, locator.connectorClassFor(SystemTablePluginConfig.class));
+
+      // No-op
+      locator.close();
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TextRecordReaderTest.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TextRecordReaderTest.java
deleted file mode 100644
index 8e37c30..0000000
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/easy/text/compliant/TextRecordReaderTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.drill.exec.store.easy.text.compliant;
-
-import static org.junit.Assert.assertEquals;
-
-import java.util.List;
-
-import org.apache.drill.common.util.DrillFileUtils;
-import org.apache.drill.exec.client.DrillClient;
-import org.apache.drill.exec.pop.PopUnitTestBase;
-import org.apache.drill.exec.record.RecordBatchLoader;
-import org.apache.drill.exec.rpc.user.QueryDataBatch;
-import org.apache.drill.exec.server.Drillbit;
-import org.apache.drill.exec.server.RemoteServiceSet;
-import org.apache.drill.exec.util.VectorUtil;
-import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
-import org.apache.drill.shaded.guava.com.google.common.io.Files;
-import org.junit.Test;
-
-public class TextRecordReaderTest extends PopUnitTestBase {
-
-  @Test
-  public void testFullExecution() throws Exception {
-    RemoteServiceSet serviceSet = RemoteServiceSet.getLocalServiceSet();
-
-    try(Drillbit bit1 = new Drillbit(CONFIG, serviceSet);
-        DrillClient client = new DrillClient(CONFIG, serviceSet.getCoordinator())) {
-
-      bit1.run();
-      client.connect();
-      List<QueryDataBatch> results = client.runQuery(org.apache.drill.exec.proto.UserBitShared.QueryType.PHYSICAL,
-              Files.asCharSource(
-                      DrillFileUtils.getResourceAsFile("/store/text/test.json"), Charsets.UTF_8).read()
-                      .replace("#{DATA_FILE}", DrillFileUtils.getResourceAsFile("/store/text/data/regions.csv").toURI().toString()));
-      int count = 0;
-      RecordBatchLoader loader = new RecordBatchLoader(bit1.getContext().getAllocator());
-      for(QueryDataBatch b : results) {
-        if (b.getHeader().getRowCount() != 0) {
-          count += b.getHeader().getRowCount();
-        }
-        loader.load(b.getHeader().getDef(), b.getData());
-        VectorUtil.logVectorAccessibleContent(loader);
-        loader.clear();
-        b.release();
-      }
-      assertEquals(5, count);
-    }
-  }
-}
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
index 12006fb..8b95fbf 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/httpd/TestHTTPDLogReader.java
@@ -66,7 +66,7 @@
     final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin("cp");
     final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
     pluginConfig.getFormats().put("sample", sampleConfig);
-    pluginRegistry.createOrUpdate("cp", pluginConfig, false);
+    pluginRegistry.put("cp", pluginConfig);
   }
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
index 91ee0e8..3ca31e8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/log/TestLogReader.java
@@ -151,7 +151,7 @@
     pluginConfig.getFormats().put("date-log",logDateConfig);
     pluginConfig.getFormats().put("mysql-log", mysqlLogConfig);
     pluginConfig.getFormats().put("ssdlog", firewallConfig);
-    pluginRegistry.createOrUpdate("cp", pluginConfig, false);
+    pluginRegistry.put("cp", pluginConfig);
 
     // Config similar to the above, but with no type info. Types
     // will be provided via the provided schema mechanism. Column names
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
index 3b878c3..5441670 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestPcapEVFReader.java
@@ -51,7 +51,7 @@
     final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin("cp");
     final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
     pluginConfig.getFormats().put("sample", sampleConfig);
-    pluginRegistry.createOrUpdate("cp", pluginConfig, false);
+    pluginRegistry.put("cp", pluginConfig);
   }
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestSessionizePCAP.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestSessionizePCAP.java
index fe1c3d5..e0a0c6c 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestSessionizePCAP.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/pcap/TestSessionizePCAP.java
@@ -34,14 +34,11 @@
 import java.nio.file.Paths;
 import org.junit.BeforeClass;
 import org.junit.Test;
-import java.time.format.DateTimeFormatter;
 
 import static org.junit.Assert.assertEquals;
 
 public class TestSessionizePCAP extends ClusterTest {
 
-  private static final DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSS");
-
   @BeforeClass
   public static void setup() throws Exception {
     ClusterTest.startCluster(ClusterFixture.builder(dirTestWatcher));
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestDisabledPlugin.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestDisabledPlugin.java
index c342e8f..74add91 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestDisabledPlugin.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/store/TestDisabledPlugin.java
@@ -46,13 +46,13 @@
     pluginRegistry = cluster.drillbit().getContext().getStorage();
     pluginConfig = (FileSystemConfig) pluginRegistry.getPlugin(CP_PLUGIN_NAME).getConfig();
     pluginConfig.setEnabled(false);
-    pluginRegistry.createOrUpdate(CP_PLUGIN_NAME, pluginConfig, true);
+    pluginRegistry.put(CP_PLUGIN_NAME, pluginConfig);
   }
 
   @AfterClass
   public static void restore() throws Exception {
     pluginConfig.setEnabled(true);
-    pluginRegistry.createOrUpdate(CP_PLUGIN_NAME, pluginConfig, true);
+    pluginRegistry.put(CP_PLUGIN_NAME, pluginConfig);
   }
 
   @Test
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/store/sys/TestLocalPersistentStore.java b/exec/java-exec/src/test/java/org/apache/drill/exec/store/sys/TestLocalPersistentStore.java
index 6e773db..b26ccca 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/store/sys/TestLocalPersistentStore.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/store/sys/TestLocalPersistentStore.java
@@ -70,97 +70,104 @@
   @Test
   public void testAbsentGet() throws Exception {
     Path path = new Path(root.newFolder("absent-get").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
 
-    assertNull(store.get("abc"));
+      assertNull(store.get("abc"));
 
-    ILLEGAL_KEYS.stream()
-      .map(store::get)
-      .forEach(Assert::assertNull);
+      ILLEGAL_KEYS.stream()
+        .map(store::get)
+        .forEach(Assert::assertNull);
+    }
   }
 
   @Test
   public void testContains() throws Exception {
     Path path = new Path(root.newFolder("contains").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
-    store.put("abc", "desc");
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
+      store.put("abc", "desc");
 
-    ILLEGAL_KEYS.stream()
-      .map(store::contains)
-      .forEach(Assert::assertFalse);
+      ILLEGAL_KEYS.stream()
+        .map(store::contains)
+        .forEach(Assert::assertFalse);
 
-    assertFalse(store.contains("a"));
-    assertTrue(store.contains("abc"));
+      assertFalse(store.contains("a"));
+      assertTrue(store.contains("abc"));
+    }
   }
 
   @Test
   public void testPutAndGet() throws Exception {
     Path path = new Path(root.newFolder("put-and-get").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
 
-    store.put("abc", "desc");
-    assertEquals("desc", store.get("abc"));
+      store.put("abc", "desc");
+      assertEquals("desc", store.get("abc"));
 
-    store.put("abc", "new-desc");
-    assertEquals("new-desc", store.get("abc"));
+      store.put("abc", "new-desc");
+      assertEquals("new-desc", store.get("abc"));
+    }
   }
 
   @Test
   public void testIllegalPut() throws Exception {
     Path path = new Path(root.newFolder("illegal-put").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
 
-    ILLEGAL_KEYS.forEach(key -> {
-      try {
-        store.put(key, "desc");
-        fail(String.format("Key [%s] should be illegal, put in the store should have failed", key));
-      } catch (DrillRuntimeException e) {
-        assertTrue(e.getMessage().startsWith("Illegal storage key name"));
-      }
-    });
+      ILLEGAL_KEYS.forEach(key -> {
+        try {
+          store.put(key, "desc");
+          fail(String.format("Key [%s] should be illegal, put in the store should have failed", key));
+        } catch (DrillRuntimeException e) {
+          assertTrue(e.getMessage().startsWith("Illegal storage key name"));
+        }
+      });
+    }
   }
 
   @Test
   public void testPutIfAbsent() throws Exception {
     Path path = new Path(root.newFolder("put-if-absent").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
 
-    assertTrue(store.putIfAbsent("abc", "desc"));
-    assertFalse(store.putIfAbsent("abc", "new-desc"));
-    assertEquals("desc", store.get("abc"));
+      assertTrue(store.putIfAbsent("abc", "desc"));
+      assertFalse(store.putIfAbsent("abc", "new-desc"));
+      assertEquals("desc", store.get("abc"));
+    }
   }
 
   @Test
   public void testIllegalPutIfAbsent() throws Exception {
     Path path = new Path(root.newFolder("illegal-put-if-absent").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
 
-    ILLEGAL_KEYS.forEach(key -> {
-      try {
-        store.putIfAbsent(key, "desc");
-        fail(String.format("Key [%s] should be illegal, putIfAbsent in the store should have failed", key));
-      } catch (DrillRuntimeException e) {
-        assertTrue(e.getMessage().startsWith("Illegal storage key name"));
-      }
-    });
+      ILLEGAL_KEYS.forEach(key -> {
+        try {
+          store.putIfAbsent(key, "desc");
+          fail(String.format("Key [%s] should be illegal, putIfAbsent in the store should have failed", key));
+        } catch (DrillRuntimeException e) {
+          assertTrue(e.getMessage().startsWith("Illegal storage key name"));
+        }
+      });
+    }
   }
 
   @Test
   public void testRange() throws Exception {
     Path path = new Path(root.newFolder("range").toURI().getPath());
-    LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG);
+    try (LocalPersistentStore<String> store = new LocalPersistentStore<>(fs, path, DEFAULT_STORE_CONFIG)) {
 
-    assertEquals(0, Lists.newArrayList(store.getRange(0, 10)).size());
+      assertEquals(0, Lists.newArrayList(store.getRange(0, 10)).size());
 
-    IntStream.range(0, 10)
-      .forEach(i -> store.put("key_" + i, "value_" + i));
+      IntStream.range(0, 10)
+        .forEach(i -> store.put("key_" + i, "value_" + i));
 
-    assertEquals(10, Lists.newArrayList(store.getRange(0, 20)).size());
-    assertEquals(10, Lists.newArrayList(store.getRange(0, 10)).size());
-    assertEquals(9, Lists.newArrayList(store.getRange(0, 9)).size());
-    assertEquals(0, Lists.newArrayList(store.getRange(10, 2)).size());
-    assertEquals(5, Lists.newArrayList(store.getRange(2, 5)).size());
-    assertEquals(0, Lists.newArrayList(store.getRange(0, 0)).size());
-    assertEquals(0, Lists.newArrayList(store.getRange(4, 0)).size());
+      assertEquals(10, Lists.newArrayList(store.getRange(0, 20)).size());
+      assertEquals(10, Lists.newArrayList(store.getRange(0, 10)).size());
+      assertEquals(9, Lists.newArrayList(store.getRange(0, 9)).size());
+      assertEquals(0, Lists.newArrayList(store.getRange(10, 2)).size());
+      assertEquals(5, Lists.newArrayList(store.getRange(2, 5)).size());
+      assertEquals(0, Lists.newArrayList(store.getRange(0, 0)).size());
+      assertEquals(0, Lists.newArrayList(store.getRange(4, 0)).size());
+    }
   }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
index c83d24a..454a944 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/exec/udf/dynamic/TestDynamicUDFSupport.java
@@ -24,9 +24,7 @@
 import org.apache.drill.categories.SlowTest;
 import org.apache.drill.categories.SqlFunctionTest;
 import org.apache.drill.common.config.ConfigConstants;
-import org.apache.drill.common.config.DrillConfig;
 import org.apache.drill.common.exceptions.UserRemoteException;
-import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.exception.VersionMismatchException;
 import org.apache.drill.exec.expr.fn.FunctionImplementationRegistry;
 import org.apache.drill.exec.expr.fn.registry.LocalFunctionRegistry;
@@ -53,7 +51,6 @@
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.util.List;
-import java.util.Properties;
 import java.util.concurrent.CountDownLatch;
 
 import static org.apache.drill.test.HadoopUtils.hadoopToJavaPath;
@@ -102,13 +99,7 @@
 
   @Before
   public void setupNewDrillbit() throws Exception {
-    File udfLocalDir = new File(dirTestWatcher.getUdfDir(), "local");
-    Properties overrideProps = new Properties();
-    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getUdfDir().getAbsolutePath());
-    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_LOCAL, udfLocalDir.getAbsolutePath());
-    overrideProps.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
-    updateTestCluster(1, DrillConfig.create(overrideProps));
-
+    updateTestCluster(1, config);
     fsUri = getLocalFileSystem().getUri();
   }
 
@@ -566,9 +557,12 @@
         .baselineValues(true, String.format(summary, defaultBinaryJar))
         .go();
 
-    thrown.expect(UserRemoteException.class);
-    thrown.expectMessage(containsString("No match found for function signature custom_lower(<CHARACTER>)"));
-    test("select custom_lower('A') from (values(1))");
+    try {
+      test("select custom_lower('A') from (values(1))");
+      fail();
+    } catch (UserRemoteException e) {
+      assertTrue(e.getMessage().contains("No match found for function signature custom_lower(<CHARACTER>)"));
+    }
 
     RemoteFunctionRegistry remoteFunctionRegistry = getDrillbitContext().getRemoteFunctionRegistry();
     Path registryPath = hadoopToJavaPath(remoteFunctionRegistry.getRegistryArea());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java
index 7f65750..7654b19 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseDirTestWatcher.java
@@ -23,43 +23,47 @@
 import java.nio.file.Paths;
 
 import org.apache.commons.io.FileUtils;
+import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.shaded.guava.com.google.common.base.Charsets;
 import org.junit.runner.Description;
 
 /**
  * <h4>Overview</h4>
  * <p>
- * This is a {@link DirTestWatcher} which creates all the temporary directories required by a Drillbit and the various <b>dfs.*</b> storage workspaces. It also
- * provides convenience methods that do the following:
+ * This is a {@link DirTestWatcher} which creates all the temporary directories
+ * required by a Drillbit and the various <b>dfs.*</b> storage workspaces. It
+ * also provides convenience methods that do the following:
  *
  * <ol>
- *   <li>Copy project files to temp directories. This is useful for copying the sample data into a temp directory.</li>
- *   <li>Copy resource files to temp.</li>
- *   <li>Updating parquet metadata files.</li>
+ * <li>Copy project files to temp directories. This is useful for copying the
+ * sample data into a temp directory.</li>
+ * <li>Copy resource files to temp.</li>
+ * <li>Updating parquet metadata files.</li>
  * </ol>
- * </p>
+ * </p><p>
+ * The {@link BaseDirTestWatcher} creates the following directories in the
+ * <b>base temp directory</b> (for a description of where the <b>base temp
+ * directory</b> is located please read the docs for {@link DirTestWatcher}):
  *
- * <p>
- *   The {@link BaseDirTestWatcher} creates the following directories in the <b>base temp directory</b> (for a description of where the <b>base temp directory</b>
- *   is located please read the docs for {@link DirTestWatcher}):
- *
- *   <ul>
- *     <li><b>tmp:</b> {@link #getTmpDir()}</li>
- *     <li><b>store:</b> {@link #getStoreDir()}</li>
- *     <li><b>root:</b> {@link #getRootDir()}</li>
- *     <li><b>dfsTestTmp:</b> {@link #getDfsTestTmpDir()}</li>
- *   </ul>
+ * <ul>
+ * <li><b>tmp:</b> {@link #getTmpDir()}</li>
+ * <li><b>store:</b> {@link #getStoreDir()}</li>
+ * <li><b>root:</b> {@link #getRootDir()}</li>
+ * <li><b>dfsTestTmp:</b> {@link #getDfsTestTmpDir()}</li>
+ * </ul>
  * </p>
  *
  * <h4>Examples</h4>
  * <p>
- *   The {@link BaseDirTestWatcher} is used in {@link BaseTestQuery} and an example of how it is used in conjunction with the {@link ClusterFixture} can be found in
- *   {@link ExampleTest}.
+ * The {@link BaseDirTestWatcher} is used in {@link BaseTestQuery} and an
+ * example of how it is used in conjunction with the {@link ClusterFixture} can
+ * be found in {@link ExampleTest}.
  * </p>
  */
 public class BaseDirTestWatcher extends DirTestWatcher {
   /**
-   * An enum used to represent the directories mapped to the <b>dfs.root</b> and <b>dfs.tmp</b> workspaces repectively.
+   * An enum used to represent the directories mapped to the <b>dfs.root</b> and
+   * <b>dfs.tmp</b> workspaces respectively.
    */
   public enum DirType {
     ROOT, // Corresponds to the directory that should be mapped to dfs.root
@@ -73,7 +77,7 @@
   private File dfsTestTmpParentDir;
   private File dfsTestTmpDir;
   private File rootDir;
-  private File udfDir;
+  private File homeDir;
 
   /**
    * Creates a {@link BaseDirTestWatcher} which does not delete it's temp directories at the end of tests.
@@ -84,7 +88,10 @@
 
   /**
    * Creates a {@link BaseDirTestWatcher}.
-   * @param deleteDirAtEnd If true, temp directories are deleted at the end of tests. If false, temp directories are not deleted at the end of tests.
+   *
+   * @param deleteDirAtEnd
+   *          If true, temp directories are deleted at the end of tests. If
+   *          false, temp directories are not deleted at the end of tests.
    */
   public BaseDirTestWatcher(boolean deleteDirAtEnd) {
     super(deleteDirAtEnd);
@@ -102,9 +109,9 @@
     spillDir = makeSubDir(Paths.get("spill"));
     rootDir = makeSubDir(Paths.get("root"));
     tmpDir = makeSubDir(Paths.get("tmp"));
-    storeDir = makeSubDir(Paths.get("store"));
+    storeDir = makeSubDir(Paths.get(StoragePluginRegistry.PSTORE_NAME));
     dfsTestTmpParentDir = makeSubDir(Paths.get("dfsTestTmp"));
-    udfDir = makeSubDir(Paths.get("udf"));
+    homeDir = makeSubDir(Paths.get("home"));
 
     newDfsTestTmpDir();
   }
@@ -120,23 +127,34 @@
       FileUtils.cleanDirectory(tmpDir);
       FileUtils.cleanDirectory(storeDir);
       FileUtils.cleanDirectory(dfsTestTmpDir);
-      FileUtils.cleanDirectory(udfDir);
+      FileUtils.cleanDirectory(homeDir);
     } catch (IOException e) {
       throw new RuntimeException(e);
     }
   }
 
   /**
-   * Gets the temp directory that should be used as a Drillbit's tmp directory.
-   * @return The temp directory that should be used as a Drillbit's tmp directory.
+   * Gets the temp directory that should be used as a Drillbit's tmp directory.
+   * @return The temp directory that should be used as a Drillbit's tmp directory.
    */
   public File getTmpDir() {
     return tmpDir;
   }
 
   /**
-   * Gets the temp directory that should be used by the {@link org.apache.drill.exec.store.sys.store.LocalPersistentStore}.
-   * @return The temp directory that should be used by the {@link org.apache.drill.exec.store.sys.store.LocalPersistentStore}.
+   * Gets the temp directory that is a proxy for the user's home directory.
+   * @return proxy for the user's home directory
+   */
+  public File getHomeDir() {
+    return homeDir;
+  }
+
+  /**
+   * Gets the temp directory that should be used by the
+   * {@link org.apache.drill.exec.store.sys.store.LocalPersistentStore}.
+   *
+   * @return The temp directory that should be used by the
+   *         {@link org.apache.drill.exec.store.sys.store.LocalPersistentStore}.
    */
   public File getStoreDir() {
     return storeDir;
@@ -151,8 +169,11 @@
   }
 
   /**
-   * Gets the temp directory that should be used to hold the contents of the <b>dfs.root</b> workspace.
-   * @return The temp directory that should be used to hold the contents of the <b>dfs.root</b> workspace.
+   * Gets the temp directory that should be used to hold the contents of the
+   * <b>dfs.root</b> workspace.
+   *
+   * @return The temp directory that should be used to hold the contents of the
+   *         <b>dfs.root</b> workspace.
    */
   public File getRootDir() {
     return rootDir;
@@ -171,14 +192,6 @@
   }
 
   /**
-   * Gets the temp directory that should be used as base directory for dynamic UDFs.
-   * @return The temp directory that should be used as base directory for dynamic UDFs.
-   */
-  public File getUdfDir() {
-    return udfDir;
-  }
-
-  /**
    * This methods creates a new directory which can be mapped to <b>dfs.tmp</b>.
    */
   public void newDfsTestTmpDir() {
@@ -186,8 +199,10 @@
   }
 
   /**
-   * A helper method which returns the correct directory corresponding to the given {@link DirType}.
-   * @param type The directory to return.
+   * Returns the correct directory corresponding to the given {@link DirType}.
+   *
+   * @param type
+   *          The directory to return.
    * @return The directory corresponding to the given {@link DirType}.
    */
   private File getDir(DirType type) {
@@ -202,18 +217,28 @@
   }
 
   /**
-   * Creates a directory in the temp root directory (corresponding to <b>dfs.root</b>) at the given relative path.
-   * @param relPath The relative path in the temp root directory at which to create a directory.
-   * @return The {@link java.io.File} corresponding to the sub directory that was created.
+   * Creates a directory in the temp root directory (corresponding to
+   * <b>dfs.root</b>) at the given relative path.
+   *
+   * @param relPath
+   *          The relative path in the temp root directory at which to create a
+   *          directory.
+   * @return The {@link java.io.File} corresponding to the sub directory that
+   *         was created.
    */
   public File makeRootSubDir(Path relPath) {
     return makeSubDir(relPath, DirType.ROOT);
   }
 
   /**
-   * Creates a directory in the temp tmp directory (corresponding to <b>dfs.tmp</b>) at the given relative path.
-   * @param relPath The relative path in the temp tmp directory at which to create a directory.
-   * @return The {@link java.io.File} corresponding to the sub directory that was created.
+   * Creates a directory in the tmp directory (corresponding to <b>dfs.tmp</b>)
+   * at the given relative path.
+   *
+   * @param relPath
+   *          The relative path in the tmp directory at which to create a
+   *          directory.
+   * @return The {@link java.io.File} corresponding to the sub directory that
+   *         was created.
    */
   public File makeTestTmpSubDir(Path relPath) {
     return makeSubDir(relPath, DirType.TEST_TMP);
@@ -229,31 +254,50 @@
   }
 
   /**
-   * This copies a file or directory from <b>src/test/resources</b> into the temp root directory (corresponding to <b>dfs.root</b>). The relative path of the file or
-   * directory in <b>src/test/resources</b> is preserved in the temp root directory.
-   * @param relPath The relative path of the file or directory in <b>src/test/resources</b> to copy into the root temp folder.
-   * @return The {@link java.io.File} corresponding to the copied file or directory in the temp root directory.
+   * This copies a file or directory from {@code src/test/resources} into the
+   * temp root directory (corresponding to {@code dfs.root}). The relative path
+   * of the file or directory in {@code src/test/resources} is preserved in the
+   * temp root directory.
+   *
+   * @param relPath
+   *          The relative path of the file or directory in
+   *          {@code src/test/resources} to copy into the root temp folder.
+   * @return The {@link java.io.File} corresponding to the copied file or
+   *         directory in the temp root directory.
    */
   public File copyResourceToRoot(Path relPath) {
     return copyTo(relPath, relPath, TestTools.FileSource.RESOURCE, DirType.ROOT);
   }
 
   /**
-   * This copies a filed or directory from the maven project into the temp root directory (corresponding to <b>dfs.root</b>). The relative path of the file or directory
-   * in the maven module is preserved in the temp root directory.
-   * @param relPath The relative path of the file or directory in the maven module to copy into the root temp folder.
-   * @return The {@link java.io.File} corresponding to the copied file or directory in the temp root directory.
+   * This copies a filed or directory from the maven project into the temp root
+   * directory (corresponding to <b>dfs.root</b>). The relative path of the file
+   * or directory in the maven module is preserved in the temp root directory.
+   *
+   * @param relPath
+   *          The relative path of the file or directory in the maven module to
+   *          copy into the root temp folder.
+   * @return The {@link java.io.File} corresponding to the copied file or
+   *         directory in the temp root directory.
    */
   public File copyFileToRoot(Path relPath) {
     return copyTo(relPath, relPath, TestTools.FileSource.PROJECT, DirType.ROOT);
   }
 
   /**
-   * This copies a file or directory from <b>src/test/resources</b> into the temp root directory (corresponding to <b>dfs.root</b>). The file or directory is copied
-   * to the provided relative destPath in the temp root directory.
-   * @param relPath The source relative path of a file or directory from <b>src/test/resources</b> that will be copied.
-   * @param destPath The destination relative path of the file or directory in the temp root directory.
-   * @return The {@link java.io.File} corresponding to the final copied file or directory in the temp root directory.
+   * This copies a file or directory from <b>src/test/resources</b> into the
+   * temp root directory (corresponding to <b>dfs.root</b>). The file or
+   * directory is copied to the provided relative destPath in the temp root
+   * directory.
+   *
+   * @param relPath
+   *          The source relative path of a file or directory from
+   *          <b>src/test/resources</b> that will be copied.
+   * @param destPath
+   *          The destination relative path of the file or directory in the temp
+   *          root directory.
+   * @return The {@link java.io.File} corresponding to the final copied file or
+   *         directory in the temp root directory.
    */
   public File copyResourceToRoot(Path relPath, Path destPath) {
     return copyTo(relPath, destPath, TestTools.FileSource.RESOURCE, DirType.ROOT);
@@ -276,11 +320,19 @@
   }
 
   /**
-   * This copies a file or directory from <b>src/test/resources</b> into the temp root directory (corresponding to <b>dfs.root</b>). The file or directory is copied
-   * to the provided relative destPath in the temp root directory.
-   * @param relPath The source relative path of a file or directory from <b>src/test/resources</b> that will be copied.
-   * @param destPath The destination relative path of the file or directory in the temp root directory.
-   * @return The {@link java.io.File} corresponding to the final copied file or directory in the temp root directory.
+   * This copies a file or directory from {@code src/test/resources} into the
+   * temp root directory (corresponding to {@code dfs.root}). The file or
+   * directory is copied to the provided relative destPath in the temp root
+   * directory.
+   *
+   * @param relPath
+   *          The source relative path of a file or directory from
+   *          {@code src/test/resources} that will be copied.
+   * @param destPath
+   *          The destination relative path of the file or directory in the temp
+   *          root directory.
+   * @return The {@link java.io.File} corresponding to the final copied file or
+   *         directory in the temp root directory.
    */
   public File copyResourceToTestTmp(Path relPath, Path destPath) {
     return copyTo(relPath, destPath, TestTools.FileSource.RESOURCE, DirType.TEST_TMP);
@@ -318,10 +370,16 @@
   }
 
   /**
-   * This is a convenience method that replaces placeholders in test parquet metadata files.
-   * @param metaDataFile The parquet metadata file to do string replacement on.
-   * @param replacePath The path to replace <b>REPLACED_IN_TEST</b> with in the parquet metadata file.
-   * @param customStringReplacement If this is provided a <b>CUSTOM_STRING_REPLACEMENT</b> is replaced in the parquet metadata file with this string.
+   * Replaces placeholders in test Parquet metadata files.
+   *
+   * @param metaDataFile
+   *          The Parquet metadata file to do string replacement on.
+   * @param replacePath
+   *          The path to replace {@code REPLACED_IN_TEST} with in the Parquet
+   *          metadata file.
+   * @param customStringReplacement
+   *          If this is provided, a {@code CUSTOM_STRING_REPLACEMENT} is replaced
+   *          in the Parquet metadata file with this string.
    */
   public void replaceMetaDataContents(File metaDataFile, File replacePath, String customStringReplacement) {
     try {
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
index 4418f4d..5dbf5c1 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/BaseTestQuery.java
@@ -68,6 +68,7 @@
 import org.apache.drill.shaded.guava.com.google.common.base.Preconditions;
 import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 import org.apache.drill.test.DrillTestWrapper.TestServices;
+import org.apache.hadoop.fs.FileSystem;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.slf4j.Logger;
@@ -165,6 +166,8 @@
 
     props.setProperty(ExecConstants.DRILL_TMP_DIR, dirTestWatcher.getTmpDir().getAbsolutePath());
     props.setProperty(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH, dirTestWatcher.getStoreDir().getAbsolutePath());
+    props.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getHomeDir().getAbsolutePath());
+    props.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
 
     return props;
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
index a7e8ef7..b1f8aed 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixture.java
@@ -36,11 +36,11 @@
 import java.util.Map;
 import java.util.Optional;
 import java.util.Properties;
-import java.util.function.Function;
 
 import org.apache.drill.common.config.DrillProperties;
 import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.logical.StoragePluginConfig;
 import org.apache.drill.exec.ExecConstants;
 import org.apache.drill.exec.ZookeeperHelper;
 import org.apache.drill.exec.client.DrillClient;
@@ -50,16 +50,12 @@
 import org.apache.drill.exec.proto.UserBitShared.QueryType;
 import org.apache.drill.exec.rpc.user.QueryDataBatch;
 import org.apache.drill.exec.server.Drillbit;
-import org.apache.drill.exec.server.DrillbitContext;
 import org.apache.drill.exec.server.RemoteServiceSet;
 import org.apache.drill.exec.store.SchemaFactory;
-import org.apache.drill.exec.store.StoragePlugin;
 import org.apache.drill.exec.store.StoragePluginRegistry;
 import org.apache.drill.exec.store.StoragePluginRegistryImpl;
 import org.apache.drill.exec.store.dfs.FileSystemConfig;
-import org.apache.drill.exec.store.dfs.FileSystemPlugin;
 import org.apache.drill.exec.store.dfs.WorkspaceConfig;
-import org.apache.drill.exec.store.mock.MockStorageEngine;
 import org.apache.drill.exec.store.mock.MockStorageEngineConfig;
 import org.apache.drill.exec.store.sys.store.provider.ZookeeperPersistentStoreProvider;
 import org.apache.drill.exec.util.StoragePluginTestUtils;
@@ -68,6 +64,7 @@
 import org.apache.drill.shaded.guava.com.google.common.collect.ImmutableMap;
 import org.apache.drill.shaded.guava.com.google.common.io.Resources;
 import org.apache.drill.test.DrillTestWrapper.TestServices;
+import org.apache.hadoop.fs.FileSystem;
 
 /**
  * Test fixture to start a Drillbit with provide options, create a client, and
@@ -121,7 +118,8 @@
       // storage. Profiles will go here when running in distributed
       // mode.
 
-      put(ZookeeperPersistentStoreProvider.DRILL_EXEC_SYS_STORE_PROVIDER_ZK_BLOBROOT, "/tmp/drill/tests");
+      put(ZookeeperPersistentStoreProvider.DRILL_EXEC_SYS_STORE_PROVIDER_ZK_BLOBROOT,
+          "/tmp/drill/tests");
     }
   };
 
@@ -152,7 +150,7 @@
       // Translate exceptions to unchecked to avoid cluttering
       // tests. Failures will simply fail the test itself.
 
-      throw new IllegalStateException( "Cluster fixture setup failed", e );
+      throw new IllegalStateException("Cluster fixture setup failed", e);
     }
   }
 
@@ -278,19 +276,14 @@
         builder.dirTestWatcher.getRootDir(), SchemaFactory.DEFAULT_WS_NAME);
 
     // Create the mock data plugin
-
     MockStorageEngineConfig config = MockStorageEngineConfig.INSTANCE;
-    MockStorageEngine plugin = new MockStorageEngine(
-        MockStorageEngineConfig.INSTANCE, bit.getContext(),
-        MockStorageEngineConfig.NAME);
     config.setEnabled(true);
-    ((StoragePluginRegistryImpl) pluginRegistry).addPluginToPersistentStoreIfAbsent(
-        MockStorageEngineConfig.NAME, config, plugin);
+    pluginRegistry.put(MockStorageEngineConfig.NAME, config);
   }
 
   private void applyOptions() throws Exception {
-    // Apply system options
 
+    // Apply system options
     if (builder.systemOptions != null) {
       for (ClusterFixtureBuilder.RuntimeOption option : builder.systemOptions) {
         clientFixture().alterSystem(option.key, option.value);
@@ -298,7 +291,6 @@
     }
 
     // Apply session options.
-
     if (builder.sessionOptions != null) {
       for (ClusterFixtureBuilder.RuntimeOption option : builder.sessionOptions) {
         clientFixture().alterSession(option.key, option.value);
@@ -369,7 +361,6 @@
    *
    * @return a JDBC connection to the default Drillbit
    */
-
   public Connection jdbcConnection() {
     try {
       Class.forName("org.apache.drill.jdbc.Driver");
@@ -398,14 +389,12 @@
    * the test code to detect any state corruption which only shows
    * itself when shutting down resources (memory leaks, for example.)
    */
-
   @Override
   public void close() throws Exception {
     Exception ex = null;
 
     // Close clients. Clients remove themselves from the client
     // list.
-
     while (!clients.isEmpty()) {
       ex = safeClose(clients.get(0), ex);
     }
@@ -472,11 +461,14 @@
     return ex;
   }
 
-  public void defineStoragePlugin(Function<DrillbitContext, StoragePlugin> pluginFactory) {
-    for (Drillbit drillbit : drillbits()) {
-      StoragePluginRegistryImpl registry = (StoragePluginRegistryImpl) drillbit.getContext().getStorage();
-      StoragePlugin plugin = pluginFactory.apply(drillbit.getContext());
-      registry.addPluginToPersistentStoreIfAbsent(plugin.getName(), plugin.getConfig(), plugin);
+  public void defineStoragePlugin(String name, StoragePluginConfig config) {
+    try {
+      for (Drillbit drillbit : drillbits()) {
+        StoragePluginRegistryImpl registry = (StoragePluginRegistryImpl) drillbit.getContext().getStorage();
+        registry.put(name, config);
+      }
+    } catch (ExecutionSetupException e) {
+      throw new IllegalStateException("Plugin definition failed", e);
     }
   }
 
@@ -489,7 +481,6 @@
    * @param path directory location (usually local)
    * @param defaultFormat default format for files in the schema
    */
-
   public void defineWorkspace(String pluginName, String schemaName, String path,
       String defaultFormat) {
     defineWorkspace(pluginName, schemaName, path, defaultFormat, null);
@@ -504,7 +495,9 @@
         // This functionality is supposed to work in tests. Change
         // exception to unchecked to make test code simpler.
 
-        throw new IllegalStateException(e);
+        throw new IllegalStateException(String.format(
+            "Failed to define a workspace for plugin %s, schema %s, path %s, default format %s",
+            pluginName, schemaName, path, defaultFormat), e);
       }
     }
   }
@@ -513,8 +506,7 @@
       String schemaName, String path, String defaultFormat, FormatPluginConfig format)
       throws ExecutionSetupException {
     final StoragePluginRegistry pluginRegistry = drillbit.getContext().getStorage();
-    final FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin(pluginName);
-    final FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
+    final FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getConfig(pluginName);
     final WorkspaceConfig newTmpWSConfig = new WorkspaceConfig(path, true, defaultFormat, false);
 
     Map<String, WorkspaceConfig> newWorkspaces = new HashMap<>();
@@ -549,15 +541,9 @@
                              String pluginName,
                              Map<String, FormatPluginConfig> formats) throws ExecutionSetupException {
     StoragePluginRegistry pluginRegistry = drillbit.getContext().getStorage();
-    FileSystemPlugin plugin = (FileSystemPlugin) pluginRegistry.getPlugin(pluginName);
-    FileSystemConfig pluginConfig = (FileSystemConfig) plugin.getConfig();
-
-    Map<String, FormatPluginConfig> newFormats = new HashMap<>();
-    Optional.ofNullable(pluginConfig.getFormats())
-      .ifPresent(newFormats::putAll);
-    newFormats.putAll(formats);
-
-    updatePlugin(pluginRegistry, pluginName, pluginConfig, null, newFormats);
+    FileSystemConfig pluginConfig = (FileSystemConfig) pluginRegistry.getConfig(pluginName);
+    pluginConfig = pluginConfig.copyWithFormats(formats);
+    pluginRegistry.put(pluginName, pluginConfig);
   }
 
   private void updatePlugin(StoragePluginRegistry pluginRegistry,
@@ -572,7 +558,7 @@
       newFormats == null ? pluginConfig.getFormats() : newFormats);
     newPluginConfig.setEnabled(pluginConfig.isEnabled());
 
-    pluginRegistry.createOrUpdate(pluginName, newPluginConfig, true);
+    pluginRegistry.put(pluginName, newPluginConfig);
   }
 
   public static final String EXPLAIN_PLAN_TEXT = "text";
@@ -584,8 +570,9 @@
     Properties props = new Properties();
     props.putAll(ClusterFixture.TEST_CONFIGURATIONS);
     props.setProperty(ExecConstants.DRILL_TMP_DIR, dirTestWatcher.getTmpDir().getAbsolutePath());
-    props.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getUdfDir().getAbsolutePath());
+    props.setProperty(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getHomeDir().getAbsolutePath());
     props.setProperty(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH, dirTestWatcher.getStoreDir().getAbsolutePath());
+    props.setProperty(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
 
     builder.configBuilder.configProps(props);
     return builder;
@@ -609,7 +596,6 @@
    * Shim class to allow the {@link TestBuilder} class to work with the
    * cluster fixture.
    */
-
   public static class FixtureTestServices implements TestServices {
 
     private final ClientFixture client;
@@ -687,7 +673,6 @@
    * @param resource path to the resource
    * @return the resource contents as a string
    */
-
   public static String loadResource(String resource) {
     try {
       return getResource(resource);
@@ -703,7 +688,6 @@
    * @param path resource path with optional leading slash
    * @return same path without the leading slash
    */
-
   public static String trimSlash(String path) {
     if (path == null) {
       return path;
@@ -725,7 +709,6 @@
    * @return location of the directory which can be used to create
    * temporary input files
    */
-
   public File makeDataDir(String key, String defaultFormat, FormatPluginConfig formatPluginConfig) {
     File dir = builder.dirTestWatcher.makeSubDir(Paths.get(key));
     defineWorkspace("dfs", key, dir.getAbsolutePath(), defaultFormat, formatPluginConfig);
@@ -747,7 +730,6 @@
    *
    * @return query profile directory
    */
-
   public File getProfileDir() {
     File baseDir;
     if (usesZk) {
@@ -757,4 +739,12 @@
     }
     return new File(baseDir, "profiles");
   }
+
+  public StoragePluginRegistry storageRegistry() {
+    return drillbit().getContext().getStorage();
+  }
+
+  public StoragePluginRegistry storageRegistry(String name) {
+    return drillbit(name).getContext().getStorage();
+  }
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixtureBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixtureBuilder.java
index 9657a5e..24beb3a 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixtureBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterFixtureBuilder.java
@@ -31,7 +31,6 @@
  * builder starts an embedded Drillbit, with the "dfs" name space,
  * a max width (parallelization) of 2.
  */
-
 public class ClusterFixtureBuilder {
 
   public static class RuntimeOption {
@@ -46,7 +45,6 @@
 
   // Values in the drill-module.conf file for values that are customized
   // in the defaults.
-
   public static final int DEFAULT_ZK_REFRESH = 500; // ms
 
   protected ConfigBuilder configBuilder = new ConfigBuilder();
@@ -195,7 +193,7 @@
    * @param bitNames array of (unique) Drillbit names
    * @return this builder
    */
-  public ClusterFixtureBuilder withBits(String bitNames[]) {
+  public ClusterFixtureBuilder withBits(String...bitNames) {
     this.bitNames = bitNames;
     bitCount = bitNames.length;
     return this;
@@ -276,8 +274,6 @@
    * {@link ClusterFixture#clientBuilder()}. Using the client builder
    * also lets you set client-side options in the rare cases that you
    * need them.
-   *
-   * @return
    */
   public ClusterFixture build() {
     return new ClusterFixture(this);
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterMockStorageFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterMockStorageFixture.java
index fef4bd1..9588ea9 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterMockStorageFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterMockStorageFixture.java
@@ -17,35 +17,38 @@
  */
 package org.apache.drill.test;
 
+import org.apache.drill.common.exceptions.ExecutionSetupException;
 import org.apache.drill.exec.server.Drillbit;
 import org.apache.drill.exec.store.StoragePluginRegistry;
-import org.apache.drill.exec.store.StoragePluginRegistryImpl;
 import org.apache.drill.exec.store.mock.MockBreakageStorage;
-import org.apache.drill.exec.store.mock.MockStorageEngineConfig;
+import org.apache.drill.exec.store.mock.MockBreakageStorage.MockBreakageStorageEngineConfig;
 
 public class ClusterMockStorageFixture extends ClusterFixture {
+
   ClusterMockStorageFixture(ClusterFixtureBuilder builder) {
     super(builder);
-
   }
 
   /**
    * This should be called after bits are started
-   * @param name nthe mock storage name we are going to create
+   * @param name the mock storage name we are going to create
    */
   public void insertMockStorage(String name, boolean breakRegisterSchema) {
-    for (Drillbit bit : drillbits()) {
+    for (Drillbit bit: drillbits()) {
 
       // Bit name and registration.
       final StoragePluginRegistry pluginRegistry = bit.getContext().getStorage();
-      MockStorageEngineConfig config = MockStorageEngineConfig.INSTANCE;
-      MockBreakageStorage plugin = new MockBreakageStorage(
-          MockStorageEngineConfig.INSTANCE, bit.getContext(), name);
-      config.setEnabled(true);
-      ((StoragePluginRegistryImpl) pluginRegistry).addPluginToPersistentStoreIfAbsent(name, config, plugin);
+      MockBreakageStorage plugin;
+      try {
+        MockBreakageStorageEngineConfig config = MockBreakageStorageEngineConfig.INSTANCE;
+        config.setEnabled(true);
+        pluginRegistry.put(name, config);
+        plugin = (MockBreakageStorage) pluginRegistry.getPlugin(name);
+      } catch (ExecutionSetupException e) {
+        throw new IllegalStateException(e);
+      }
 
       plugin.setBreakRegister(breakRegisterSchema);
     }
   }
-
 }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
index 542ad73..7255d94 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ClusterTest.java
@@ -70,7 +70,6 @@
  * The try-with-resources block ensures that the cluster is shut down at
  * the end of each test method.
  */
-
 public class ClusterTest extends DrillTest {
 
   @ClassRule
@@ -94,7 +93,6 @@
    * cluster fixture.
    * @return a test builder that works against the cluster fixture
    */
-
   public TestBuilder testBuilder() {
     return client.testBuilder();
   }
@@ -104,7 +102,6 @@
    * cluster fixture.
    * @return the contents of the resource text file
    */
-
   public String getFile(String resource) throws IOException {
     return ClusterFixture.getResource(resource);
   }
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java b/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java
index 2d4dd78..98baca6 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/ConfigBuilder.java
@@ -39,8 +39,8 @@
 public class ConfigBuilder {
 
   protected String configResource;
-  protected Properties configProps = createDefaultProperties();
-  protected CaseInsensitiveMap<OptionDefinition> definitions = SystemOptionManager.createDefaultOptionDefinitions();
+  protected final Properties configProps = createDefaultProperties();
+  protected final CaseInsensitiveMap<OptionDefinition> definitions = SystemOptionManager.createDefaultOptionDefinitions();
 
   /**
    * Use the given configuration properties as overrides.
@@ -89,7 +89,6 @@
    * @return this builder
    * @see #put(String, Object)
    */
-
   public ConfigBuilder resource(String configResource) {
 
     if (configProps != null) {
@@ -100,7 +99,6 @@
     // TypeSafe gets unhappy about a leading slash, but other functions
     // require it. Silently discard the leading slash if given to
     // preserve the test writer's sanity.
-
     this.configResource = ClusterFixture.trimSlash(configResource);
     return this;
   }
@@ -111,7 +109,6 @@
    * @param value property value
    * @return this builder
    */
-
   public ConfigBuilder put(String key, Object value) {
     if (hasResource()) {
       // Drill provides no constructor for this use case.
@@ -127,8 +124,7 @@
     return this;
   }
 
-  private static Properties createDefaultProperties()
-  {
+  private static Properties createDefaultProperties() {
     Properties properties = new Properties();
     properties.put(ExecConstants.CAST_EMPTY_STRING_TO_NULL, "false");
     properties.put(ExecConstants.USE_DYNAMIC_UDFS_KEY, "false");
@@ -152,7 +148,6 @@
     // Create a config
     // Because of the way DrillConfig works, we can set the ZK
     // connection string only if a property set is provided.
-
     if (hasResource()) {
       return DrillConfig.create(configResource);
     } else if (configProps != null) {
@@ -166,9 +161,10 @@
     Properties stringProps = new Properties();
     Properties collectionProps = new Properties();
 
-    // Filter out the collection type configs and other configs which can be converted to string.
-    for(Entry<Object, Object> entry : configProps.entrySet()) {
-      if(entry.getValue() instanceof Collection<?>) {
+    // Filter out the collection type configs and other configs
+    // which can be converted to string.
+    for (Entry<Object, Object> entry : configProps.entrySet()) {
+      if (entry.getValue() instanceof Collection<?>) {
         collectionProps.put(entry.getKey(), entry.getValue());
       } else {
         stringProps.setProperty(entry.getKey().toString(), entry.getValue().toString());
diff --git a/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java b/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java
index 3ded2f8..16d90e8 100644
--- a/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java
+++ b/exec/java-exec/src/test/java/org/apache/drill/test/OperatorFixture.java
@@ -62,6 +62,7 @@
 import org.apache.drill.exec.server.options.OptionManager;
 import org.apache.drill.exec.server.options.SystemOptionManager;
 import org.apache.drill.exec.store.PartitionExplorer;
+import org.apache.drill.exec.store.sys.PersistentStoreProvider;
 import org.apache.drill.exec.store.sys.store.provider.LocalPersistentStoreProvider;
 import org.apache.drill.exec.testing.ExecutionControls;
 import org.apache.drill.exec.work.filter.RuntimeFilterWritable;
@@ -72,6 +73,7 @@
 import org.apache.drill.exec.physical.rowSet.RowSet;
 import org.apache.drill.exec.physical.rowSet.RowSet.ExtendableRowSet;
 import org.apache.drill.exec.physical.rowSet.RowSetBuilder;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.security.UserGroupInformation;
 import java.util.concurrent.TimeUnit;
 
@@ -127,7 +129,8 @@
         configBuilder.put(ExecConstants.SYS_STORE_PROVIDER_LOCAL_PATH, dirTestWatcher.getStoreDir().getAbsolutePath());
         configBuilder.put(ExecConstants.SPILL_DIRS, Lists.newArrayList(dirTestWatcher.getSpillDir().getAbsolutePath()));
         configBuilder.put(ExecConstants.HASHJOIN_SPILL_DIRS, Lists.newArrayList(dirTestWatcher.getSpillDir().getAbsolutePath()));
-        configBuilder.put(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getUdfDir().getAbsolutePath());
+        configBuilder.put(ExecConstants.UDF_DIRECTORY_ROOT, dirTestWatcher.getHomeDir().getAbsolutePath());
+        configBuilder.put(ExecConstants.UDF_DIRECTORY_FS, FileSystem.DEFAULT_FS);
       }
     }
 
@@ -350,7 +353,7 @@
 
   private final SystemOptionManager options;
   private final MockFragmentContext context;
-  private LocalPersistentStoreProvider provider;
+  private PersistentStoreProvider provider;
 
   protected OperatorFixture(Builder builder) {
     config = builder.configBuilder().build();
@@ -358,21 +361,19 @@
     options = createOptionManager();
     context = new MockFragmentContext(config, options, allocator, builder.scanExecutor, builder.scanDecoderExecutor);
     applySystemOptions(builder.systemOptions);
-   }
+  }
 
-   private void applySystemOptions(List<RuntimeOption> systemOptions) {
+  private void applySystemOptions(List<RuntimeOption> systemOptions) {
     for (RuntimeOption option : systemOptions) {
       options.setLocalOption(option.key, option.value);
     }
   }
 
-  public OptionManager getOptionManager()
-  {
+  public OptionManager getOptionManager() {
     return options;
   }
 
-  private SystemOptionManager createOptionManager()
-  {
+  private SystemOptionManager createOptionManager() {
     try {
       provider = new LocalPersistentStoreProvider(config);
       provider.start();
@@ -389,7 +390,7 @@
     }
 
     return options;
-   }
+  }
 
   public FragmentContext getFragmentContext() { return context; }
 
diff --git a/exec/java-exec/src/test/resources/plugins/bogus-bootstrap.json b/exec/java-exec/src/test/resources/plugins/bogus-bootstrap.json
new file mode 100644
index 0000000..835866e
--- /dev/null
+++ b/exec/java-exec/src/test/resources/plugins/bogus-bootstrap.json
@@ -0,0 +1,22 @@
+{
+  "storage" : {
+    "dfs" : {
+      "type" : "file",
+      "connection" : "file:///",
+      "workspaces" : {
+        "root" : {
+          "location" : "/",
+          "writable" : false,
+          "allowAccessOutsideWorkspace" : false
+        },
+        "tmp" : {
+          "location" : "/tmp",
+          "writable" : true,
+          "allowAccessOutsideWorkspace" : false
+        }
+      },
+      "imABadBoy" : "this file is intentionally bogus to test errors",
+      "enabled" : true
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/resources/plugins/dup-bootstrap.json b/exec/java-exec/src/test/resources/plugins/dup-bootstrap.json
new file mode 100644
index 0000000..b90b92c
--- /dev/null
+++ b/exec/java-exec/src/test/resources/plugins/dup-bootstrap.json
@@ -0,0 +1,28 @@
+{
+  "storage" : {
+    "cp" : {
+      "type" : "file",
+      "connection" : "classpath:///",
+      "formats" : {
+        "csv" : {
+          "type" : "text",
+          "extensions" : [ "csv" ],
+          "delimiter" : ","
+        }
+      },
+      "enabled" : true
+    },
+    "cp" : {
+      "type" : "file",
+      "connection" : "classpath:///",
+      "formats" : {
+        "tsv" : {
+          "type" : "text",
+          "extensions" : [ "tsv" ],
+          "delimiter" : "\t"
+        }
+      },
+      "enabled" : true
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/resources/plugins/mock-format-bootstrap.json b/exec/java-exec/src/test/resources/plugins/mock-format-bootstrap.json
new file mode 100644
index 0000000..c7a53ac
--- /dev/null
+++ b/exec/java-exec/src/test/resources/plugins/mock-format-bootstrap.json
@@ -0,0 +1,14 @@
+{
+  "storage" : {
+    "dfs" : {
+      "type" : "file",
+      "formats" : {
+         "bsv" : {
+          "type" : "text",
+          "extensions" : [ "bsv" ],
+          "delimiter" : "!"
+        }
+      }
+    }
+  }
+}
diff --git a/exec/java-exec/src/test/resources/plugins/mock-plugin-upgrade.json b/exec/java-exec/src/test/resources/plugins/mock-plugin-upgrade.json
new file mode 100644
index 0000000..ad39fa1
--- /dev/null
+++ b/exec/java-exec/src/test/resources/plugins/mock-plugin-upgrade.json
@@ -0,0 +1,195 @@
+{
+  "storage" : {
+    "dfs" : {
+      "type" : "file",
+      "connection" : "file:///",
+      "workspaces" : {
+        "root" : {
+          "location" : "/",
+          "writable" : false,
+          "allowAccessOutsideWorkspace" : false
+        },
+        "tmp" : {
+          "location" : "/tmp",
+          "writable" : true,
+          "allowAccessOutsideWorkspace" : false
+        }
+      },
+      "formats" : {
+        "csv" : {
+          "type" : "text",
+          "extensions" : [ "csv" ],
+          "delimiter" : ","
+        },
+        "tsv" : {
+          "type" : "text",
+          "extensions" : [ "tsv" ],
+          "delimiter" : "\t"
+        },
+        "httpd" : {
+          "type" : "httpd",
+          "logFormat" : "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"",
+          "timestampFormat" : "dd/MMM/yyyy:HH:mm:ss ZZ"
+        },
+        "parquet" : {
+          "type" : "parquet"
+        },
+        "json" : {
+          "type" : "json",
+          "extensions" : [ "json" ]
+        },
+        "pcap" : {
+          "type" : "pcap"
+        },
+        "pcapng" : {
+          "type" : "pcapng"
+        },
+        "avro" : {
+          "type" : "avro",
+          "extensions" : [ "avro" ]
+        },
+        "sequencefile" : {
+          "type" : "sequencefile",
+          "extensions" : [ "seq" ]
+        },
+        "csvh" : {
+          "type" : "text",
+          "extensions" : [ "csvh" ],
+          "delimiter" : ",",
+          "extractHeader" : true
+        },
+        "image" : {
+          "type" : "image",
+          "extensions" : [ "jpg", "jpeg", "jpe", "tif", "tiff", "dng", "psd", "png", "bmp", "gif", "ico", "pcx", "wav", "wave", "avi", "webp", "mov", "mp4", "m4a", "m4p", "m4b", "m4r", "m4v", "3gp", "3g2", "eps", "epsf", "epsi", "ai", "arw", "crw", "cr2", "nef", "orf", "raf", "rw2", "rwl", "srw", "x3f" ]
+        }
+      },
+      "enabled" : true
+    },
+    "s3" : {
+      "type" : "file",
+      "connection" : "s3a://my.bucket.location.com",
+      "config" : {
+        "fs.s3a.access.key" : "ID",
+        "fs.s3a.secret.key" : "SECRET"
+      },
+      "workspaces" : {
+        "root" : {
+          "location" : "/",
+          "writable" : false
+        },
+        "tmp" : {
+          "location" : "/tmp",
+          "writable" : true
+        }
+      },
+      "formats" : {
+        "psv" : {
+          "type" : "text",
+          "extensions" : [ "tbl" ],
+          "delimiter" : "|"
+        },
+        "csv" : {
+          "type" : "text",
+          "extensions" : [ "csv" ],
+          "delimiter" : ","
+        },
+        "tsv" : {
+          "type" : "text",
+          "extensions" : [ "tsv" ],
+          "delimiter" : "\t"
+        },
+        "bsv" : {
+          "type" : "text",
+          "extensions" : [ "bsv" ],
+          "delimiter" : "!"
+        },
+        "parquet" : {
+          "type" : "parquet"
+        },
+        "json" : {
+          "type" : "json",
+          "extensions" : [ "json" ]
+        },
+        "avro" : {
+          "type" : "avro"
+        },
+        "sequencefile" : {
+          "type" : "sequencefile",
+          "extensions" : [ "seq" ]
+        },
+        "csvh" : {
+          "type" : "text",
+          "extensions" : [ "csvh" ],
+          "delimiter" : ",",
+          "extractHeader" : true
+        }
+      },
+      "enabled" : false
+    },
+    "local" : {
+      "type" : "file",
+      "connection" : "file:///",
+      "workspaces" : {
+        "root" : {
+          "location" : "/",
+          "writable" : false,
+          "allowAccessOutsideWorkspace" : false
+        },
+        "tmp" : {
+          "location" : "/tmp",
+          "writable" : true,
+          "allowAccessOutsideWorkspace" : false
+        }
+      },
+      "formats" : {
+        "csv" : {
+          "type" : "text",
+          "extensions" : [ "csv" ],
+          "delimiter" : ","
+        },
+        "tsv" : {
+          "type" : "text",
+          "extensions" : [ "tsv" ],
+          "delimiter" : "\t"
+        },
+        "httpd" : {
+          "type" : "httpd",
+          "logFormat" : "%h %l %u %t \"%r\" %>s %b \"%{Referer}i\" \"%{User-agent}i\"",
+          "timestampFormat" : "dd/MMM/yyyy:HH:mm:ss ZZ"
+        },
+        "parquet" : {
+          "type" : "parquet"
+        },
+        "json" : {
+          "type" : "json",
+          "extensions" : [ "json" ]
+        },
+        "pcap" : {
+          "type" : "pcap"
+        },
+        "pcapng" : {
+          "type" : "pcapng"
+        },
+        "avro" : {
+          "type" : "avro",
+          "extensions" : [ "avro" ]
+        },
+        "sequencefile" : {
+          "type" : "sequencefile",
+          "extensions" : [ "seq" ]
+        },
+        "csvh" : {
+          "type" : "text",
+          "extensions" : [ "csvh" ],
+          "delimiter" : ",",
+          "extractHeader" : true
+        },
+        "image" : {
+          "type" : "image",
+          "extensions" : [ "jpg", "jpeg", "jpe", "tif", "tiff", "dng", "psd", "png", "bmp", "gif", "ico", "pcx", "wav", "wave", "avi", "webp", "mov", "mp4", "m4a", "m4p", "m4b", "m4r", "m4v", "3gp", "3g2", "eps", "epsf", "epsi", "ai", "arw", "crw", "cr2", "nef", "orf", "raf", "rw2", "rwl", "srw", "x3f" ]
+        }
+      },
+      "enabled" : true
+    }
+  }
+}
diff --git a/logical/src/main/java/org/apache/drill/common/JSONOptions.java b/logical/src/main/java/org/apache/drill/common/JSONOptions.java
index 43e51b2..dcb4700 100644
--- a/logical/src/main/java/org/apache/drill/common/JSONOptions.java
+++ b/logical/src/main/java/org/apache/drill/common/JSONOptions.java
@@ -66,7 +66,7 @@
   }
 
   @SuppressWarnings("unchecked")
-  public <T> T getWith(LogicalPlanPersistence lpPersistance, Class<T> c) {
+  public <T> T getWith(ObjectMapper mapper, Class<T> c) {
     try {
       if (opaque != null) {
         final Class<?> opaqueClass = opaque.getClass();
@@ -88,7 +88,7 @@
       }
 
       //logger.debug("Read tree {}", root);
-      return lpPersistance.getMapper().treeToValue(root, c);
+      return mapper.treeToValue(root, c);
     } catch (JsonProcessingException e) {
       throw new LogicalPlanParsingException(String.format("Failure while trying to convert late bound " +
         "json options to type of %s. Reference was originally located at line %d, column %d.",
diff --git a/logical/src/main/java/org/apache/drill/common/config/LogicalPlanPersistence.java b/logical/src/main/java/org/apache/drill/common/config/LogicalPlanPersistence.java
index e04f3ad..2ac5b8a 100644
--- a/logical/src/main/java/org/apache/drill/common/config/LogicalPlanPersistence.java
+++ b/logical/src/main/java/org/apache/drill/common/config/LogicalPlanPersistence.java
@@ -22,10 +22,12 @@
 import com.fasterxml.jackson.databind.ser.impl.SimpleFilterProvider;
 import org.apache.drill.common.expression.LogicalExpression;
 import org.apache.drill.common.expression.SchemaPath;
-import org.apache.drill.common.logical.FormatPluginConfigBase;
-import org.apache.drill.common.logical.StoragePluginConfigBase;
-import org.apache.drill.common.logical.data.LogicalOperatorBase;
+import org.apache.drill.common.logical.FormatPluginConfig;
+import org.apache.drill.common.logical.StoragePluginConfig;
+import org.apache.drill.common.logical.data.LogicalOperator;
 import org.apache.drill.common.scanner.persistence.ScanResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.core.JsonParser.Feature;
@@ -35,6 +37,8 @@
 
 
 public class LogicalPlanPersistence {
+  private static final Logger logger = LoggerFactory.getLogger(LogicalPlanPersistence.class);
+
   private final ObjectMapper mapper;
 
   public LogicalPlanPersistence(DrillConfig conf, ScanResult scanResult) {
@@ -54,9 +58,12 @@
     mapper.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
     mapper.configure(Feature.ALLOW_COMMENTS, true);
     mapper.setFilterProvider(new SimpleFilterProvider().setFailOnUnknownId(false));
-    registerSubtypes(LogicalOperatorBase.getSubTypes(scanResult));
-    registerSubtypes(StoragePluginConfigBase.getSubTypes(scanResult));
-    registerSubtypes(FormatPluginConfigBase.getSubTypes(scanResult));
+    // For LogicalOperatorBase
+    registerSubtypes(getSubTypes(scanResult, LogicalOperator.class));
+    // For StoragePluginConfigBase
+    registerSubtypes(getSubTypes(scanResult, StoragePluginConfig.class));
+    // For FormatPluginConfigBase
+    registerSubtypes(getSubTypes(scanResult, FormatPluginConfig.class));
   }
 
   public ObjectMapper getMapper() {
@@ -68,4 +75,29 @@
       mapper.registerSubtypes(type);
     }
   }
+
+  /**
+   * Scan for implementations of the given interface.
+   *
+   * @param classpathScan classpath scan result used to find implementations of {@code parent}
+   * @return set of classes that implement the given interface
+   */
+  public static <T> Set<Class<? extends T>> getSubTypes(final ScanResult classpathScan, Class<T> parent) {
+    Set<Class<? extends T>> subclasses = classpathScan.getImplementations(parent);
+    if (logger.isDebugEnabled()) {
+      StringBuilder sb = new StringBuilder()
+        .append("Found ")
+        .append(subclasses.size())
+        .append(" ")
+        .append(parent.getSimpleName())
+        .append(" subclasses:\n");
+      for (Class<?> c : subclasses) {
+        sb.append('\t');
+        sb.append(c.getName());
+        sb.append('\n');
+      }
+      logger.debug(sb.toString());
+    }
+    return subclasses;
+  }
 }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfig.java b/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfig.java
index 5674933..b0d1180 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfig.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfig.java
@@ -34,5 +34,4 @@
  */
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
 public interface FormatPluginConfig {
-
 }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java b/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java
index 90ffaa8..4848524 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/FormatPluginConfigBase.java
@@ -17,42 +17,5 @@
  */
 package org.apache.drill.common.logical;
 
-import java.util.Set;
-
-import org.apache.drill.common.scanner.persistence.ScanResult;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
 public abstract class FormatPluginConfigBase implements FormatPluginConfig {
-
-  private static final Logger logger = LoggerFactory.getLogger(FormatPluginConfigBase.class);
-
-  /**
-   * scan for implementations of see <b>FormatPlugin</b>.
-   *
-   * @param classpathScan - Drill configuration object, used to find the packages to scan
-   * @return - list of classes that implement the interface.
-   */
-  public static Set<Class<? extends FormatPluginConfig>> getSubTypes(final ScanResult classpathScan) {
-    final Set<Class<? extends FormatPluginConfig>> pluginClasses = classpathScan.getImplementations(FormatPluginConfig.class);
-    if (logger.isDebugEnabled()) {
-      StringBuilder sb = new StringBuilder();
-      sb.append("Found ");
-      sb.append(pluginClasses.size());
-      sb.append(" format plugin configuration classes:\n");
-      for (Class<?> c : pluginClasses) {
-        sb.append('\t');
-        sb.append(c.getName());
-        sb.append('\n');
-      }
-      logger.debug(sb.toString());
-    }
-    return pluginClasses;
-  }
-
-  @Override
-  public abstract boolean equals(Object o);
-
-  @Override
-  public abstract int hashCode();
 }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfig.java b/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfig.java
index 22c5ffd..970b58e 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfig.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfig.java
@@ -23,7 +23,6 @@
 
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
 public abstract class StoragePluginConfig {
-
   private Boolean enabled;
 
   /**
@@ -35,7 +34,6 @@
     return enabled != null && enabled;
   }
 
-
   public void setEnabled(Boolean enabled) {
     this.enabled = enabled;
   }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfigBase.java b/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfigBase.java
index 957c427..d8c76d7 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfigBase.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/StoragePluginConfigBase.java
@@ -17,21 +17,5 @@
  */
 package org.apache.drill.common.logical;
 
-import java.util.Set;
-
-import org.apache.drill.common.scanner.persistence.ScanResult;
-
-
 public abstract class StoragePluginConfigBase extends StoragePluginConfig {
-  private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(StoragePluginConfigBase.class);
-
-  public static Set<Class<? extends StoragePluginConfig>> getSubTypes(final ScanResult classpathScan) {
-    final Set<Class<? extends StoragePluginConfig>> packages = classpathScan.getImplementations(StoragePluginConfig.class);
-    logger.debug("Found {} logical operator classes: {}.", packages.size(), packages);
-    return packages;
-  }
-
-  @Override
-  public abstract boolean equals(Object o);
-
 }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java b/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
index de7e5cc..7c6cf58 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/data/LogicalOperatorBase.java
@@ -20,11 +20,9 @@
 import java.util.ArrayList;
 import java.util.Collection;
 import java.util.List;
-import java.util.Set;
 
 import org.apache.drill.common.graph.GraphVisitor;
 import org.apache.drill.common.logical.ValidationError;
-import org.apache.drill.common.scanner.persistence.ScanResult;
 
 import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonInclude.Include;
@@ -32,9 +30,8 @@
 
 
 public abstract class LogicalOperatorBase implements LogicalOperator{
-  static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(LogicalOperatorBase.class);
 
-  private List<LogicalOperator> children = new ArrayList<LogicalOperator>();
+  private final List<LogicalOperator> children = new ArrayList<>();
 
   private String memo;
 
@@ -83,10 +80,4 @@
   public void setMemo(String memo) {
     this.memo = memo;
   }
-
-  public static Set<Class<? extends LogicalOperator>> getSubTypes(final ScanResult classpathScan) {
-    final Set<Class<? extends LogicalOperator>> ops = classpathScan.getImplementations(LogicalOperator.class);
-    logger.debug("Found {} logical operator classes: {}.", ops.size(), ops);
-    return ops;
-  }
 }
diff --git a/logical/src/main/java/org/apache/drill/common/logical/data/Scan.java b/logical/src/main/java/org/apache/drill/common/logical/data/Scan.java
index 838f3cb..b879fcc 100644
--- a/logical/src/main/java/org/apache/drill/common/logical/data/Scan.java
+++ b/logical/src/main/java/org/apache/drill/common/logical/data/Scan.java
@@ -72,7 +72,5 @@
     public Scan build() {
       return new Scan(storageEngine, selection);
     }
-
   }
-
 }