Merge pull request #3069 from krichter722/checkstyle-jdbc

STORM-3451: jdbc: fix all checkstyle warnings
diff --git a/examples/storm-hbase-examples/pom.xml b/examples/storm-hbase-examples/pom.xml
index dd733dc..65b71fe 100644
--- a/examples/storm-hbase-examples/pom.xml
+++ b/examples/storm-hbase-examples/pom.xml
@@ -87,7 +87,7 @@
                 <artifactId>maven-checkstyle-plugin</artifactId>
                 <!--Note - the version would be inherited-->
                 <configuration>
-                    <maxAllowedViolations>16</maxAllowedViolations>
+                    <maxAllowedViolations>0</maxAllowedViolations>
                 </configuration>
             </plugin>
             <plugin>
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java
index 1c365c4..7f3afb1 100644
--- a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/LookupWordCount.java
@@ -38,7 +38,6 @@
         config.put("hbase.conf", hbConf);
 
         WordSpout spout = new WordSpout();
-        TotalWordCounter totalBolt = new TotalWordCounter();
 
         SimpleHBaseMapper mapper = new SimpleHBaseMapper().withRowKeyField("word");
         HBaseProjectionCriteria projectionCriteria = new HBaseProjectionCriteria();
@@ -46,14 +45,15 @@
 
         WordCountValueMapper rowToTupleMapper = new WordCountValueMapper();
 
-        HBaseLookupBolt hBaseLookupBolt = new HBaseLookupBolt("WordCount", mapper, rowToTupleMapper)
-            .withConfigKey("hbase.conf")
-            .withProjectionCriteria(projectionCriteria);
+        HBaseLookupBolt lookupBolt = new HBaseLookupBolt("WordCount", mapper, rowToTupleMapper)
+                .withConfigKey("hbase.conf")
+                .withProjectionCriteria(projectionCriteria);
 
         //wordspout -> lookupbolt -> totalCountBolt
         TopologyBuilder builder = new TopologyBuilder();
         builder.setSpout(WORD_SPOUT, spout, 1);
-        builder.setBolt(LOOKUP_BOLT, hBaseLookupBolt, 1).shuffleGrouping(WORD_SPOUT);
+        builder.setBolt(LOOKUP_BOLT, lookupBolt, 1).shuffleGrouping(WORD_SPOUT);
+        TotalWordCounter totalBolt = new TotalWordCounter();
         builder.setBolt(TOTAL_COUNT_BOLT, totalBolt, 1).fieldsGrouping(LOOKUP_BOLT, new Fields("columnName"));
         String topoName = "test";
         if (args.length == 1) {
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java
index 015ce85..0c929a3 100644
--- a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/PersistentWordCount.java
@@ -12,7 +12,6 @@
 
 package org.apache.storm.hbase.topology;
 
-
 import java.util.HashMap;
 import java.util.Map;
 import org.apache.storm.Config;
@@ -63,8 +62,10 @@
         if (args.length == 2) {
             topoName = args[0];
         } else if (args.length == 4) {
-            System.out.println("hdfs url: " + args[0] + ", keytab file: " + args[2] +
-                               ", principal name: " + args[3] + ", toplogy name: " + args[1]);
+            System.out.println("hdfs url: " + args[0]
+                    + ", keytab file: " + args[2]
+                    + ", principal name: " + args[3]
+                    + ", topology name: " + args[1]);
             hbConf.put(HBaseSecurityUtil.STORM_KEYTAB_FILE_KEY, args[2]);
             hbConf.put(HBaseSecurityUtil.STORM_USER_NAME_KEY, args[3]);
             config.setNumWorkers(3);
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java
index a1017df..4199319 100644
--- a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/TotalWordCounter.java
@@ -12,6 +12,8 @@
 
 package org.apache.storm.hbase.topology;
 
+import static org.apache.storm.utils.Utils.tuple;
+
 import java.math.BigInteger;
 import java.util.Map;
 import java.util.Random;
@@ -24,8 +26,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.storm.utils.Utils.tuple;
-
 public class TotalWordCounter implements IBasicBolt {
 
     private static final Logger LOG = LoggerFactory.getLogger(TotalWordCounter.class);
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java
index 33acfab..d8f6eb2 100644
--- a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountClient.java
@@ -24,7 +24,7 @@
 /**
  * Connects to the 'WordCount' table and prints counts for each word.
  *
- * Assumes you have run (or are running) <code>PersistentWordCount</code>
+ * <p>Assumes you have run (or are running) <code>PersistentWordCount</code>
  */
 public class WordCountClient {
 
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
index 15ece78..9cb8115 100644
--- a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCountValueMapper.java
@@ -12,7 +12,6 @@
 
 package org.apache.storm.hbase.topology;
 
-
 import java.util.ArrayList;
 import java.util.List;
 import org.apache.hadoop.hbase.Cell;
@@ -26,15 +25,15 @@
 import org.apache.storm.tuple.Values;
 
 /**
- * Takes a Hbase result and returns a value list that has a value instance for each column and corresponding value. So if the result from
- * Hbase was
+ * Takes a Hbase result and returns a value list that has a value instance for each column and corresponding value. So
+ * if the result from Hbase was
  * <pre>
  * WORD, COUNT
  * apple, 10
  * bannana, 20
  * </pre>
  *
- * this will return
+ * <p>this will return
  * <pre>
  *     [WORD, apple]
  *     [COUNT, 10]
diff --git a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java
index 699b2ca..b2cadb8 100644
--- a/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java
+++ b/examples/storm-hbase-examples/src/main/java/org/apache/storm/hbase/topology/WordCounter.java
@@ -12,6 +12,8 @@
 
 package org.apache.storm.hbase.topology;
 
+import static org.apache.storm.utils.Utils.tuple;
+
 import java.util.Map;
 import org.apache.storm.task.TopologyContext;
 import org.apache.storm.topology.BasicOutputCollector;
@@ -20,8 +22,6 @@
 import org.apache.storm.tuple.Fields;
 import org.apache.storm.tuple.Tuple;
 
-import static org.apache.storm.utils.Utils.tuple;
-
 public class WordCounter implements IBasicBolt {
 
 
diff --git a/examples/storm-starter/pom.xml b/examples/storm-starter/pom.xml
index 97a6610..3835bbc 100644
--- a/examples/storm-starter/pom.xml
+++ b/examples/storm-starter/pom.xml
@@ -229,7 +229,7 @@
           <artifactId>maven-checkstyle-plugin</artifactId>
           <!--Note - the version would be inherited-->
           <configuration>
-              <maxAllowedViolations>263</maxAllowedViolations>
+              <maxAllowedViolations>0</maxAllowedViolations>
           </configuration>
       </plugin>
         <plugin>
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/BasicDRPCTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/BasicDRPCTopology.java
index 6bb787b..70e03a4 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/BasicDRPCTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/BasicDRPCTopology.java
@@ -29,6 +29,7 @@
  *
  * @see <a href="http://storm.apache.org/documentation/Distributed-RPC.html">Distributed RPC</a>
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class BasicDRPCTopology {
     public static void main(String[] args) throws Exception {
         Config conf = new Config();
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/BlobStoreAPIWordCountTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/BlobStoreAPIWordCountTopology.java
index caa751a..46df78e 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/BlobStoreAPIWordCountTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/BlobStoreAPIWordCountTopology.java
@@ -54,6 +54,7 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class BlobStoreAPIWordCountTopology {
     private static final Logger LOG = LoggerFactory.getLogger(BlobStoreAPIWordCountTopology.class);
     private static ClientBlobStore store; // Client API to invoke blob store API functionality
@@ -70,10 +71,10 @@
     // storm blobstore create --file blacklist.txt --acl o::rwa key
     private static void createBlobWithContent(String blobKey, ClientBlobStore clientBlobStore, File file)
         throws AuthorizationException, KeyAlreadyExistsException, IOException, KeyNotFoundException {
-        String stringBlobACL = "o::rwa";
-        AccessControl blobACL = BlobStoreAclHandler.parseAccessControl(stringBlobACL);
+        String stringBlobAcl = "o::rwa";
+        AccessControl blobAcl = BlobStoreAclHandler.parseAccessControl(stringBlobAcl);
         List<AccessControl> acls = new LinkedList<AccessControl>();
-        acls.add(blobACL); // more ACLs can be added here
+        acls.add(blobAcl); // more ACLs can be added here
         SettableBlobMeta settableBlobMeta = new SettableBlobMeta(acls);
         AtomicOutputStream blobStream = clientBlobStore.createBlob(blobKey, settableBlobMeta);
         blobStream.write(readFile(file).toString().getBytes());
@@ -214,17 +215,17 @@
 
     // Spout implementation
     public static class RandomSentenceSpout extends BaseRichSpout {
-        SpoutOutputCollector _collector;
+        SpoutOutputCollector collector;
 
         @Override
         public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
-            _collector = collector;
+            this.collector = collector;
         }
 
         @Override
         public void nextTuple() {
             Utils.sleep(100);
-            _collector.emit(new Values(getRandomSentence()));
+            collector.emit(new Values(getRandomSentence()));
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/ExclamationTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/ExclamationTopology.java
index 73f2067..22cb2ba 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/ExclamationTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/ExclamationTopology.java
@@ -54,17 +54,17 @@
     }
 
     public static class ExclamationBolt extends BaseRichBolt {
-        OutputCollector _collector;
+        OutputCollector collector;
 
         @Override
         public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
-            _collector = collector;
+            this.collector = collector;
         }
 
         @Override
         public void execute(Tuple tuple) {
-            _collector.emit(tuple, new Values(tuple.getString(0) + "!!!"));
-            _collector.ack(tuple);
+            collector.emit(tuple, new Values(tuple.getString(0) + "!!!"));
+            collector.ack(tuple);
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/FastWordCountTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/FastWordCountTopology.java
index f881a86..c066d16 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/FastWordCountTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/FastWordCountTopology.java
@@ -43,6 +43,7 @@
  * java.  This can show how fast the word count can run.
  */
 public class FastWordCountTopology {
+
     public static void printMetrics(Nimbus.Iface client, String name) throws Exception {
         ClusterSummary summary = client.getClusterInfo();
         String id = null;
@@ -80,8 +81,10 @@
             }
         }
         double avgLatency = weightedAvgTotal / acked;
-        System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency + " acked/sec: " +
-                           (((double) acked) / uptime + " failed: " + failed));
+        System.out.println("uptime: " + uptime
+                + " acked: " + acked
+                + " avgLatency: " + avgLatency
+                + " acked/sec: " + (((double) acked) / uptime + " failed: " + failed));
     }
 
     public static void kill(Nimbus.Iface client, String name) throws Exception {
@@ -130,19 +133,19 @@
             "this is a test of the emergency broadcast system this is only a test",
             "peter piper picked a peck of pickeled peppers"
         };
-        SpoutOutputCollector _collector;
-        Random _rand;
+        SpoutOutputCollector collector;
+        Random rand;
 
         @Override
         public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
-            _collector = collector;
-            _rand = ThreadLocalRandom.current();
+            this.collector = collector;
+            rand = ThreadLocalRandom.current();
         }
 
         @Override
         public void nextTuple() {
-            String sentence = CHOICES[_rand.nextInt(CHOICES.length)];
-            _collector.emit(new Values(sentence), sentence);
+            String sentence = CHOICES[rand.nextInt(CHOICES.length)];
+            collector.emit(new Values(sentence), sentence);
         }
 
         @Override
@@ -152,7 +155,7 @@
 
         @Override
         public void fail(Object id) {
-            _collector.emit(new Values(id), id);
+            collector.emit(new Values(id), id);
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/InOrderDeliveryTest.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/InOrderDeliveryTest.java
index 5d314e0..e3cfd25 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/InOrderDeliveryTest.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/InOrderDeliveryTest.java
@@ -38,6 +38,7 @@
 import org.apache.storm.utils.Utils;
 
 public class InOrderDeliveryTest {
+
     public static void printMetrics(Nimbus.Iface client, String name) throws Exception {
         ClusterSummary summary = client.getClusterInfo();
         String id = null;
@@ -75,8 +76,10 @@
             }
         }
         double avgLatency = weightedAvgTotal / acked;
-        System.out.println("uptime: " + uptime + " acked: " + acked + " avgLatency: " + avgLatency + " acked/sec: " +
-                           (((double) acked) / uptime + " failed: " + failed));
+        System.out.println("uptime: " + uptime
+                + " acked: " + acked
+                + " avgLatency: " + avgLatency
+                + " acked/sec: " + (((double) acked) / uptime + " failed: " + failed));
     }
 
     public static void kill(Nimbus.Iface client, String name) throws Exception {
@@ -116,21 +119,21 @@
     }
 
     public static class InOrderSpout extends BaseRichSpout {
-        SpoutOutputCollector _collector;
-        int _base = 0;
-        int _i = 0;
+        SpoutOutputCollector collector;
+        int base = 0;
+        int count = 0;
 
         @Override
         public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
-            _collector = collector;
-            _base = context.getThisTaskIndex();
+            this.collector = collector;
+            base = context.getThisTaskIndex();
         }
 
         @Override
         public void nextTuple() {
-            Values v = new Values(_base, _i);
-            _collector.emit(v, "ACK");
-            _i++;
+            Values v = new Values(base, count);
+            collector.emit(v, "ACK");
+            count++;
         }
 
         @Override
@@ -157,7 +160,9 @@
             Integer c1 = tuple.getInteger(0);
             Integer c2 = tuple.getInteger(1);
             Integer exp = expected.get(c1);
-            if (exp == null) exp = 0;
+            if (exp == null) {
+                exp = 0;
+            }
             if (c2.intValue() != exp.intValue()) {
                 System.out.println(c1 + " " + c2 + " != " + exp);
                 throw new FailedException(c1 + " " + c2 + " != " + exp);
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/LambdaTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/LambdaTopology.java
index 94b1c38..e7c134b 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/LambdaTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/LambdaTopology.java
@@ -54,16 +54,3 @@
         return submit("lambda-demo", conf, builder);
     }
 }
-
-class Prefix implements Serializable {
-    private String str;
-
-    public Prefix(String str) {
-        this.str = str;
-    }
-
-    @Override
-    public String toString() {
-        return this.str;
-    }
-}
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/ManualDRPC.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/ManualDRPC.java
index 2038399..e685ca1 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/ManualDRPC.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/ManualDRPC.java
@@ -25,7 +25,9 @@
 import org.apache.storm.tuple.Values;
 import org.apache.storm.utils.DRPCClient;
 
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class ManualDRPC {
+
     public static void main(String[] args) throws Exception {
         TopologyBuilder builder = new TopologyBuilder();
 
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/MultipleLoggerTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/MultipleLoggerTopology.java
index 954e195..9410931 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/MultipleLoggerTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/MultipleLoggerTopology.java
@@ -49,37 +49,37 @@
     }
 
     public static class ExclamationLoggingBolt extends BaseRichBolt {
-        OutputCollector _collector;
-        Logger _rootLogger = LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
+        OutputCollector collector;
+        Logger rootLogger = LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME);
         // ensure the loggers are configured in the worker.xml before
         // trying to use them here
-        Logger _logger = LoggerFactory.getLogger("com.myapp");
-        Logger _subLogger = LoggerFactory.getLogger("com.myapp.sub");
+        Logger logger = LoggerFactory.getLogger("com.myapp");
+        Logger subLogger = LoggerFactory.getLogger("com.myapp.sub");
 
         @Override
         public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
-            _collector = collector;
+            this.collector = collector;
         }
 
         @Override
         public void execute(Tuple tuple) {
-            _rootLogger.debug("root: This is a DEBUG message");
-            _rootLogger.info("root: This is an INFO message");
-            _rootLogger.warn("root: This is a WARN message");
-            _rootLogger.error("root: This is an ERROR message");
+            rootLogger.debug("root: This is a DEBUG message");
+            rootLogger.info("root: This is an INFO message");
+            rootLogger.warn("root: This is a WARN message");
+            rootLogger.error("root: This is an ERROR message");
 
-            _logger.debug("myapp: This is a DEBUG message");
-            _logger.info("myapp: This is an INFO message");
-            _logger.warn("myapp: This is a WARN message");
-            _logger.error("myapp: This is an ERROR message");
+            logger.debug("myapp: This is a DEBUG message");
+            logger.info("myapp: This is an INFO message");
+            logger.warn("myapp: This is a WARN message");
+            logger.error("myapp: This is an ERROR message");
 
-            _subLogger.debug("myapp.sub: This is a DEBUG message");
-            _subLogger.info("myapp.sub: This is an INFO message");
-            _subLogger.warn("myapp.sub: This is a WARN message");
-            _subLogger.error("myapp.sub: This is an ERROR message");
+            subLogger.debug("myapp.sub: This is a DEBUG message");
+            subLogger.info("myapp.sub: This is an INFO message");
+            subLogger.warn("myapp.sub: This is a WARN message");
+            subLogger.error("myapp.sub: This is an ERROR message");
 
-            _collector.emit(tuple, new Values(tuple.getString(0) + "!!!"));
-            _collector.ack(tuple);
+            collector.emit(tuple, new Values(tuple.getString(0) + "!!!"));
+            collector.ack(tuple);
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/PersistentWindowingTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/PersistentWindowingTopology.java
index 46f29a0..62b3ed5 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/PersistentWindowingTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/PersistentWindowingTopology.java
@@ -18,6 +18,8 @@
 
 package org.apache.storm.starter;
 
+import static org.apache.storm.topology.base.BaseWindowedBolt.Duration;
+
 import java.util.Iterator;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
@@ -38,8 +40,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.storm.topology.base.BaseWindowedBolt.Duration;
-
 /**
  * An example that demonstrates the usage of {@link org.apache.storm.topology.IStatefulWindowedBolt} with window persistence.
  * <p>
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/Prefix.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/Prefix.java
new file mode 100644
index 0000000..94df975
--- /dev/null
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/Prefix.java
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.  The ASF licenses this file to you under the Apache License, Version
+ * 2.0 (the "License"); you may not use this file except in compliance with the License.  You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
+ * and limitations under the License.
+ */
+
+package org.apache.storm.starter;
+
+import java.io.Serializable;
+
+class Prefix implements Serializable {
+    private String str;
+
+    public Prefix(String str) {
+        this.str = str;
+    }
+
+    @Override
+    public String toString() {
+        return this.str;
+    }
+}
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/ReachTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/ReachTopology.java
index 1e57a94..5905132 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/ReachTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/ReachTopology.java
@@ -35,35 +35,39 @@
 /**
  * This is a good example of doing complex Distributed RPC on top of Storm. This program creates a topology that can
  * compute the reach for any URL on Twitter in realtime by parallelizing the whole computation.
- * <p/>
- * Reach is the number of unique people exposed to a URL on Twitter. To compute reach, you have to get all the people
+ *
+ * <p>Reach is the number of unique people exposed to a URL on Twitter. To compute reach, you have to get all the people
  * who tweeted the URL, get all the followers of all those people, unique that set of followers, and then count the
  * unique set. It's an intense computation that can involve thousands of database calls and tens of millions of follower
  * records.
- * <p/>
- * This Storm topology does every piece of that computation in parallel, turning what would be a computation that takes
+ *
+ * <p>This Storm topology does every piece of that computation in parallel, turning what would be a computation that takes
  * minutes on a single machine into one that takes just a couple seconds.
- * <p/>
- * For the purposes of demonstration, this topology replaces the use of actual DBs with in-memory hashmaps.
+ *
+ * <p>For the purposes of demonstration, this topology replaces the use of actual DBs with in-memory hashmaps.
  *
  * @see <a href="http://storm.apache.org/documentation/Distributed-RPC.html">Distributed RPC</a>
  */
 public class ReachTopology {
-    public static Map<String, List<String>> TWEETERS_DB = new HashMap<String, List<String>>() {{
-        put("foo.com/blog/1", Arrays.asList("sally", "bob", "tim", "george", "nathan"));
-        put("engineering.twitter.com/blog/5", Arrays.asList("adam", "david", "sally", "nathan"));
-        put("tech.backtype.com/blog/123", Arrays.asList("tim", "mike", "john"));
-    }};
+    public static Map<String, List<String>> TWEETERS_DB = new HashMap<String, List<String>>() {
+        {
+            put("foo.com/blog/1", Arrays.asList("sally", "bob", "tim", "george", "nathan"));
+            put("engineering.twitter.com/blog/5", Arrays.asList("adam", "david", "sally", "nathan"));
+            put("tech.backtype.com/blog/123", Arrays.asList("tim", "mike", "john"));
+        }
+    };
 
-    public static Map<String, List<String>> FOLLOWERS_DB = new HashMap<String, List<String>>() {{
-        put("sally", Arrays.asList("bob", "tim", "alice", "adam", "jim", "chris", "jai"));
-        put("bob", Arrays.asList("sally", "nathan", "jim", "mary", "david", "vivian"));
-        put("tim", Arrays.asList("alex"));
-        put("nathan", Arrays.asList("sally", "bob", "adam", "harry", "chris", "vivian", "emily", "jordan"));
-        put("adam", Arrays.asList("david", "carissa"));
-        put("mike", Arrays.asList("john", "bob"));
-        put("john", Arrays.asList("alice", "nathan", "jim", "mike", "bob"));
-    }};
+    public static Map<String, List<String>> FOLLOWERS_DB = new HashMap<String, List<String>>() {
+        {
+            put("sally", Arrays.asList("bob", "tim", "alice", "adam", "jim", "chris", "jai"));
+            put("bob", Arrays.asList("sally", "nathan", "jim", "mary", "david", "vivian"));
+            put("tim", Arrays.asList("alex"));
+            put("nathan", Arrays.asList("sally", "bob", "adam", "harry", "chris", "vivian", "emily", "jordan"));
+            put("adam", Arrays.asList("david", "carissa"));
+            put("mike", Arrays.asList("john", "bob"));
+            put("john", Arrays.asList("alice", "nathan", "jim", "mike", "bob"));
+        }
+    };
 
     public static LinearDRPCTopologyBuilder construct() {
         LinearDRPCTopologyBuilder builder = new LinearDRPCTopologyBuilder("reach");
@@ -132,24 +136,24 @@
     }
 
     public static class PartialUniquer extends BaseBatchBolt<Object> {
-        BatchOutputCollector _collector;
-        Object _id;
-        Set<String> _followers = new HashSet<String>();
+        BatchOutputCollector collector;
+        Object id;
+        Set<String> followers = new HashSet<String>();
 
         @Override
         public void prepare(Map<String, Object> conf, TopologyContext context, BatchOutputCollector collector, Object id) {
-            _collector = collector;
-            _id = id;
+            this.collector = collector;
+            this.id = id;
         }
 
         @Override
         public void execute(Tuple tuple) {
-            _followers.add(tuple.getString(1));
+            followers.add(tuple.getString(1));
         }
 
         @Override
         public void finishBatch() {
-            _collector.emit(new Values(_id, _followers.size()));
+            collector.emit(new Values(id, followers.size()));
         }
 
         @Override
@@ -159,24 +163,24 @@
     }
 
     public static class CountAggregator extends BaseBatchBolt<Object> {
-        BatchOutputCollector _collector;
-        Object _id;
-        int _count = 0;
+        BatchOutputCollector collector;
+        Object id;
+        int count = 0;
 
         @Override
         public void prepare(Map<String, Object> conf, TopologyContext context, BatchOutputCollector collector, Object id) {
-            _collector = collector;
-            _id = id;
+            this.collector = collector;
+            this.id = id;
         }
 
         @Override
         public void execute(Tuple tuple) {
-            _count += tuple.getInteger(1);
+            count += tuple.getInteger(1);
         }
 
         @Override
         public void finishBatch() {
-            _collector.emit(new Values(_id, _count));
+            collector.emit(new Values(id, count));
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/ResourceAwareExampleTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/ResourceAwareExampleTopology.java
index 18af7ea..20f951e 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/ResourceAwareExampleTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/ResourceAwareExampleTopology.java
@@ -106,7 +106,7 @@
         private static final ConcurrentHashMap<String, String> myCrummyCache =
             new ConcurrentHashMap<>();
         private static final int CACHE_SIZE = 100_000;
-        OutputCollector _collector;
+        OutputCollector collector;
 
         protected static String getFromCache(String key) {
             return myCrummyCache.get(key);
@@ -127,7 +127,7 @@
 
         @Override
         public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
-            _collector = collector;
+            this.collector = collector;
         }
 
         @Override
@@ -138,8 +138,8 @@
                 ret = orig + "!!!";
                 addToCache(orig, ret);
             }
-            _collector.emit(tuple, new Values(ret));
-            _collector.ack(tuple);
+            collector.emit(tuple, new Values(ret));
+            collector.ack(tuple);
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/RollingTopWords.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/RollingTopWords.java
index f8ad8c2..ea40e8b 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/RollingTopWords.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/RollingTopWords.java
@@ -43,15 +43,13 @@
     /**
      * Submits (runs) the topology.
      *
-     * Usage: "RollingTopWords [topology-name] [-local]"
+     * <p>Usage: "RollingTopWords [topology-name] [-local]"
      *
-     * By default, the topology is run locally under the name
+     * <p>By default, the topology is run locally under the name
      * "slidingWindowCounts".
      *
-     * Examples:
-     *
+     * <p>Examples:
      * ```
-     *
      * # Runs in remote/cluster mode, with topology name "production-topology"
      * $ storm jar storm-starter-jar-with-dependencies.jar org.apache.storm.starter.RollingTopWords production-topology ```
      *
@@ -59,7 +57,6 @@
      *          First positional argument (optional) is topology name, second
      *          positional argument (optional) defines whether to run the topology
      *          locally ("-local") or remotely, i.e. on a real cluster.
-     * @throws Exception
      */
     @Override
     protected int run(String[] args) {
@@ -71,11 +68,11 @@
         String spoutId = "wordGenerator";
         String counterId = "counter";
         String intermediateRankerId = "intermediateRanker";
-        String totalRankerId = "finalRanker";
         builder.setSpout(spoutId, new TestWordSpout(), 5);
         builder.setBolt(counterId, new RollingCountBolt(9, 3), 4).fieldsGrouping(spoutId, new Fields("word"));
         builder.setBolt(intermediateRankerId, new IntermediateRankingsBolt(TOP_N), 4).fieldsGrouping(counterId,
                                                                                                      new Fields("obj"));
+        String totalRankerId = "finalRanker";
         builder.setBolt(totalRankerId, new TotalRankingsBolt(TOP_N)).globalGrouping(intermediateRankerId);
         LOG.info("Topology name: " + topologyName);
 
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/SingleJoinExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/SingleJoinExample.java
index 4f4b6b6..ad3ab3d 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/SingleJoinExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/SingleJoinExample.java
@@ -21,10 +21,10 @@
 import org.apache.storm.tuple.Values;
 import org.apache.storm.utils.NimbusClient;
 
-/** Example of using a simple custom join bolt
- *  NOTE: Prefer to use the built-in JoinBolt wherever applicable
+/**
+ * Example of using a simple custom join bolt.
+ * NOTE: Prefer to use the built-in JoinBolt wherever applicable
  */
-
 public class SingleJoinExample {
     public static void main(String[] args) throws Exception {
         if (!NimbusClient.isLocalOverride()) {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/SkewedRollingTopWords.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/SkewedRollingTopWords.java
index a173854..a983b7c 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/SkewedRollingTopWords.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/SkewedRollingTopWords.java
@@ -46,23 +46,21 @@
     /**
      * Submits (runs) the topology.
      *
-     * Usage: "SkewedRollingTopWords [topology-name] [-local]"
+     * <p>Usage: "SkewedRollingTopWords [topology-name] [-local]"
      *
-     * By default, the topology is run locally under the name
+     * <p>By default, the topology is run locally under the name
      * "slidingWindowCounts".
      *
-     * Examples:
+     * <p>Examples:
      *
-     * ```
-     *
+     * <p>```
      * # Runs in remote/cluster mode, with topology name "production-topology"
      * $ storm jar storm-starter-jar-with-dependencies.jar org.apache.storm.starter.SkewedRollingTopWords production-topology ```
      *
      * @param args
      *          First positional argument (optional) is topology name, second
      *          positional argument (optional) defines whether to run the topology
-     *          locally ("-local") or remotely, i.e. on a real cluster.
-     * @throws Exception
+     *          locally ("-local") or remotely, i.e. on a real cluster.
      */
     @Override
     protected int run(String[] args) {
@@ -74,12 +72,12 @@
         String spoutId = "wordGenerator";
         String counterId = "counter";
         String aggId = "aggregator";
-        String intermediateRankerId = "intermediateRanker";
-        String totalRankerId = "finalRanker";
         builder.setSpout(spoutId, new TestWordSpout(), 5);
         builder.setBolt(counterId, new RollingCountBolt(9, 3), 4).partialKeyGrouping(spoutId, new Fields("word"));
         builder.setBolt(aggId, new RollingCountAggBolt(), 4).fieldsGrouping(counterId, new Fields("obj"));
+        String intermediateRankerId = "intermediateRanker";
         builder.setBolt(intermediateRankerId, new IntermediateRankingsBolt(TOP_N), 4).fieldsGrouping(aggId, new Fields("obj"));
+        String totalRankerId = "finalRanker";
         builder.setBolt(totalRankerId, new TotalRankingsBolt(TOP_N)).globalGrouping(intermediateRankerId);
         LOG.info("Topology name: " + topologyName);
 
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SingleJoinBolt.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SingleJoinBolt.java
index 00a2ab7..5f9b225 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SingleJoinBolt.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SingleJoinBolt.java
@@ -28,29 +28,29 @@
 import org.apache.storm.tuple.Tuple;
 import org.apache.storm.utils.TimeCacheMap;
 
-/** Example of a simple custom bolt for joining two streams
- *  NOTE: Prefer to use the built-in JoinBolt wherever applicable
+/**
+ * Example of a simple custom bolt for joining two streams.
+ * NOTE: Prefer to use the built-in JoinBolt wherever applicable
  */
-
 public class SingleJoinBolt extends BaseRichBolt {
-    OutputCollector _collector;
-    Fields _idFields;
-    Fields _outFields;
-    int _numSources;
-    TimeCacheMap<List<Object>, Map<GlobalStreamId, Tuple>> _pending;
-    Map<String, GlobalStreamId> _fieldLocations;
+    OutputCollector collector;
+    Fields idFields;
+    Fields outFields;
+    int numSources;
+    TimeCacheMap<List<Object>, Map<GlobalStreamId, Tuple>> pending;
+    Map<String, GlobalStreamId> fieldLocations;
 
     public SingleJoinBolt(Fields outFields) {
-        _outFields = outFields;
+        this.outFields = outFields;
     }
 
     @Override
     public void prepare(Map<String, Object> conf, TopologyContext context, OutputCollector collector) {
-        _fieldLocations = new HashMap<String, GlobalStreamId>();
-        _collector = collector;
+        fieldLocations = new HashMap<String, GlobalStreamId>();
+        this.collector = collector;
         int timeout = ((Number) conf.get(Config.TOPOLOGY_MESSAGE_TIMEOUT_SECS)).intValue();
-        _pending = new TimeCacheMap<List<Object>, Map<GlobalStreamId, Tuple>>(timeout, new ExpireCallback());
-        _numSources = context.getThisSources().size();
+        pending = new TimeCacheMap<List<Object>, Map<GlobalStreamId, Tuple>>(timeout, new ExpireCallback());
+        numSources = context.getThisSources().size();
         Set<String> idFields = null;
         for (GlobalStreamId source : context.getThisSources().keySet()) {
             Fields fields = context.getComponentOutputFields(source.get_componentId(), source.get_streamId());
@@ -61,58 +61,58 @@
                 idFields.retainAll(setFields);
             }
 
-            for (String outfield : _outFields) {
+            for (String outfield : outFields) {
                 for (String sourcefield : fields) {
                     if (outfield.equals(sourcefield)) {
-                        _fieldLocations.put(outfield, source);
+                        fieldLocations.put(outfield, source);
                     }
                 }
             }
         }
-        _idFields = new Fields(new ArrayList<String>(idFields));
+        this.idFields = new Fields(new ArrayList<String>(idFields));
 
-        if (_fieldLocations.size() != _outFields.size()) {
+        if (fieldLocations.size() != outFields.size()) {
             throw new RuntimeException("Cannot find all outfields among sources");
         }
     }
 
     @Override
     public void execute(Tuple tuple) {
-        List<Object> id = tuple.select(_idFields);
+        List<Object> id = tuple.select(idFields);
         GlobalStreamId streamId = new GlobalStreamId(tuple.getSourceComponent(), tuple.getSourceStreamId());
-        if (!_pending.containsKey(id)) {
-            _pending.put(id, new HashMap<GlobalStreamId, Tuple>());
+        if (!pending.containsKey(id)) {
+            pending.put(id, new HashMap<GlobalStreamId, Tuple>());
         }
-        Map<GlobalStreamId, Tuple> parts = _pending.get(id);
+        Map<GlobalStreamId, Tuple> parts = pending.get(id);
         if (parts.containsKey(streamId)) {
             throw new RuntimeException("Received same side of single join twice");
         }
         parts.put(streamId, tuple);
-        if (parts.size() == _numSources) {
-            _pending.remove(id);
+        if (parts.size() == numSources) {
+            pending.remove(id);
             List<Object> joinResult = new ArrayList<Object>();
-            for (String outField : _outFields) {
-                GlobalStreamId loc = _fieldLocations.get(outField);
+            for (String outField : outFields) {
+                GlobalStreamId loc = fieldLocations.get(outField);
                 joinResult.add(parts.get(loc).getValueByField(outField));
             }
-            _collector.emit(new ArrayList<Tuple>(parts.values()), joinResult);
+            collector.emit(new ArrayList<Tuple>(parts.values()), joinResult);
 
             for (Tuple part : parts.values()) {
-                _collector.ack(part);
+                collector.ack(part);
             }
         }
     }
 
     @Override
     public void declareOutputFields(OutputFieldsDeclarer declarer) {
-        declarer.declare(_outFields);
+        declarer.declare(outFields);
     }
 
     private class ExpireCallback implements TimeCacheMap.ExpiredCallback<List<Object>, Map<GlobalStreamId, Tuple>> {
         @Override
         public void expire(List<Object> id, Map<GlobalStreamId, Tuple> tuples) {
             for (Tuple tuple : tuples.values()) {
-                _collector.fail(tuple);
+                collector.fail(tuple);
             }
         }
     }
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SlidingWindowSumBolt.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SlidingWindowSumBolt.java
index 7987717..2702181 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SlidingWindowSumBolt.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/bolt/SlidingWindowSumBolt.java
@@ -26,7 +26,7 @@
 import org.slf4j.LoggerFactory;
 
 /**
- * Computes sliding window sum
+ * Computes sliding window sum.
  */
 public class SlidingWindowSumBolt extends BaseWindowedBolt {
     private static final Logger LOG = LoggerFactory.getLogger(SlidingWindowSumBolt.class);
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/spout/RandomSentenceSpout.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/spout/RandomSentenceSpout.java
index f6067ae..92af626 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/spout/RandomSentenceSpout.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/spout/RandomSentenceSpout.java
@@ -29,14 +29,14 @@
 public class RandomSentenceSpout extends BaseRichSpout {
     private static final Logger LOG = LoggerFactory.getLogger(RandomSentenceSpout.class);
 
-    SpoutOutputCollector _collector;
-    Random _rand;
+    SpoutOutputCollector collector;
+    Random rand;
 
 
     @Override
     public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
-        _collector = collector;
-        _rand = new Random();
+        this.collector = collector;
+        rand = new Random();
     }
 
     @Override
@@ -46,11 +46,11 @@
             sentence("the cow jumped over the moon"), sentence("an apple a day keeps the doctor away"),
             sentence("four score and seven years ago"), sentence("snow white and the seven dwarfs"), sentence("i am at two with nature")
         };
-        final String sentence = sentences[_rand.nextInt(sentences.length)];
+        final String sentence = sentences[rand.nextInt(sentences.length)];
 
         LOG.debug("Emitting tuple: {}", sentence);
 
-        _collector.emit(new Values(sentence));
+        collector.emit(new Values(sentence));
     }
 
     protected String sentence(String input) {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/AggregateExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/AggregateExample.java
index feaff4f..34ffd7c 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/AggregateExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/AggregateExample.java
@@ -23,7 +23,7 @@
 import org.apache.storm.topology.base.BaseWindowedBolt;
 
 /**
- * An example that illustrates the global aggregate
+ * An example that illustrates the global aggregate.
  */
 public class AggregateExample {
     @SuppressWarnings("unchecked")
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/BranchExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/BranchExample.java
index d20f071..fc7e74a 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/BranchExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/BranchExample.java
@@ -33,17 +33,17 @@
     public static void main(String[] args) throws Exception {
         StreamBuilder builder = new StreamBuilder();
         Stream<Integer>[] evenAndOdd = builder
-            /*
-             * Create a stream of random numbers from a spout that
-             * emits random integers by extracting the tuple value at index 0.
-             */
-            .newStream(new RandomIntegerSpout(), new ValueMapper<Integer>(0))
-            /*
-             * Split the stream of numbers into streams of
-             * even and odd numbers. The first stream contains even
-             * and the second contains odd numbers.
-             */
-            .branch(x -> (x % 2) == 0,
+                /*
+                 * Create a stream of random numbers from a spout that
+                 * emits random integers by extracting the tuple value at index 0.
+                 */
+                .newStream(new RandomIntegerSpout(), new ValueMapper<Integer>(0))
+                /*
+                 * Split the stream of numbers into streams of
+                 * even and odd numbers. The first stream contains even
+                 * and the second contains odd numbers.
+                 */
+                .branch(x -> (x % 2) == 0,
                     x -> (x % 2) == 1);
 
         evenAndOdd[0].forEach(x -> LOG.info("EVEN> " + x));
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/GroupByKeyAndWindowExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/GroupByKeyAndWindowExample.java
index 6bcf194..02617a2 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/GroupByKeyAndWindowExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/GroupByKeyAndWindowExample.java
@@ -34,7 +34,7 @@
 
 /**
  * An example that shows the usage of {@link PairStream#groupByKeyAndWindow(Window)}
- * and {@link PairStream#reduceByKeyAndWindow(Reducer, Window)}
+ * and {@link PairStream#reduceByKeyAndWindow(Reducer, Window)}.
  */
 public class GroupByKeyAndWindowExample {
     public static void main(String[] args) throws Exception {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/JoinExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/JoinExample.java
index 52bbf28..9016c78 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/JoinExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/JoinExample.java
@@ -70,7 +70,7 @@
     private static class NumberSpout extends BaseRichSpout {
         private final Function<Integer, Integer> function;
         private SpoutOutputCollector collector;
-        private int i = 1;
+        private int count = 1;
 
         NumberSpout(Function<Integer, Integer> function) {
             this.function = function;
@@ -84,8 +84,8 @@
         @Override
         public void nextTuple() {
             Utils.sleep(990);
-            collector.emit(new Values(i, function.apply(i)));
-            i++;
+            collector.emit(new Values(count, function.apply(count)));
+            count++;
         }
 
         @Override
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StateQueryExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StateQueryExample.java
index 4e5b1ef..b72b676 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StateQueryExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StateQueryExample.java
@@ -31,12 +31,12 @@
 
 /**
  * An example that uses {@link Stream#stateQuery(StreamState)} to query the state
- * <p>
- * You should start a local redis instance before running the 'storm jar' command. By default
+ *
+ * <p>You should start a local redis instance before running the 'storm jar' command. By default
  * the connection will be attempted at localhost:6379. The default
  * RedisKeyValueStateProvider parameters can be overridden in conf/storm.yaml, for e.g.
- * <p/>
- * <pre>
+ *
+ * <pre>
  * topology.state.provider.config: '{"keyClass":"...", "valueClass":"...",
  *  "keySerializerClass":"...", "valueSerializerClass":"...",
  *  "jedisPoolConfig":{"host":"localhost", "port":6379,
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StatefulWordCount.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StatefulWordCount.java
index afc345d..39534b3 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StatefulWordCount.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/StatefulWordCount.java
@@ -26,12 +26,12 @@
 /**
  * A stateful word count that uses {@link PairStream#updateStateByKey(StateUpdater)} to
  * save the counts in a key value state. This example uses Redis state store.
- * <p>
- * You should start a local redis instance before running the 'storm jar' command. By default
+ *
+ * <p>You should start a local redis instance before running the 'storm jar' command. By default
  * the connection will be attempted at localhost:6379. The default
  * RedisKeyValueStateProvider parameters can be overridden in conf/storm.yaml, for e.g.
- * <p/>
- * <pre>
+ *
+ * <pre>
  * topology.state.provider.config: '{"keyClass":"...", "valueClass":"...",
  *  "keySerializerClass":"...", "valueSerializerClass":"...",
  *  "jedisPoolConfig":{"host":"localhost", "port":6379,
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/TypedTupleExample.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/TypedTupleExample.java
index e859ca1..481758f 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/TypedTupleExample.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/TypedTupleExample.java
@@ -28,12 +28,13 @@
  * An example that illustrates the usage of typed tuples (TupleN<..>) and {@link TupleValueMappers}.
  */
 public class TypedTupleExample {
+
+    /**
+     * The spout emits sequences of (Integer, Long, Long). TupleValueMapper can be used to extract fields
+     * from the values and produce a stream of typed tuple (Tuple3&lt;Integer, Long, Long&gt; in this case).
+     */
     public static void main(String[] args) throws Exception {
         StreamBuilder builder = new StreamBuilder();
-        /**
-         * The spout emits sequences of (Integer, Long, Long). TupleValueMapper can be used to extract fields
-         * from the values and produce a stream of typed tuple (Tuple3<Integer, Long, Long> in this case.
-         */
         Stream<Tuple3<Integer, Long, Long>> stream = builder.newStream(new RandomIntegerSpout(), TupleValueMappers.of(0, 1, 2));
 
         PairStream<Long, Integer> pairs = stream.mapToPair(t -> Pair.of(t._2 / 10000, t._1));
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WindowedWordCount.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WindowedWordCount.java
index 2c06674..ef8edd4 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WindowedWordCount.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WindowedWordCount.java
@@ -23,7 +23,7 @@
 import org.apache.storm.topology.base.BaseWindowedBolt.Duration;
 
 /**
- * A windowed word count example
+ * A windowed word count example.
  */
 public class WindowedWordCount {
     public static void main(String[] args) throws Exception {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WordCountToBolt.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WordCountToBolt.java
index 082b96a..997e642 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WordCountToBolt.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/streams/WordCountToBolt.java
@@ -27,7 +27,7 @@
 
 /**
  * An example that computes word counts and finally emits the results to an
- * external bolt (sink)
+ * external bolt (sink).
  */
 public class WordCountToBolt {
     public static void main(String[] args) throws Exception {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/RankableObjectWithFields.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/RankableObjectWithFields.java
index 48bc261..fea5896 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/RankableObjectWithFields.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/RankableObjectWithFields.java
@@ -74,6 +74,7 @@
     }
 
     /**
+     * Get fields.
      * @return an immutable list of any additional data fields of the object (may be empty but will never be null)
      */
     public List<Object> getFields() {
@@ -131,8 +132,6 @@
     /**
      * Note: We do not defensively copy the wrapped object and any accompanying fields.  We do guarantee, however,
      * do return a defensive (shallow) copy of the List object that is wrapping any accompanying fields.
-     *
-     * @return
      */
     @Override
     public Rankable copy() {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/Rankings.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/Rankings.java
index 85a123b..89bba59 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/Rankings.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/Rankings.java
@@ -39,7 +39,6 @@
 
     /**
      * Copy constructor.
-     * @param other
      */
     public Rankings(Rankings other) {
         this(other.maxSize());
@@ -47,6 +46,7 @@
     }
 
     /**
+     * Get max size.
      * @return the maximum possible number (size) of ranked objects this instance can hold
      */
     public int maxSize() {
@@ -54,6 +54,7 @@
     }
 
     /**
+     * Get size.
      * @return the number (size) of ranked objects this instance is currently holding
      */
     public int size() {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlidingWindowCounter.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlidingWindowCounter.java
index 6aedfc8..4ab8488 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlidingWindowCounter.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlidingWindowCounter.java
@@ -95,6 +95,7 @@
      * @return The current (total) counts of all tracked objects.
      */
     public Map<T, Long> getCountsThenAdvanceWindow() {
+        @SuppressWarnings("checkstyle:VariableDeclarationUsageDistance")
         Map<T, Long> counts = objCounter.getCounts();
         objCounter.wipeZeros();
         objCounter.wipeSlot(tailSlot);
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlotBasedCounter.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlotBasedCounter.java
index 51157b0..6f48c47 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlotBasedCounter.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/tools/SlotBasedCounter.java
@@ -75,8 +75,6 @@
 
     /**
      * Reset the slot count of any tracked objects to zero for the given slot.
-     *
-     * @param slot
      */
     public void wipeSlot(int slot) {
         for (T obj : objToCounts.keySet()) {
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/DebugMemoryMapState.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/DebugMemoryMapState.java
index ceac936..3a7aeb0 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/DebugMemoryMapState.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/DebugMemoryMapState.java
@@ -59,15 +59,15 @@
     }
 
     public static class Factory implements StateFactory {
-        String _id;
+        String id;
 
         public Factory() {
-            _id = UUID.randomUUID().toString();
+            id = UUID.randomUUID().toString();
         }
 
         @Override
         public State makeState(Map<String, Object> conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
-            return new DebugMemoryMapState(_id + partitionIndex);
+            return new DebugMemoryMapState(id + partitionIndex);
         }
     }
 }
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfDevicesTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfDevicesTopology.java
index bde5baa..176326e 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfDevicesTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfDevicesTopology.java
@@ -41,21 +41,24 @@
      * generates result stream based on min amd max with device-id and count values.
      */
     public static StormTopology buildDevicesTopology() {
-        String deviceID = "device-id";
+        String deviceId = "device-id";
         String count = "count";
-        Fields allFields = new Fields(deviceID, count);
+        Fields allFields = new Fields(deviceId, count);
 
         RandomNumberGeneratorSpout spout = new RandomNumberGeneratorSpout(allFields, 10, 1000);
 
         TridentTopology topology = new TridentTopology();
-        Stream devicesStream = topology.newStream("devicegen-spout", spout).
-            each(allFields, new Debug("##### devices"));
+        Stream devicesStream = topology
+                .newStream("devicegen-spout", spout)
+                .each(allFields, new Debug("##### devices"));
 
-        devicesStream.minBy(deviceID).
-            each(allFields, new Debug("#### device with min id"));
+        devicesStream
+                .minBy(deviceId)
+                .each(allFields, new Debug("#### device with min id"));
 
-        devicesStream.maxBy(count).
-            each(allFields, new Debug("#### device with max count"));
+        devicesStream
+                .maxBy(count)
+                .each(allFields, new Debug("#### device with max count"));
 
         return topology.build();
     }
@@ -73,28 +76,27 @@
         spout.setCycle(true);
 
         TridentTopology topology = new TridentTopology();
-        Stream vehiclesStream = topology.newStream("spout1", spout).
-            each(allFields, new Debug("##### vehicles"));
+        Stream vehiclesStream = topology
+                .newStream("spout1", spout)
+                .each(allFields, new Debug("##### vehicles"));
 
-        Stream slowVehiclesStream =
-            vehiclesStream
+        Stream slowVehiclesStream = vehiclesStream
                 .min(new SpeedComparator())
                 .each(vehicleField, new Debug("#### slowest vehicle"));
 
-        Stream slowDriversStream =
-            slowVehiclesStream
+        Stream slowDriversStream = slowVehiclesStream
                 .project(driverField)
                 .each(driverField, new Debug("##### slowest driver"));
 
         vehiclesStream
-            .max(new SpeedComparator())
-            .each(vehicleField, new Debug("#### fastest vehicle"))
-            .project(driverField)
-            .each(driverField, new Debug("##### fastest driver"));
+                .max(new SpeedComparator())
+                .each(vehicleField, new Debug("#### fastest vehicle"))
+                .project(driverField)
+                .each(driverField, new Debug("##### fastest driver"));
 
         vehiclesStream
-            .max(new EfficiencyComparator()).
-            each(vehicleField, new Debug("#### efficient vehicle"));
+                .max(new EfficiencyComparator())
+                .each(vehicleField, new Debug("#### efficient vehicle"));
 
         return topology.build();
     }
@@ -141,10 +143,10 @@
 
         @Override
         public String toString() {
-            return "Driver{" +
-                   "name='" + name + '\'' +
-                   ", id=" + id +
-                   '}';
+            return "Driver{"
+                    + "name='" + name + '\''
+                    + ", id=" + id
+                    + '}';
         }
     }
 
@@ -176,11 +178,11 @@
 
         @Override
         public String toString() {
-            return "Vehicle{" +
-                   "name='" + name + '\'' +
-                   ", maxSpeed=" + maxSpeed +
-                   ", efficiency=" + efficiency +
-                   '}';
+            return "Vehicle{"
+                    + "name='" + name + '\''
+                    + ", maxSpeed=" + maxSpeed
+                    + ", efficiency=" + efficiency
+                    + '}';
         }
     }
 }
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfVehiclesTopology.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfVehiclesTopology.java
index f37442b..83e9d62 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfVehiclesTopology.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentMinMaxOfVehiclesTopology.java
@@ -50,8 +50,9 @@
         spout.setCycle(true);
 
         TridentTopology topology = new TridentTopology();
-        Stream vehiclesStream = topology.newStream("spout1", spout).
-            each(allFields, new Debug("##### vehicles"));
+        Stream vehiclesStream = topology
+                .newStream("spout1", spout)
+                .each(allFields, new Debug("##### vehicles"));
 
         Stream slowVehiclesStream =
             vehiclesStream
@@ -64,18 +65,18 @@
                 .each(driverField, new Debug("##### slowest driver"));
 
         vehiclesStream
-            .max(new SpeedComparator())
-            .each(vehicleField, new Debug("#### fastest vehicle"))
-            .project(driverField)
-            .each(driverField, new Debug("##### fastest driver"));
+                .max(new SpeedComparator())
+                .each(vehicleField, new Debug("#### fastest vehicle"))
+                .project(driverField)
+                .each(driverField, new Debug("##### fastest driver"));
 
         vehiclesStream
-            .minBy(Vehicle.FIELD_NAME, new EfficiencyComparator()).
-            each(vehicleField, new Debug("#### least efficient vehicle"));
+                .minBy(Vehicle.FIELD_NAME, new EfficiencyComparator())
+                .each(vehicleField, new Debug("#### least efficient vehicle"));
 
         vehiclesStream
-            .maxBy(Vehicle.FIELD_NAME, new EfficiencyComparator()).
-            each(vehicleField, new Debug("#### most efficient vehicle"));
+                .maxBy(Vehicle.FIELD_NAME, new EfficiencyComparator())
+                .each(vehicleField, new Debug("#### most efficient vehicle"));
 
         return topology.build();
     }
@@ -120,10 +121,10 @@
 
         @Override
         public String toString() {
-            return "Driver{" +
-                   "name='" + name + '\'' +
-                   ", id=" + id +
-                   '}';
+            return "Driver{"
+                    + "name='" + name + '\''
+                    + ", id=" + id
+                    + '}';
         }
     }
 
@@ -155,11 +156,11 @@
 
         @Override
         public String toString() {
-            return "Vehicle{" +
-                   "name='" + name + '\'' +
-                   ", maxSpeed=" + maxSpeed +
-                   ", efficiency=" + efficiency +
-                   '}';
+            return "Vehicle{"
+                    + "name='" + name + '\''
+                    + ", maxSpeed=" + maxSpeed
+                    + ", efficiency=" + efficiency
+                    + '}';
         }
     }
 }
diff --git a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentReach.java b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentReach.java
index 19c0665..a159a3e 100644
--- a/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentReach.java
+++ b/examples/storm-starter/src/jvm/org/apache/storm/starter/trident/TridentReach.java
@@ -38,21 +38,25 @@
 import org.apache.storm.utils.DRPCClient;
 
 public class TridentReach {
-    public static Map<String, List<String>> TWEETERS_DB = new HashMap<String, List<String>>() {{
-        put("foo.com/blog/1", Arrays.asList("sally", "bob", "tim", "george", "nathan"));
-        put("engineering.twitter.com/blog/5", Arrays.asList("adam", "david", "sally", "nathan"));
-        put("tech.backtype.com/blog/123", Arrays.asList("tim", "mike", "john"));
-    }};
+    public static Map<String, List<String>> TWEETERS_DB = new HashMap<String, List<String>>() {
+        {
+            put("foo.com/blog/1", Arrays.asList("sally", "bob", "tim", "george", "nathan"));
+            put("engineering.twitter.com/blog/5", Arrays.asList("adam", "david", "sally", "nathan"));
+            put("tech.backtype.com/blog/123", Arrays.asList("tim", "mike", "john"));
+        }
+    };
 
-    public static Map<String, List<String>> FOLLOWERS_DB = new HashMap<String, List<String>>() {{
-        put("sally", Arrays.asList("bob", "tim", "alice", "adam", "jim", "chris", "jai"));
-        put("bob", Arrays.asList("sally", "nathan", "jim", "mary", "david", "vivian"));
-        put("tim", Arrays.asList("alex"));
-        put("nathan", Arrays.asList("sally", "bob", "adam", "harry", "chris", "vivian", "emily", "jordan"));
-        put("adam", Arrays.asList("david", "carissa"));
-        put("mike", Arrays.asList("john", "bob"));
-        put("john", Arrays.asList("alice", "nathan", "jim", "mike", "bob"));
-    }};
+    public static Map<String, List<String>> FOLLOWERS_DB = new HashMap<String, List<String>>() {
+        {
+            put("sally", Arrays.asList("bob", "tim", "alice", "adam", "jim", "chris", "jai"));
+            put("bob", Arrays.asList("sally", "nathan", "jim", "mary", "david", "vivian"));
+            put("tim", Arrays.asList("alex"));
+            put("nathan", Arrays.asList("sally", "bob", "adam", "harry", "chris", "vivian", "emily", "jordan"));
+            put("adam", Arrays.asList("david", "carissa"));
+            put("mike", Arrays.asList("john", "bob"));
+            put("john", Arrays.asList("alice", "nathan", "jim", "mike", "bob"));
+        }
+    };
 
     public static StormTopology buildTopology() {
         TridentTopology topology = new TridentTopology();
@@ -83,10 +87,10 @@
     }
 
     public static class StaticSingleKeyMapState extends ReadOnlyState implements ReadOnlyMapState<Object> {
-        Map _map;
+        Map map;
 
         public StaticSingleKeyMapState(Map map) {
-            _map = map;
+            this.map = map;
         }
 
         @Override
@@ -94,21 +98,21 @@
             List<Object> ret = new ArrayList();
             for (List<Object> key : keys) {
                 Object singleKey = key.get(0);
-                ret.add(_map.get(singleKey));
+                ret.add(map.get(singleKey));
             }
             return ret;
         }
 
         public static class Factory implements StateFactory {
-            Map _map;
+            Map map;
 
             public Factory(Map map) {
-                _map = map;
+                this.map = map;
             }
 
             @Override
             public State makeState(Map<String, Object> conf, IMetricsContext metrics, int partitionIndex, int numPartitions) {
-                return new StaticSingleKeyMapState(_map);
+                return new StaticSingleKeyMapState(map);
             }
 
         }
diff --git a/external/storm-blobstore-migration/pom.xml b/external/storm-blobstore-migration/pom.xml
index 69d4c5e..d7c2eb2 100644
--- a/external/storm-blobstore-migration/pom.xml
+++ b/external/storm-blobstore-migration/pom.xml
@@ -129,7 +129,7 @@
                 <artifactId>maven-checkstyle-plugin</artifactId>
                 <!--Note - the version would be inherited-->
                 <configuration>
-                    <maxAllowedViolations>56</maxAllowedViolations>
+                    <maxAllowedViolations>0</maxAllowedViolations>
                 </configuration>
             </plugin>
             <plugin>
diff --git a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListHDFS.java b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListHDFS.java
index 3f4566e..96e7038 100644
--- a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListHDFS.java
+++ b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListHDFS.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.storm.blobstore;
 
 import java.util.Map;
@@ -27,14 +28,17 @@
 import org.apache.storm.hdfs.blobstore.HdfsClientBlobStore;
 import org.apache.storm.utils.Utils;
 
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class ListHDFS {
     
     public static void main(String[] args) throws Exception {
-        if(args.length < 1) {
+        if (args.length < 1) {
             System.out.println("Need at least 1 argument (hdfs_blobstore_path), but have " + Integer.toString(args.length));
             System.out.println("listHDFS <hdfs_blobstore_path> <hdfs_principal> <keytab>");
             System.out.println("Lists blobs in HdfsBlobStore");
-            System.out.println("Example: listHDFS 'hdfs://some-hdfs-namenode:8080/srv/storm/my-storm-blobstore' 'stormUser/my-nimbus-host.example.com@STORM.EXAMPLE.COM' '/srv/my-keytab/stormUser.kt'");
+            System.out.println("Example: listHDFS "
+                    + "'hdfs://some-hdfs-namenode:8080/srv/storm/my-storm-blobstore' "
+                    + "'stormUser/my-nimbus-host.example.com@STORM.EXAMPLE.COM' '/srv/my-keytab/stormUser.kt'");
             System.exit(1);
         }
         
@@ -43,13 +47,13 @@
         
         hdfsConf.put(Config.BLOBSTORE_DIR, hdfsBlobstorePath);
         hdfsConf.put(Config.STORM_PRINCIPAL_TO_LOCAL_PLUGIN, "org.apache.storm.security.auth.DefaultPrincipalToLocal");
-        if(args.length >= 2) {
-        	System.out.println("SETTING HDFS PRINCIPAL!");
-        	hdfsConf.put(Config.BLOBSTORE_HDFS_PRINCIPAL, args[1]);
+        if (args.length >= 2) {
+            System.out.println("SETTING HDFS PRINCIPAL!");
+            hdfsConf.put(Config.BLOBSTORE_HDFS_PRINCIPAL, args[1]);
         }
-        if(args.length >= 3) {
-        	System.out.println("SETTING HDFS KEYTAB!");
-        	hdfsConf.put(Config.BLOBSTORE_HDFS_KEYTAB, args[2]);
+        if (args.length >= 3) {
+            System.out.println("SETTING HDFS KEYTAB!");
+            hdfsConf.put(Config.BLOBSTORE_HDFS_KEYTAB, args[2]);
         }
         
         /* CREATE THE BLOBSTORES */
diff --git a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListLocalFs.java b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListLocalFs.java
index 35ff33f..aefddb8 100644
--- a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListLocalFs.java
+++ b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/ListLocalFs.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.storm.blobstore;
 
 import java.util.Map;
@@ -30,7 +31,7 @@
     
     public static void main(String[] args) throws Exception {
         
-        if(args.length != 1) {
+        if (args.length != 1) {
             System.out.println("Need 1 arguments, but have " + Integer.toString(args.length));
             System.out.println("listLocalFs <local_blobstore_dir>");
             System.out.println("Migrates blobs from LocalFsBlobStore to HdfsBlobStore");
diff --git a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigrateBlobs.java b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigrateBlobs.java
index 4e2628b..aae81fd 100644
--- a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigrateBlobs.java
+++ b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigrateBlobs.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.storm.blobstore;
 
 import java.io.IOException;
@@ -26,31 +27,36 @@
 
 import org.apache.storm.Config;
 import org.apache.storm.blobstore.BlobStore;
-import org.apache.storm.hdfs.blobstore.HdfsBlobStore;
-import org.apache.storm.nimbus.NimbusInfo;
-import org.apache.storm.utils.Utils;
 import org.apache.storm.blobstore.LocalFsBlobStore;
 import org.apache.storm.generated.AuthorizationException;
 import org.apache.storm.generated.KeyAlreadyExistsException;
 import org.apache.storm.generated.KeyNotFoundException;
 import org.apache.storm.generated.ReadableBlobMeta;
 import org.apache.storm.generated.SettableBlobMeta;
+import org.apache.storm.hdfs.blobstore.HdfsBlobStore;
+import org.apache.storm.nimbus.NimbusInfo;
+import org.apache.storm.utils.Utils;
 
 public class MigrateBlobs {
     
     protected static void deleteAllBlobStoreKeys(BlobStore bs, Subject who) throws AuthorizationException, KeyNotFoundException {
         Iterable<String> hdfsKeys = () -> bs.listKeys();
-        for(String key : hdfsKeys) {
+        for (String key : hdfsKeys) {
             System.out.println(key);
             bs.deleteBlob(key, who);
         }
     }
     
-    protected static void copyBlobStoreKeys(BlobStore bsFrom, Subject whoFrom, BlobStore bsTo, Subject whoTo) throws AuthorizationException, KeyAlreadyExistsException, IOException, KeyNotFoundException {
+    protected static void copyBlobStoreKeys(BlobStore bsFrom,
+            Subject whoFrom,
+            BlobStore bsTo, Subject whoTo) throws AuthorizationException,
+            KeyAlreadyExistsException,
+            IOException,
+            KeyNotFoundException {
         Iterable<String> lfsKeys = () -> bsFrom.listKeys();
-        for(String key : lfsKeys) {
-            ReadableBlobMeta readable_meta = bsFrom.getBlobMeta(key, whoFrom);
-            SettableBlobMeta meta = readable_meta.get_settable();
+        for (String key : lfsKeys) {
+            ReadableBlobMeta readableMeta = bsFrom.getBlobMeta(key, whoFrom);
+            SettableBlobMeta meta = readableMeta.get_settable();
             InputStream in = bsFrom.getBlob(key, whoFrom);
             System.out.println("COPYING BLOB " + key + " FROM " + bsFrom + " TO " + bsTo);
             bsTo.createBlob(key, in, meta, whoTo);
@@ -66,30 +72,32 @@
             System.out.println("Need at least 2 arguments, but have " + Integer.toString(args.length));
             System.out.println("migrate <local_blobstore_dir> <hdfs_blobstore_path> <hdfs_principal> <keytab>");
             System.out.println("Migrates blobs from LocalFsBlobStore to HdfsBlobStore");
-            System.out.println("Example: migrate '/srv/storm' 'hdfs://some-hdfs-namenode:8080/srv/storm/my-storm-blobstore' 'stormUser/my-nimbus-host.example.com@STORM.EXAMPLE.COM' '/srv/my-keytab/stormUser.kt'");
+            System.out.println("Example: migrate '/srv/storm' "
+                    + "'hdfs://some-hdfs-namenode:8080/srv/storm/my-storm-blobstore' "
+                    + "'stormUser/my-nimbus-host.example.com@STORM.EXAMPLE.COM' '/srv/my-keytab/stormUser.kt'");
             System.exit(1);
         }
-        
-        String localBlobstoreDir = args[0];
+
         String hdfsBlobstorePath = args[1];
         
         hdfsConf.put(Config.BLOBSTORE_DIR, hdfsBlobstorePath);
         hdfsConf.put(Config.STORM_PRINCIPAL_TO_LOCAL_PLUGIN, "org.apache.storm.security.auth.DefaultPrincipalToLocal");
-        if(args.length >= 3) {
-        	System.out.println("SETTING HDFS PRINCIPAL!");
-        	hdfsConf.put(Config.BLOBSTORE_HDFS_PRINCIPAL, args[2]);
+        if (args.length >= 3) {
+            System.out.println("SETTING HDFS PRINCIPAL!");
+            hdfsConf.put(Config.BLOBSTORE_HDFS_PRINCIPAL, args[2]);
         }
-        if(args.length >= 4) {
-        	System.out.println("SETTING HDFS KEYTAB!");
-        	hdfsConf.put(Config.BLOBSTORE_HDFS_KEYTAB, args[3]);
+        if (args.length >= 4) {
+            System.out.println("SETTING HDFS KEYTAB!");
+            hdfsConf.put(Config.BLOBSTORE_HDFS_KEYTAB, args[3]);
         }
         hdfsConf.put(Config.STORM_BLOBSTORE_REPLICATION_FACTOR, 7);
         
         Map<String, Object> lfsConf = Utils.readStormConfig();
+        String localBlobstoreDir = args[0];
         lfsConf.put(Config.BLOBSTORE_DIR, localBlobstoreDir);
         lfsConf.put(Config.STORM_PRINCIPAL_TO_LOCAL_PLUGIN, "org.apache.storm.security.auth.DefaultPrincipalToLocal");
         
-        
+
         /* CREATE THE BLOBSTORES */
         LocalFsBlobStore lfsBlobStore = new LocalFsBlobStore();
         lfsBlobStore.prepare(lfsConf, null, NimbusInfo.fromConf(lfsConf), null);
diff --git a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigratorMain.java b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigratorMain.java
index a18376c..03d163a 100644
--- a/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigratorMain.java
+++ b/external/storm-blobstore-migration/src/main/java/org/apache/storm/blobstore/MigratorMain.java
@@ -15,6 +15,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
+
 package org.apache.storm.blobstore;
 
 import java.util.Arrays;
@@ -22,10 +23,10 @@
 import javax.security.auth.Subject;
 
 public class MigratorMain {
-	
-	public static void listBlobStoreKeys(BlobStore bs, Subject who) {
+
+    public static void listBlobStoreKeys(BlobStore bs, Subject who) {
         Iterable<String> bsKeys = () -> bs.listKeys();
-        for(String key : bsKeys) {
+        for (String key : bsKeys) {
             System.out.println(key);
         }
     }
@@ -38,20 +39,17 @@
     }
     
     public static void main(String[] args) throws Exception {
-        if(args.length == 0) {
+        if (args.length == 0) {
             usage();
         }
         
-        if(args[0].equals("listHDFS")) {
+        if (args[0].equals("listHDFS")) {
             ListHDFS.main(Arrays.copyOfRange(args, 1, args.length));
-        }
-        else if(args[0].equals("listLocalFs")) {
+        } else if (args[0].equals("listLocalFs")) {
             ListLocalFs.main(Arrays.copyOfRange(args, 1, args.length));
-        }
-        else if(args[0].equals("migrate")) {
+        } else if (args[0].equals("migrate")) {
             MigrateBlobs.main(Arrays.copyOfRange(args, 1, args.length));
-        }
-        else {
+        } else {
             System.out.println("Not recognized: " + args[0]);
             usage();
         }
diff --git a/external/storm-cassandra/pom.xml b/external/storm-cassandra/pom.xml
index f670a6e..495f9d5 100644
--- a/external/storm-cassandra/pom.xml
+++ b/external/storm-cassandra/pom.xml
@@ -132,7 +132,7 @@
                 <artifactId>maven-checkstyle-plugin</artifactId>
                 <!--Note - the version would be inherited-->
                 <configuration>
-                    <maxAllowedViolations>159</maxAllowedViolations>
+                    <maxAllowedViolations>0</maxAllowedViolations>
                 </configuration>
             </plugin>
             <plugin>
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/AbstractExecutionResultHandler.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/AbstractExecutionResultHandler.java
index 3b32749..bb397b7 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/AbstractExecutionResultHandler.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/AbstractExecutionResultHandler.java
@@ -48,6 +48,8 @@
 
     @Override
     public void onThrowable(Throwable t, OutputCollector collector, List<Tuple> tl) {
-        for (Tuple i : tl) onThrowable(t, collector, i);
+        for (Tuple i : tl) {
+            onThrowable(t, collector, i);
+        }
     }
 }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/BaseExecutionResultHandler.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/BaseExecutionResultHandler.java
index 57ef351..e8f3259 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/BaseExecutionResultHandler.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/BaseExecutionResultHandler.java
@@ -19,6 +19,7 @@
 import com.datastax.driver.core.exceptions.WriteTimeoutException;
 import org.apache.storm.task.OutputCollector;
 import org.apache.storm.tuple.Tuple;
+import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
@@ -29,7 +30,7 @@
  */
 public class BaseExecutionResultHandler extends AbstractExecutionResultHandler {
 
-    private final static org.slf4j.Logger LOG = LoggerFactory.getLogger(BaseExecutionResultHandler.class);
+    private static final Logger LOG = LoggerFactory.getLogger(BaseExecutionResultHandler.class);
 
     /**
      * {@inheritDoc}
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/CassandraContext.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/CassandraContext.java
index 49763d2..5c43852 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/CassandraContext.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/CassandraContext.java
@@ -24,9 +24,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/**
- *
- */
 public class CassandraContext extends WorkerCtx implements SimpleClientProvider {
 
     /**
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/DynamicStatementBuilder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/DynamicStatementBuilder.java
index 823a566..f085f2b 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/DynamicStatementBuilder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/DynamicStatementBuilder.java
@@ -93,8 +93,9 @@
 
     private static BatchCQLStatementTupleMapper newBatchStatementBuilder(BatchStatement.Type type, CQLStatementBuilder[] builders) {
         List<CQLStatementTupleMapper> mappers = new ArrayList<>(builders.length);
-        for (CQLStatementBuilder b : Arrays.asList(builders))
+        for (CQLStatementBuilder b : Arrays.asList(builders)) {
             mappers.add(b.build());
+        }
         return new BatchCQLStatementTupleMapper(type, mappers);
     }
 
@@ -136,8 +137,9 @@
     public static final FieldSelector[] fields(final String... fields) {
         int size = fields.length;
         List<FieldSelector> fl = new ArrayList<>(size);
-        for (int i = 0; i < size; i++)
+        for (int i = 0; i < size; i++) {
             fl.add(new FieldSelector(fields[i]));
+        }
         return fl.toArray(new FieldSelector[size]);
     }
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/Murmur3StreamGrouping.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/Murmur3StreamGrouping.java
index c172cb6..593e654 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/Murmur3StreamGrouping.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/Murmur3StreamGrouping.java
@@ -28,10 +28,9 @@
 import org.apache.storm.tuple.Fields;
 
 /**
- *
  * Simple {@link org.apache.storm.grouping.CustomStreamGrouping} that uses Murmur3 algorithm to choose the target task of a tuple.
  *
- * This stream grouping may be used to optimise writes to Apache Cassandra.
+ * <p>This stream grouping may be used to optimise writes to Apache Cassandra.
  */
 public class Murmur3StreamGrouping implements CustomStreamGrouping {
 
@@ -66,9 +65,8 @@
      * http://stackoverflow.com/questions/27212797/cassandra-hashing-algorithm-with-composite-keys
      * https://github.com/apache/cassandra/blob/trunk/src/java/org/apache/cassandra/db/marshal/CompositeType.java
      *
-     * @param values the fields which are part of the (compose) partition key.
-     * @return the computed hash for input values.
-     * @throws java.io.IOException
+     * @param values the fields which are part of the (compose) partition key
+     * @return the computed hash for input values
      */
     @VisibleForTesting
     public static long hashes(List<Object> values) throws IOException {
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BaseCassandraBolt.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BaseCassandraBolt.java
index 946932f..933bc6e 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BaseCassandraBolt.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BaseCassandraBolt.java
@@ -41,7 +41,7 @@
 /**
  * A base cassandra bolt.
  *
- * Default {@link org.apache.storm.topology.base.BaseTickTupleAwareRichBolt}
+ * <p>Default {@link org.apache.storm.topology.base.BaseTickTupleAwareRichBolt}
  */
 public abstract class BaseCassandraBolt<T> extends BaseTickTupleAwareRichBolt {
 
@@ -59,12 +59,11 @@
     private CQLStatementTupleMapper mapper;
     private ExecutionResultHandler resultHandler;
 
-    transient private Map<String, Fields> outputsFields = new HashMap<>();
+    private transient Map<String, Fields> outputsFields = new HashMap<>();
     private Map<String, Object> cassandraConfig;
 
     /**
      * Creates a new {@link CassandraWriterBolt} instance.
-     * @param mapper
      */
     public BaseCassandraBolt(CQLStatementTupleMapper mapper, SimpleClientProvider clientProvider) {
         this.mapper = mapper;
@@ -73,7 +72,6 @@
 
     /**
      * Creates a new {@link CassandraWriterBolt} instance.
-     * @param tupleMapper
      */
     public BaseCassandraBolt(CQLStatementTupleMapper tupleMapper) {
         this(tupleMapper, new CassandraContext());
@@ -106,7 +104,7 @@
     }
 
     /**
-     * Configures this bolt with the given {@code fields} as outputfields with stream id as {@link Utils#DEFAULT_STREAM_ID}
+     * Configures this bolt with the given {@code fields} as outputfields with stream id as {@link Utils#DEFAULT_STREAM_ID}.
      *
      * @param fields outputfields
      */
@@ -118,12 +116,11 @@
 
     /**
      * Configures this bolt given {@code fields} as outputfields for the given {@code stream}.
-     *
-     * @param stream
-     * @param fields
      */
     public BaseCassandraBolt withStreamOutputFields(String stream, Fields fields) {
-        if (stream == null || stream.length() == 0) throw new IllegalArgumentException("'stream' should not be null");
+        if (stream == null || stream.length() == 0) {
+            throw new IllegalArgumentException("'stream' should not be null");
+        }
         this.outputsFields.put(stream, fields);
         return this;
     }
@@ -131,8 +128,6 @@
     /**
      * Takes the given {@code config} for creating cassandra client.
      * {@link CassandraConf} contains all the properties that can be configured.
-     *
-     * @param config
      */
     public BaseCassandraBolt withCassandraConfig(Map<String, Object> config) {
         if (config == null) {
@@ -143,7 +138,9 @@
     }
 
     protected ExecutionResultHandler getResultHandler() {
-        if (resultHandler == null) resultHandler = new BaseExecutionResultHandler();
+        if (resultHandler == null) {
+            resultHandler = new BaseExecutionResultHandler();
+        }
         return resultHandler;
     }
 
@@ -151,7 +148,7 @@
         return mapper;
     }
 
-    abstract protected AsyncResultHandler<T> getAsyncHandler();
+    protected abstract AsyncResultHandler<T> getAsyncHandler();
 
     protected AsyncExecutor<T> getAsyncExecutor() {
         return AsyncExecutorProvider.getLocal(session, getAsyncHandler());
@@ -176,7 +173,9 @@
         // outputsFields can be empty if this bolt acts like a sink in topology.
         if (!outputsFields.isEmpty()) {
             Fields fields = outputsFields.remove(Utils.DEFAULT_STREAM_ID);
-            if (fields != null) declarer.declare(fields);
+            if (fields != null) {
+                declarer.declare(fields);
+            }
             for (Map.Entry<String, Fields> entry : outputsFields.entrySet()) {
                 declarer.declareStream(entry.getKey(), entry.getValue());
             }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BatchCassandraWriterBolt.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BatchCassandraWriterBolt.java
index d87ecab..12c4eac 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BatchCassandraWriterBolt.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/BatchCassandraWriterBolt.java
@@ -32,7 +32,7 @@
 public class BatchCassandraWriterBolt extends BaseCassandraBolt<List<Tuple>> {
 
     public static final int DEFAULT_EMIT_FREQUENCY = 2;
-    private final static Logger LOG = LoggerFactory.getLogger(BatchCassandraWriterBolt.class);
+    private static final Logger LOG = LoggerFactory.getLogger(BatchCassandraWriterBolt.class);
     private LinkedBlockingQueue<Tuple> queue;
 
     private int tickFrequencyInSeconds;
@@ -41,14 +41,12 @@
 
     private int batchMaxSize = 1000;
 
-    private String componentID;
+    private String componentId;
 
     private AsyncResultHandler<List<Tuple>> asyncResultHandler;
 
     /**
      * Creates a new {@link CassandraWriterBolt} instance.
-     *
-     * @param tupleMapper
      */
     public BatchCassandraWriterBolt(CQLStatementTupleMapper tupleMapper) {
         this(tupleMapper, DEFAULT_EMIT_FREQUENCY);
@@ -56,8 +54,6 @@
 
     /**
      * Creates a new {@link CassandraWriterBolt} instance.
-     *
-     * @param tupleMapper
      */
     public BatchCassandraWriterBolt(CQLStatementTupleMapper tupleMapper, int tickFrequencyInSeconds) {
         super(tupleMapper);
@@ -70,7 +66,7 @@
     @Override
     public void prepare(Map<String, Object> topoConfig, TopologyContext topologyContext, OutputCollector outputCollector) {
         super.prepare(topoConfig, topologyContext, outputCollector);
-        this.componentID = topologyContext.getThisComponentId();
+        this.componentId = topologyContext.getThisComponentId();
         this.queue = new LinkedBlockingQueue<>(batchMaxSize);
         this.lastModifiedTimesMillis = now();
     }
@@ -144,8 +140,9 @@
 
         for (Tuple t : inputs) {
             List<Statement> sl = getMapper().map(topoConfig, session, t);
-            for (Statement s : sl)
+            for (Statement s : sl) {
                 stmts.add(new PairStatementTuple(t, s));
+            }
         }
         return stmts;
     }
@@ -158,7 +155,7 @@
     }
 
     private String logPrefix() {
-        return componentID + " - ";
+        return componentId + " - ";
     }
 
     public BatchCassandraWriterBolt withTickFrequency(long time, TimeUnit unit) {
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/CassandraWriterBolt.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/CassandraWriterBolt.java
index fc45b3f..d802941 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/CassandraWriterBolt.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/CassandraWriterBolt.java
@@ -25,8 +25,6 @@
 
     /**
      * Creates a new {@link CassandraWriterBolt} instance.
-     *
-     * @param tupleMapper
      */
     public CassandraWriterBolt(CQLStatementTupleMapper tupleMapper) {
         super(tupleMapper);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/GroupingBatchBuilder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/GroupingBatchBuilder.java
index 3697f81..f7ad027 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/GroupingBatchBuilder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/GroupingBatchBuilder.java
@@ -20,9 +20,6 @@
 import java.util.List;
 import org.apache.storm.tuple.Tuple;
 
-/**
- *
- */
 public class GroupingBatchBuilder implements Iterable<PairBatchStatementTuples> {
 
     private int batchSizeRows;
@@ -31,7 +28,6 @@
 
     /**
      * Creates a new  {@link GroupingBatchBuilder} instance.
-     * @param batchSizeRows
      */
     public GroupingBatchBuilder(int batchSizeRows, List<PairStatementTuple> statements) {
         this.batchSizeRows = batchSizeRows;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/PairStatementTuple.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/PairStatementTuple.java
index d95e9d3..34cec23 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/PairStatementTuple.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/bolt/PairStatementTuple.java
@@ -26,8 +26,6 @@
 
     /**
      * Creates a new {@link PairStatementTuple} instance.
-     * @param tuple
-     * @param statement
      */
     public PairStatementTuple(Tuple tuple, Statement statement) {
         this.tuple = tuple;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/CassandraConf.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/CassandraConf.java
index 82f017f..e1ebbcb 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/CassandraConf.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/CassandraConf.java
@@ -62,7 +62,7 @@
      */
     private String username;
     /**
-     * The authorized cassandra password
+     * The authorized cassandra password.
      */
     private String password;
     /**
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/impl/DefaultClient.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/impl/DefaultClient.java
index d8495cc..656ab5c 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/impl/DefaultClient.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/client/impl/DefaultClient.java
@@ -30,7 +30,7 @@
  */
 public class DefaultClient implements SimpleClient, Closeable, Serializable {
 
-    private final static Logger LOG = LoggerFactory.getLogger(DefaultClient.class);
+    private static final Logger LOG = LoggerFactory.getLogger(DefaultClient.class);
 
     private String keyspace;
 
@@ -72,8 +72,9 @@
     public synchronized Session connect() throws NoHostAvailableException {
         if (isDisconnected()) {
             LOG.info("Connected to cluster: {}", cluster.getClusterName());
-            for (Host host : getAllHosts())
+            for (Host host : getAllHosts()) {
                 LOG.info("Datacenter: {}; Host: {}; Rack: {}", host.getDatacenter(), host.getAddress(), host.getRack());
+            }
 
             LOG.info("Connect to cluster using keyspace %s", keyspace);
             session = cluster.connect(keyspace);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BaseBeanFactory.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BaseBeanFactory.java
index 0c1964e..de6afaf 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BaseBeanFactory.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BaseBeanFactory.java
@@ -36,7 +36,9 @@
      */
     @Override
     public synchronized T get(Map<String, Object> topoConf) {
-        if (instance != null) return instance;
+        if (instance != null) {
+            return instance;
+        }
         return instance = make(topoConf);
     }
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BeanFactory.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BeanFactory.java
index fb712a5..8dbbbae 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BeanFactory.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/BeanFactory.java
@@ -22,14 +22,12 @@
 
     /**
      * Sets the storm context.
-     * @param context
      */
     public void setStormContext(WorkerCtx context);
 
     /**
      * Return an instance, which may be shared or independent, of the specified type.
      * @param topoConf The storm configuration
-     * @return
      */
     T get(Map<String, Object> topoConf);
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/WorkerCtx.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/WorkerCtx.java
index 1a457f9..a6f771b 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/WorkerCtx.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/context/WorkerCtx.java
@@ -43,19 +43,19 @@
 
     /**
      * Return an instance provider of the specified type.
-     * @throws RuntimeException if no bean provider can be resolve for the given class.
-     * @return
+     * @throws RuntimeException if no bean provider can be resolved for the given class
      */
     protected <T> BeanFactory<T> getBeanfactory(Class<T> clazz) {
         BeanFactory<T> factory = (BeanFactory<T>) this.componentCtx.get(clazz);
-        if (factory == null) throw new RuntimeException("Cannot resolve bean factory for class : " + clazz.getCanonicalName());
+        if (factory == null) {
+            throw new RuntimeException("Cannot resolve bean factory for class : " + clazz.getCanonicalName());
+        }
         factory.setStormContext(this);
         return factory;
     }
 
     /**
      * Return an instance, which is shared (within a Worker), of the specified type.
-     * @return
      */
     public <T, K, V> T getWorkerBean(Class<T> clazz, Map<K, V> topoConf) {
         return getWorkerBean(clazz, topoConf, false);
@@ -71,7 +71,9 @@
      * @return a instance of type {@link T}.
      */
     public <T, K, V> T getWorkerBean(Class<T> clazz, Map<K, V> topoConf, boolean force) {
-        if (force) workerCtx.remove(clazz);
+        if (force) {
+            workerCtx.remove(clazz);
+        }
         BeanFactory<T> factory = (BeanFactory<T>) this.workerCtx.get(clazz);
         if (factory == null) {
             BeanFactory<T> instance = getBeanfactory(clazz).newInstance();
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/AsyncExecutor.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/AsyncExecutor.java
index 53e6ee6..eec9026 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/AsyncExecutor.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/AsyncExecutor.java
@@ -38,7 +38,7 @@
  */
 public class AsyncExecutor<T> implements Serializable {
 
-    private final static Logger LOG = LoggerFactory.getLogger(AsyncExecutor.class);
+    private static final Logger LOG = LoggerFactory.getLogger(AsyncExecutor.class);
 
     protected Session session;
 
@@ -79,8 +79,9 @@
 
         List<SettableFuture<T>> settableFutures = new ArrayList<>(statements.size());
 
-        for (Statement s : statements)
+        for (Statement s : statements) {
             settableFutures.add(execAsync(s, input, AsyncResultHandler.NO_OP_HANDLER));
+        }
 
         ListenableFuture<List<T>> allAsList = Futures.allAsList(settableFutures);
         Futures.addCallback(allAsList, new FutureCallback<List<T>>() {
@@ -272,12 +273,12 @@
             int top5 = Math.min(exceptions.size(), 5);
             StringBuilder sb = new StringBuilder();
             sb.append("First ")
-              .append(top5)
-              .append(" exceptions: ")
-              .append(System.lineSeparator());
+                    .append(top5)
+                    .append(" exceptions: ")
+                    .append(System.lineSeparator());
             for (int i = 0; i < top5; i++) {
                 sb.append(exceptions.get(i).getMessage())
-                  .append(System.lineSeparator());
+                        .append(System.lineSeparator());
             }
             return sb.toString();
         }
@@ -287,13 +288,13 @@
             StringBuilder sb = new StringBuilder();
 
             sb.append(getMessage())
-              .append(System.lineSeparator())
-              .append("Multiple exceptions encountered: ")
-              .append(System.lineSeparator());
+                    .append(System.lineSeparator())
+                    .append("Multiple exceptions encountered: ")
+                    .append(System.lineSeparator());
 
             for (Throwable exception : exceptions) {
                 sb.append(exception.toString())
-                  .append(System.lineSeparator());
+                        .append(System.lineSeparator());
             }
 
             return super.toString();
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/ExecutionResultCollector.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/ExecutionResultCollector.java
index 44721ad..8c8057d 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/ExecutionResultCollector.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/ExecutionResultCollector.java
@@ -51,8 +51,12 @@
          */
         @Override
         public void handle(OutputCollector collector, ExecutionResultHandler handler) {
-            for (Tuple t : inputs) handler.onQuerySuccess(collector, t);
-            for (Tuple t : inputs) collector.ack(t);
+            for (Tuple t : inputs) {
+                handler.onQuerySuccess(collector, t);
+            }
+            for (Tuple t : inputs) {
+                collector.ack(t);
+            }
         }
     }
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/BatchAsyncResultHandler.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/BatchAsyncResultHandler.java
index 27139fa..f2ad2a6 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/BatchAsyncResultHandler.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/BatchAsyncResultHandler.java
@@ -20,7 +20,6 @@
 import org.apache.storm.task.OutputCollector;
 import org.apache.storm.tuple.Tuple;
 
-
 public class BatchAsyncResultHandler implements AsyncResultHandler<List<Tuple>> {
 
     private ConcurrentLinkedQueue<ExecutionResultCollector> completed;
@@ -29,7 +28,6 @@
 
     /**
      * Creates a new {@link BatchAsyncResultHandler} instance.
-     * @param handler
      */
     public BatchAsyncResultHandler(ExecutionResultHandler handler) {
         this.handler = handler;
@@ -39,7 +37,7 @@
     /**
      * This method is responsible for failing specified inputs.
      *
-     * The default method does no-operation.
+     * <p>The default implementation is a no-op.
      */
     @Override
     public void failure(Throwable t, List<Tuple> input) {
@@ -49,7 +47,7 @@
     /**
      * This method is responsible for acknowledging specified inputs.
      *
-     * The default method does no-operation.
+     * <p>The default implementation is a no-op.
      */
     @Override
     public void success(List<Tuple> input) {
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/SingleAsyncResultHandler.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/SingleAsyncResultHandler.java
index 127a06d..f24988d 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/SingleAsyncResultHandler.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/executor/impl/SingleAsyncResultHandler.java
@@ -19,7 +19,6 @@
 import org.apache.storm.task.OutputCollector;
 import org.apache.storm.tuple.Tuple;
 
-
 public class SingleAsyncResultHandler implements AsyncResultHandler<Tuple> {
 
     private ConcurrentLinkedQueue<ExecutionResultCollector> completed;
@@ -28,7 +27,6 @@
 
     /**
      * Creates a new {@link SingleAsyncResultHandler} instance.
-     * @param handler
      */
     public SingleAsyncResultHandler(ExecutionResultHandler handler) {
         this.handler = handler;
@@ -38,7 +36,7 @@
     /**
      * This method is responsible for failing specified inputs.
      *
-     * The default method does no-operation.
+     * <p>The default implementation is a no-op.
      */
     @Override
     public void failure(Throwable t, Tuple input) {
@@ -48,7 +46,7 @@
     /**
      * This method is responsible for acknowledging specified inputs.
      *
-     * The default method does no-operation.
+     * <p>The default implementation is a no-op.
      */
     @Override
     public void success(Tuple input) {
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/AyncCQLResultSetValuesMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/AyncCQLResultSetValuesMapper.java
index f57a99c..c6b331f 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/AyncCQLResultSetValuesMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/AyncCQLResultSetValuesMapper.java
@@ -20,8 +20,9 @@
 import org.apache.storm.tuple.Values;
 
 /**
- * A resultset mapper that
+ * A resultset mapper that asynchronously maps CQL statements to lists of {@link org.apache.storm.tuple.Values}.
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public interface AyncCQLResultSetValuesMapper extends Serializable {
 
     List<List<Values>> map(Session session, List<Statement> statements, List<ITuple> tuples);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/BaseCQLStatementTupleMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/BaseCQLStatementTupleMapper.java
index 9cbac15..100772b 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/BaseCQLStatementTupleMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/BaseCQLStatementTupleMapper.java
@@ -22,8 +22,8 @@
 
 /**
  * Default interface to map a {@link org.apache.storm.tuple.ITuple} to a CQL {@link com.datastax.driver.core.Statement}.
- *
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public abstract class BaseCQLStatementTupleMapper implements CQLStatementTupleMapper, Serializable {
 
     /**
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLResultSetValuesMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLResultSetValuesMapper.java
index 2d50161..87f3830 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLResultSetValuesMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLResultSetValuesMapper.java
@@ -19,9 +19,7 @@
 import org.apache.storm.tuple.ITuple;
 import org.apache.storm.tuple.Values;
 
-/**
- *
- */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public interface CQLResultSetValuesMapper extends Serializable {
 
     List<List<Values>> map(Session session, Statement statement, ITuple tuple);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementBuilder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementBuilder.java
index c12aad0..8a9b4d3 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementBuilder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementBuilder.java
@@ -14,7 +14,7 @@
 
 import java.io.Serializable;
 
-
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public interface CQLStatementBuilder<T extends CQLStatementTupleMapper> extends Serializable {
 
     /**
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementTupleMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementTupleMapper.java
index ccc8bc8..29290a2 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementTupleMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CQLStatementTupleMapper.java
@@ -23,6 +23,7 @@
 /**
  * Default interface to map a {@link org.apache.storm.tuple.ITuple} to a CQL {@link com.datastax.driver.core.Statement}.
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public interface CQLStatementTupleMapper extends Serializable {
 
     /**
@@ -35,6 +36,7 @@
      */
     List<Statement> map(Map<String, Object> conf, Session session, ITuple tuple);
 
+    @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
     public static class DynamicCQLStatementTupleMapper implements CQLStatementTupleMapper {
         private List<CQLStatementBuilder> builders;
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/Column.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/Column.java
index dd94c28..097f4df 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/Column.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/Column.java
@@ -16,9 +16,6 @@
 import java.util.ArrayList;
 import java.util.List;
 
-/**
- *
- */
 public class Column<T> implements Serializable {
 
     private final String columnName;
@@ -31,8 +28,9 @@
 
     public static Object[] getVals(List<Column> columns) {
         List<Object> vals = new ArrayList<>(columns.size());
-        for (Column c : columns)
+        for (Column c : columns) {
             vals.add(c.getVal());
+        }
         return vals.toArray();
     }
 
@@ -44,17 +42,27 @@
         return val;
     }
 
-    public boolean isNull() { return val == null;}
+    public boolean isNull() {
+        return val == null;
+    }
 
     @Override
     public boolean equals(Object o) {
-        if (this == o) return true;
-        if (o == null || getClass() != o.getClass()) return false;
+        if (this == o) {
+            return true;
+        }
+        if (o == null || getClass() != o.getClass()) {
+            return false;
+        }
 
         Column column = (Column) o;
 
-        if (columnName != null ? !columnName.equals(column.columnName) : column.columnName != null) return false;
-        if (val != null ? !val.equals(column.val) : column.val != null) return false;
+        if (columnName != null ? !columnName.equals(column.columnName) : column.columnName != null) {
+            return false;
+        }
+        if (val != null ? !val.equals(column.val) : column.val != null) {
+            return false;
+        }
 
         return true;
     }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/ContextQuery.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/ContextQuery.java
index bab39eb..f308540 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/ContextQuery.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/ContextQuery.java
@@ -19,7 +19,6 @@
 /**
  * This interface may be used to retrieve a cassandra bound query either from storm config
  * or the tuple being proceed.
- *
  */
 public interface ContextQuery extends Serializable {
 
@@ -41,7 +40,6 @@
 
         /**
          * Creates a new {@link StaticContextQuery} instance.
-         * @param value
          */
         public StaticContextQuery(String value) {
             this.value = value;
@@ -66,7 +64,9 @@
 
         @Override
         public String resolves(Map<String, Object> config, ITuple tuple) {
-            if (config.containsKey(key)) return (String) config.get(key);
+            if (config.containsKey(key)) {
+                return (String) config.get(key);
+            }
 
             throw new IllegalArgumentException("Bound query '" + key + "' does not exist in configuration");
         }
@@ -87,7 +87,9 @@
         @Override
         public String resolves(Map<String, Object> config, ITuple tuple) {
             String name = tuple.getStringByField(fieldName);
-            if (config.containsKey(name)) return (String) config.get(name);
+            if (config.containsKey(name)) {
+                return (String) config.get(name);
+            }
             throw new IllegalArgumentException("Bound query '" + name + "' does not exist in configuration");
         }
     }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CqlMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CqlMapper.java
index 538da8d..9a1892b 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CqlMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/CqlMapper.java
@@ -49,8 +49,9 @@
         @Override
         public List<Column> map(ITuple tuple) {
             List<Column> columns = new ArrayList<>(selectors.size());
-            for (FieldSelector selector : selectors)
+            for (FieldSelector selector : selectors) {
                 columns.add(selector.select(tuple));
+            }
             return columns;
         }
     }
@@ -71,8 +72,9 @@
         @Override
         public List<Column> map(ITuple tuple) {
             List<Column> columns = new ArrayList<>(tuple.size());
-            for (String name : tuple.getFields())
+            for (String name : tuple.getFields()) {
                 columns.add(new Column(name, tuple.getValueByField(name)));
+            }
             return columns;
         }
     }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/BoundCQLStatementMapperBuilder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/BoundCQLStatementMapperBuilder.java
index 271ec3d..2cf23b7 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/BoundCQLStatementMapperBuilder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/BoundCQLStatementMapperBuilder.java
@@ -12,6 +12,8 @@
 
 package org.apache.storm.cassandra.query.builder;
 
+import static org.apache.storm.cassandra.query.ContextQuery.StaticContextQuery;
+
 import java.io.Serializable;
 import java.util.Arrays;
 import java.util.List;
@@ -23,8 +25,7 @@
 import org.apache.storm.cassandra.query.impl.RoutingKeyGenerator;
 import org.apache.storm.cassandra.query.selector.FieldSelector;
 
-import static org.apache.storm.cassandra.query.ContextQuery.StaticContextQuery;
-
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class BoundCQLStatementMapperBuilder implements CQLStatementBuilder<BoundCQLStatementTupleMapper>, Serializable {
 
     private final ContextQuery contextQuery;
@@ -37,7 +38,6 @@
 
     /**
      * Creates a new {@link BoundCQLStatementMapperBuilder} instance.
-     * @param cql
      */
     public BoundCQLStatementMapperBuilder(String cql) {
         this.contextQuery = new StaticContextQuery(cql);
@@ -45,7 +45,6 @@
 
     /**
      * Creates a new {@link BoundCQLStatementMapperBuilder} instance.
-     * @param contextQuery
      */
     public BoundCQLStatementMapperBuilder(ContextQuery contextQuery) {
         this.contextQuery = contextQuery;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/SimpleCQLStatementMapperBuilder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/SimpleCQLStatementMapperBuilder.java
index 0349310..70a9cc4 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/SimpleCQLStatementMapperBuilder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/builder/SimpleCQLStatementMapperBuilder.java
@@ -25,6 +25,7 @@
 /**
  * Default class to build {@link org.apache.storm.cassandra.query.impl.SimpleCQLStatementMapper} instance.
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class SimpleCQLStatementMapperBuilder implements CQLStatementBuilder<SimpleCQLStatementMapper>, Serializable {
 
     private final String queryString;
@@ -67,11 +68,6 @@
         return this;
     }
 
-    public final SimpleCQLStatementMapperBuilder withRoutingKeys(String... fields) {
-        this.routingKeys = Arrays.asList(fields);
-        return this;
-    }
-
     /**
      * Includes only the specified tuple fields.
      *
@@ -81,5 +77,10 @@
         this.mapper = mapper;
         return this;
     }
+
+    public final SimpleCQLStatementMapperBuilder withRoutingKeys(String... fields) {
+        this.routingKeys = Arrays.asList(fields);
+        return this;
+    }
 }
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BatchCQLStatementTupleMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BatchCQLStatementTupleMapper.java
index 8d68a1e..860af67 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BatchCQLStatementTupleMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BatchCQLStatementTupleMapper.java
@@ -22,7 +22,7 @@
 import org.apache.storm.cassandra.query.CQLStatementTupleMapper;
 import org.apache.storm.tuple.ITuple;
 
-
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class BatchCQLStatementTupleMapper implements CQLStatementTupleMapper {
 
     private final List<CQLStatementTupleMapper> mappers;
@@ -30,8 +30,6 @@
 
     /**
      * Creates a new {@link BatchCQLStatementTupleMapper} instance.
-     * @param type
-     * @param mappers
      */
     public BatchCQLStatementTupleMapper(BatchStatement.Type type, List<CQLStatementTupleMapper> mappers) {
         this.mappers = new ArrayList<>(mappers);
@@ -44,8 +42,9 @@
     @Override
     public List<Statement> map(Map<String, Object> conf, Session session, ITuple tuple) {
         final BatchStatement batch = new BatchStatement(this.type);
-        for (CQLStatementTupleMapper m : mappers)
+        for (CQLStatementTupleMapper m : mappers) {
             batch.addAll(m.map(conf, session, tuple));
+        }
         return Arrays.asList((Statement) batch);
     }
 }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BoundCQLStatementTupleMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BoundCQLStatementTupleMapper.java
index 40e2c34..e817f16 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BoundCQLStatementTupleMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/BoundCQLStatementTupleMapper.java
@@ -27,6 +27,7 @@
 import org.apache.storm.cassandra.query.CqlMapper;
 import org.apache.storm.tuple.ITuple;
 
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class BoundCQLStatementTupleMapper implements CQLStatementTupleMapper {
 
     private final ContextQuery contextQuery;
@@ -38,12 +39,6 @@
 
     /**
      * Creates a new {@link BoundCQLStatementTupleMapper} instance.
-     *
-     * @param contextQuery
-     * @param mapper
-     * @param mapper
-     * @param rkGenerator
-     * @param binder
      */
     public BoundCQLStatementTupleMapper(ContextQuery contextQuery, CqlMapper mapper, RoutingKeyGenerator rkGenerator,
                                         PreparedStatementBinder binder) {
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/ObjectMapperCqlStatementMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/ObjectMapperCqlStatementMapper.java
index 4f5a021..f60c76c 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/ObjectMapperCqlStatementMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/ObjectMapperCqlStatementMapper.java
@@ -34,8 +34,9 @@
 import org.apache.storm.tuple.ITuple;
 
 /**
- * Tuple mapper that is able to map objects annotated with {@link com.datastax.driver.mapping.annotations.Table} to CQL statements
+ * Tuple mapper that is able to map objects annotated with {@link com.datastax.driver.mapping.annotations.Table} to CQL statements.
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class ObjectMapperCqlStatementMapper implements CQLStatementTupleMapper {
     private static final Map<Session, MappingManager> mappingManagers = new WeakHashMap<>();
 
@@ -83,7 +84,7 @@
                 options.add(Option.timestamp(((Number) timestampObject).longValue()));
             } else if (timestampObject instanceof Instant) {
                 Instant timestamp = (Instant) timestampObject;
-                options.add(Option.timestamp(timestamp.getEpochSecond() * 1000_0000l + timestamp.getNano() / 1000l));
+                options.add(Option.timestamp(timestamp.getEpochSecond() * 1000_0000L + timestamp.getNano() / 1000L));
             }
         }
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/PreparedStatementBinder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/PreparedStatementBinder.java
index 5a4d39e..1c82ee6 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/PreparedStatementBinder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/PreparedStatementBinder.java
@@ -19,9 +19,6 @@
 import java.util.List;
 import org.apache.storm.cassandra.query.Column;
 
-/**
- *
- */
 public interface PreparedStatementBinder extends Serializable {
 
     public BoundStatement apply(PreparedStatement statement, List<Column> columns);
@@ -38,6 +35,7 @@
         }
     }
 
+    @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
     public static final class CQL3NamedSettersBinder implements PreparedStatementBinder {
 
         /**
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/RoutingKeyGenerator.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/RoutingKeyGenerator.java
index 948a21f..2c1e238 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/RoutingKeyGenerator.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/RoutingKeyGenerator.java
@@ -27,7 +27,6 @@
 
     /**
      * Creates a new {@link RoutingKeyGenerator} instance.
-     * @param routingKeys
      */
     public RoutingKeyGenerator(List<String> routingKeys) {
         Preconditions.checkNotNull(routingKeys);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/SimpleCQLStatementMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/SimpleCQLStatementMapper.java
index 17a9049..f3daf59 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/SimpleCQLStatementMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/impl/SimpleCQLStatementMapper.java
@@ -25,9 +25,7 @@
 import org.apache.storm.cassandra.query.CqlMapper;
 import org.apache.storm.tuple.ITuple;
 
-/**
- *
- */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class SimpleCQLStatementMapper implements CQLStatementTupleMapper {
 
     private final String queryString;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/selector/FieldSelector.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/selector/FieldSelector.java
index dd9222a..28d9506 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/selector/FieldSelector.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/query/selector/FieldSelector.java
@@ -25,7 +25,7 @@
 
     /**
      * Creates a new {@link FieldSelector} instance.
-     * @param field the name of value.
+     * @param field the name of the field to select
      */
     public FieldSelector(String field) {
         this.field = field;
@@ -36,8 +36,8 @@
     }
 
     /**
-     * @param tuple
-     * @return Compute the value of this field from given {@code tuple}.
+     * Gets the value of this field from the given tuple.
+     * @return the value of this field computed from the given {@code tuple}
      */
     protected Object getFieldValue(ITuple tuple) {
         return tuple.getValueByField(field);
@@ -45,7 +45,7 @@
 
     /**
      * Sets the fields as an automatically generated TimeUUID.
-     * @return this.
+     * @return this
      */
     public FieldSelector now() {
         this.isNow = true;
@@ -55,8 +55,8 @@
     /**
      * Sets an alias for this field.
      *
-     * @param as the alias name.
-     * @return this.
+     * @param as the alias name
+     * @return this
      */
     public FieldSelector as(String as) {
         this.as = as;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraBackingMap.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraBackingMap.java
index 61f9001..98bdbdb 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraBackingMap.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraBackingMap.java
@@ -43,22 +43,20 @@
 /**
  * An IBackingState implementation for Cassandra.
  *
- * The implementation stores state as a binary blob in cassandra using a {@link Serializer}.
+ * <p>The implementation stores state as a binary blob in cassandra using a {@link Serializer}.
  * It supports Opaque, Transactional and NonTransactional states, given a matching serializer.
  *
- * Configuration is done with three separate constructs:
+ * <p>Configuration is done with three separate constructs:
  *  - One tuple mapper for multiGet, which should map keys to a select statement and return {@link Values}.
  *  - One state mapper, which maps the state to/from a {@link Values} representation, which is used for binding.
  *  - One tuple mapper for multiPut, which should map {@link Values} to an INSERT or UPDATE statement.
  *
- * {@link #multiPut(List, List)} updates Cassandra with parallel statements.
+ * <p>{@link #multiPut(List, List)} updates Cassandra with parallel statements.
  * {@link #multiGet(List)} queries Cassandra with parallel statements.
  *
- * Parallelism defaults to half the maximum requests per host, either local or remote whichever is
+ * <p>Parallelism defaults to half the maximum requests per host, either local or remote whichever is
  * lower. The driver defaults to 256 for remote hosts and 1024 for local hosts, so the default value is 128
  * unless the driver is configured otherwise.
- *
- * @param <T>
  */
 public class CassandraBackingMap<T> implements IBackingMap<T> {
 
@@ -173,6 +171,7 @@
             return this;
         }
 
+        @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
         public Options<T> withNonTransactionalJSONBinaryState(String fieldName) {
             this.stateMapper = new SerializedStateMapper<>(fieldName, new JSONNonTransactionalSerializer());
             return this;
@@ -183,6 +182,7 @@
             return this;
         }
 
+        @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
         public Options<T> withTransactionalJSONBinaryState(String fieldName) {
             this.stateMapper = new SerializedStateMapper<>(fieldName, new JSONTransactionalSerializer());
             return this;
@@ -193,6 +193,7 @@
             return this;
         }
 
+        @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
         public Options<T> withOpaqueJSONBinaryState(String fieldName) {
             this.stateMapper = new SerializedStateMapper<>(fieldName, new JSONOpaqueSerializer());
             return this;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraMapStateFactory.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraMapStateFactory.java
index 5b82a6b..a1a5c4c 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraMapStateFactory.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraMapStateFactory.java
@@ -29,9 +29,8 @@
 /**
  * A StateFactory implementation that creates a MapState backed by CassandraBackingMap.
  *
- * The statefactory supports opaque, transactional and non-transactional configurations.
+ * <p>The statefactory supports opaque, transactional and non-transactional configurations.
  * Optionally, the backing map can be wrapped in a {@link CachedMap} by specifying {@link #withCache} (off by default).
- *
  */
 public class CassandraMapStateFactory implements StateFactory {
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraQuery.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraQuery.java
index 4fafa61..271f9e7 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraQuery.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraQuery.java
@@ -18,9 +18,6 @@
 import org.apache.storm.trident.tuple.TridentTuple;
 import org.apache.storm.tuple.Values;
 
-/**
- *
- */
 public class CassandraQuery extends BaseQueryFunction<CassandraState, List<Values>> {
 
     @Override
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraState.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraState.java
index 9ecae36..c446a18 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraState.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraState.java
@@ -32,9 +32,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-/**
- *
- */
 public class CassandraState implements State {
 
     private static final Logger LOG = LoggerFactory.getLogger(CassandraState.class);
@@ -131,11 +128,13 @@
             this.clientProvider = clientProvider;
         }
 
+        @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
         public Options withCQLStatementTupleMapper(CQLStatementTupleMapper cqlStatementTupleMapper) {
             this.cqlStatementTupleMapper = cqlStatementTupleMapper;
             return this;
         }
 
+        @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
         public Options withCQLResultSetValuesMapper(CQLResultSetValuesMapper cqlResultSetValuesMapper) {
             this.cqlResultSetValuesMapper = cqlResultSetValuesMapper;
             return this;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateFactory.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateFactory.java
index fcdbb1c..f42d6b0 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateFactory.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateFactory.java
@@ -20,9 +20,6 @@
 import org.apache.storm.trident.state.State;
 import org.apache.storm.trident.state.StateFactory;
 
-/**
- *
- */
 public class CassandraStateFactory implements StateFactory {
     private final CassandraState.Options options;
 
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateUpdater.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateUpdater.java
index e4bb380..cf63691 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateUpdater.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/CassandraStateUpdater.java
@@ -17,9 +17,6 @@
 import org.apache.storm.trident.state.BaseStateUpdater;
 import org.apache.storm.trident.tuple.TridentTuple;
 
-/**
- *
- */
 public class CassandraStateUpdater extends BaseStateUpdater<CassandraState> {
 
     @Override
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/MapStateFactoryBuilder.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/MapStateFactoryBuilder.java
index ed8ab71..82122f9 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/MapStateFactoryBuilder.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/MapStateFactoryBuilder.java
@@ -12,6 +12,13 @@
 
 package org.apache.storm.cassandra.trident.state;
 
+import static com.datastax.driver.core.querybuilder.QueryBuilder.bindMarker;
+import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
+import static com.datastax.driver.core.querybuilder.QueryBuilder.insertInto;
+import static com.datastax.driver.core.querybuilder.QueryBuilder.select;
+import static org.apache.storm.cassandra.DynamicStatementBuilder.all;
+import static org.apache.storm.cassandra.DynamicStatementBuilder.boundQuery;
+
 import com.datastax.driver.core.querybuilder.Insert;
 import com.datastax.driver.core.querybuilder.Select;
 import java.util.ArrayList;
@@ -33,36 +40,29 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static com.datastax.driver.core.querybuilder.QueryBuilder.bindMarker;
-import static com.datastax.driver.core.querybuilder.QueryBuilder.eq;
-import static com.datastax.driver.core.querybuilder.QueryBuilder.insertInto;
-import static com.datastax.driver.core.querybuilder.QueryBuilder.select;
-import static org.apache.storm.cassandra.DynamicStatementBuilder.all;
-import static org.apache.storm.cassandra.DynamicStatementBuilder.boundQuery;
-
 /**
  * A helper for building a MapState backed by Cassandra. It internalizes some common
  * implementation choices to simplify usage.
  *
- * In the simplest use case, a map state can be constructed with:
+ * <p>In the simplest use case, a map state can be constructed with:
  *
- * StateFactory mapState = MapStateFactoryBuilder.opaque()
+ * <p>StateFactory mapState = MapStateFactoryBuilder.opaque()
  *     .withTable("mykeyspace", "year_month_state")
  *     .withKeys("year", "month")
  *     .withJSONBinaryState("state")
  *     .build();
  *
- * for a cassandra table with:
+ * <p>for a cassandra table with:
  * mykeyspace.year_month_state {
  *     year: int,
  *     month: int,
  *     state: blob
  * }
  *
- * This will use the storm JSON serializers to convert the state to and from binary format.
+ * <p>This will use the storm JSON serializers to convert the state to and from binary format.
  * Other binary serializers can be used with the {@link #withBinaryState(String, Serializer)} method.
  *
- * Storing state in explicit fields (e.g. in a field "sum" of type int) is possible by instead calling
+ * <p>Storing state in explicit fields (e.g. in a field "sum" of type int) is possible by instead calling
  * {@link #withStateMapper(StateMapper)}. For instance, you can use {@link NonTransactionalTupleStateMapper},
  * {@link TransactionalTupleStateMapper} or {@link OpaqueTupleStateMapper} if your state values are tuples.
  *
@@ -114,7 +114,7 @@
         return this;
     }
 
-    @SuppressWarnings("unchecked")
+    @SuppressWarnings("checkstyle:AbbreviationAsWordInName")
     public MapStateFactoryBuilder<T> withJSONBinaryState(String stateField) {
         switch (stateType) {
             case OPAQUE:
@@ -215,9 +215,10 @@
             case OPAQUE:
                 return CassandraMapStateFactory.opaque(options, cassandraConfig)
                                                .withCache(cacheSize);
+            default:
+                throw new IllegalArgumentException(String.format("stateType %s not supported",
+                        stateType));
         }
-
-        return null;
     }
 
 }
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/OpaqueTupleStateMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/OpaqueTupleStateMapper.java
index a7e857b..f79f67b 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/OpaqueTupleStateMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/OpaqueTupleStateMapper.java
@@ -80,6 +80,7 @@
         }
         Values values = valuesList.get(0);
         int index = 0;
+        @SuppressWarnings("checkstyle:VariableDeclarationUsageDistance")
         Long currTx = (Long) values.get(index++);
 
         SimpleTuple curr = new SimpleTuple(tupleFields);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TransactionalTupleStateMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TransactionalTupleStateMapper.java
index 7b3d670..0e45d0f 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TransactionalTupleStateMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TransactionalTupleStateMapper.java
@@ -69,6 +69,7 @@
         }
         Values values = valuesList.get(0);
         int index = 0;
+        @SuppressWarnings("checkstyle:VariableDeclarationUsageDistance")
         Long txId = (Long) values.get(index++);
 
         SimpleTuple curr = new SimpleTuple(tupleFields);
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentAyncCQLResultSetValuesMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentAyncCQLResultSetValuesMapper.java
index 0159eef..ad613c5 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentAyncCQLResultSetValuesMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentAyncCQLResultSetValuesMapper.java
@@ -33,6 +33,7 @@
 /**
  * A result set mapper implementation which runs requests in parallel and waits for them all to finish.
  */
+@SuppressWarnings("checkstyle:AbbreviationAsWordInName")
 public class TridentAyncCQLResultSetValuesMapper implements AyncCQLResultSetValuesMapper {
     private final Fields outputDeclaredFields;
     private final Semaphore throttle;
diff --git a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentResultSetValuesMapper.java b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentResultSetValuesMapper.java
index cd34362..62b4d97 100644
--- a/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentResultSetValuesMapper.java
+++ b/external/storm-cassandra/src/main/java/org/apache/storm/cassandra/trident/state/TridentResultSetValuesMapper.java
@@ -24,9 +24,6 @@
 import org.apache.storm.tuple.ITuple;
 import org.apache.storm.tuple.Values;
 
-/**
- *
- */
 public class TridentResultSetValuesMapper implements CQLResultSetValuesMapper {
     private Fields outputDeclaredFields;
 
diff --git a/external/storm-eventhubs/pom.xml b/external/storm-eventhubs/pom.xml
index a6a1cef..8dec463 100755
--- a/external/storm-eventhubs/pom.xml
+++ b/external/storm-eventhubs/pom.xml
@@ -53,7 +53,7 @@
                 <artifactId>maven-checkstyle-plugin</artifactId>
                 <!--Note - the version would be inherited-->
                 <configuration>
-                    <maxAllowedViolations>45</maxAllowedViolations>
+                    <maxAllowedViolations>0</maxAllowedViolations>
                 </configuration>
             </plugin>
             <plugin>
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/EventHubBolt.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/EventHubBolt.java
index 85ffd03..4c1e068 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/EventHubBolt.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/EventHubBolt.java
@@ -18,7 +18,6 @@
 
 package org.apache.storm.eventhubs.bolt;
 
-
 import com.microsoft.azure.eventhubs.EventData;
 import com.microsoft.azure.eventhubs.EventHubClient;
 import com.microsoft.azure.eventhubs.PartitionSender;
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/IEventDataFormat.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/IEventDataFormat.java
index ec09460..08f8e63 100644
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/IEventDataFormat.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/bolt/IEventDataFormat.java
@@ -22,7 +22,7 @@
 import org.apache.storm.tuple.Tuple;
 
 /**
- * Serialize a tuple to a byte array to be sent to EventHubs
+ * Serialize a tuple to a byte array to be sent to EventHubs.
  */
 public interface IEventDataFormat extends Serializable {
     public byte[] serialize(Tuple tuple);
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/BinaryEventDataScheme.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/BinaryEventDataScheme.java
index 4c8e0a2..40db61a 100644
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/BinaryEventDataScheme.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/BinaryEventDataScheme.java
@@ -31,7 +31,7 @@
 /**
  * An Event Data Scheme which deserializes message payload into the raw bytes.
  *
- * The resulting tuple would contain three items, the first being the message bytes, and the second a map of properties that include
+ * <p>The resulting tuple would contain three items, the first being the message bytes, and the second a map of properties that include
  * metadata, which can be used to determine who processes the message, and how it is processed.The third is the system properties which
  * exposes information like enqueue-time, offset and sequence number
  */
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataScheme.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataScheme.java
index 9bd0c22..40b8034 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataScheme.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataScheme.java
@@ -30,10 +30,10 @@
  * An Event Data Scheme which deserializes message payload into the Strings. No encoding is assumed. The receiver will need to handle
  * parsing of the string data in appropriate encoding.
  *
- * The resulting tuple would contain two items: the the message string, and a map of properties that include metadata, which can be used to
- * determine who processes the message, and how it is processed.
+ * <p>The resulting tuple would contain two items: the message string, and a map of properties that include
+ * metadata, which can be used to determine who processes the message, and how it is processed.
  *
- * For passing the raw bytes of a messsage to Bolts, refer to {@link BinaryEventDataScheme}.
+ * <p>For passing the raw bytes of a message to Bolts, refer to {@link BinaryEventDataScheme}.
  */
 public class EventDataScheme implements IEventDataScheme {
 
@@ -46,9 +46,8 @@
         String messageData = "";
         if (eventData.getBytes() != null) {
             messageData = new String(eventData.getBytes());
-        }
-        /*Will only serialize AMQPValue type*/
-        else if (eventData.getObject() != null) {
+        } else if (eventData.getObject() != null) {
+            /*Will only serialize AMQPValue type*/
             try {
                 if (!(eventData.getObject() instanceof List)) {
                     messageData = eventData.getObject().toString();
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataWrap.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataWrap.java
index fc23c05..a331c69 100644
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataWrap.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventDataWrap.java
@@ -43,7 +43,6 @@
 
     @Override
     public int compareTo(EventDataWrap ed) {
-        return messageId.getSequenceNumber().
-            compareTo(ed.getMessageId().getSequenceNumber());
+        return messageId.getSequenceNumber().compareTo(ed.getMessageId().getSequenceNumber());
     }
 }
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubReceiverImpl.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubReceiverImpl.java
index c5d6303..392adaf 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubReceiverImpl.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubReceiverImpl.java
@@ -61,9 +61,10 @@
 
     @Override
     public void open(IEventFilter filter) throws EventHubException {
-        logger.info("creating eventhub receiver: partitionId=" + partitionId +
-                    ", filter=" + filter.getOffset() != null ?
-                        filter.getOffset() : Long.toString(filter.getTime().toEpochMilli()));
+        logger.info("creating eventhub receiver: partitionId=" + partitionId
+                + ", filter=" + filter.getOffset() != null
+                ? filter.getOffset()
+                : Long.toString(filter.getTime().toEpochMilli()));
         long start = System.currentTimeMillis();
         try {
             ehClient = EventHubClient.createFromConnectionStringSync(connectionString);
@@ -82,8 +83,7 @@
                     filter.getTime(),
                     1);
             } else {
-                throw new RuntimeException("Eventhub receiver must have " +
-                                           "an offset or time to be created");
+                throw new RuntimeException("Eventhub receiver must have an offset or time to be created");
             }
             if (receiver != null) {
                 receiver.setReceiveTimeout(Duration.ofMillis(receiverTimeoutInMillis));
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubSpout.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubSpout.java
index f8e144e..ea12d09 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubSpout.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/EventHubSpout.java
@@ -94,13 +94,7 @@
     }
 
     /**
-     * This is a extracted method that is easy to test
-     *
-     * @param config
-     * @param totalTasks
-     * @param taskIndex
-     * @param collector
-     * @throws Exception
+     * This is an extracted method that is easy to test.
      */
     public void preparePartitions(Map<String, Object> config, int totalTasks, int taskIndex, SpoutOutputCollector collector) throws
         Exception {
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IEventHubReceiverFactory.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IEventHubReceiverFactory.java
index d25ebdd..ef4d9b8 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IEventHubReceiverFactory.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IEventHubReceiverFactory.java
@@ -18,7 +18,7 @@
 import java.io.Serializable;
 
 /**
- * An abstract factory to generate EventHubReceiver
+ * An abstract factory to generate EventHubReceiver.
  */
 public interface IEventHubReceiverFactory extends Serializable {
     IEventHubReceiver create(EventHubSpoutConfig config, String partitionId);
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IPartitionManagerFactory.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IPartitionManagerFactory.java
index c3ce2ec..4a87b86 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IPartitionManagerFactory.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/IPartitionManagerFactory.java
@@ -18,7 +18,7 @@
 import java.io.Serializable;
 
 /**
- * An interface of factory method to create IPartitionManager
+ * An interface of factory method to create IPartitionManager.
  */
 public interface IPartitionManagerFactory extends Serializable {
     IPartitionManager create(EventHubSpoutConfig spoutConfig,
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/SimplePartitionManager.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/SimplePartitionManager.java
index 436aabe..3c98735 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/SimplePartitionManager.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/SimplePartitionManager.java
@@ -24,7 +24,7 @@
 import org.slf4j.LoggerFactory;
 
 /**
- * A simple partition manager that does not re-send failed messages
+ * A simple partition manager that does not re-send failed messages.
  */
 public class SimplePartitionManager implements IPartitionManager {
     private static final Logger logger = LoggerFactory.getLogger(SimplePartitionManager.class);
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/StringEventDataScheme.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/StringEventDataScheme.java
index 5b6a2b9..3bd1ba8 100644
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/StringEventDataScheme.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/spout/StringEventDataScheme.java
@@ -29,9 +29,9 @@
  * An Event Data Scheme which deserializes message payload into the Strings. No encoding is assumed. The receiver will need to handle
  * parsing of the string data in appropriate encoding.
  *
- * Note: Unlike other schemes provided, this scheme does not include any metadata.
+ * <p>Note: Unlike other schemes provided, this scheme does not include any metadata.
  *
- * For metadata please refer to {@link BinaryEventDataScheme}, {@link EventDataScheme}
+ * <p>For metadata please refer to {@link BinaryEventDataScheme}, {@link EventDataScheme}
  */
 public class StringEventDataScheme implements IEventDataScheme {
 
@@ -44,9 +44,8 @@
         String messageData = "";
         if (eventData.getBytes() != null) {
             messageData = new String(eventData.getBytes());
-        }
-        /*Will only serialize AMQPValue type*/
-        else if (eventData.getObject() != null) {
+        } else if (eventData.getObject() != null) {
+            /*Will only serialize AMQPValue type*/
             try {
                 if (!(eventData.getObject() instanceof List)) {
                     messageData = eventData.getObject().toString();
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/ITridentPartitionManager.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/ITridentPartitionManager.java
index 922e334..e1abdba 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/ITridentPartitionManager.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/ITridentPartitionManager.java
@@ -27,7 +27,7 @@
     void close();
 
     /**
-     * receive a batch of messages from EvenHub up to "count" messages
+     * Receive a batch of messages from EventHub up to "count" messages.
      *
      * @param offset the starting offset
      * @param count  max number of messages in this batch
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubEmitter.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubEmitter.java
index 4020b21..2e88373 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubEmitter.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubEmitter.java
@@ -27,7 +27,7 @@
 import org.apache.storm.trident.topology.TransactionAttempt;
 
 /**
- * A thin wrapper of TransactionalTridentEventHubEmitter for OpaqueTridentEventHubSpout
+ * A thin wrapper of TransactionalTridentEventHubEmitter for OpaqueTridentEventHubSpout.
  */
 public class OpaqueTridentEventHubEmitter implements IOpaquePartitionedTridentSpout.Emitter<Partitions, Partition, Map> {
     private final TransactionalTridentEventHubEmitter transactionalEmitter;
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubSpout.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubSpout.java
index f062201..d8fb338 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubSpout.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/OpaqueTridentEventHubSpout.java
@@ -26,7 +26,7 @@
 import org.apache.storm.tuple.Fields;
 
 /**
- * Opaque Trident EventHubs Spout
+ * Opaque Trident EventHubs Spout.
  */
 public class OpaqueTridentEventHubSpout implements IOpaquePartitionedTridentSpout<Partitions, Partition, Map> {
     private static final long serialVersionUID = 1L;
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/Partition.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/Partition.java
index 44d9d23..c47e616 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/Partition.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/Partition.java
@@ -23,7 +23,7 @@
 import org.apache.storm.trident.spout.ISpoutPartition;
 
 /**
- * Represents an EventHub partition
+ * Represents an EventHub partition.
  */
 public class Partition implements ISpoutPartition, Serializable {
     private static final long serialVersionUID = 1L;
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubEmitter.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubEmitter.java
index 8bd7bb1..2321617 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubEmitter.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubEmitter.java
@@ -85,8 +85,6 @@
 
     /**
      * Check if partition manager for a given partiton is created if not, create it.
-     *
-     * @param partition
      */
     private ITridentPartitionManager getOrCreatePartitionManager(Partition partition) {
         ITridentPartitionManager pm;
diff --git a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubSpout.java b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubSpout.java
index a2eb73d..e9a22e7 100755
--- a/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubSpout.java
+++ b/external/storm-eventhubs/src/main/java/org/apache/storm/eventhubs/trident/TransactionalTridentEventHubSpout.java
@@ -26,7 +26,7 @@
 import org.apache.storm.tuple.Fields;
 
 /**
- * Transactional Trident EventHub Spout
+ * Transactional Trident EventHub Spout.
  */
 public class TransactionalTridentEventHubSpout implements
                                                IPartitionedTridentSpout<Partitions, Partition, Map<String, Object>> {
diff --git a/external/storm-solr/pom.xml b/external/storm-solr/pom.xml
index 14af06e..5deee1c 100644
--- a/external/storm-solr/pom.xml
+++ b/external/storm-solr/pom.xml
@@ -124,7 +124,7 @@
                 <artifactId>maven-checkstyle-plugin</artifactId>
                 <!--Note - the version would be inherited-->
                 <configuration>
-                    <maxAllowedViolations>47</maxAllowedViolations>
+                    <maxAllowedViolations>0</maxAllowedViolations>
                 </configuration>
             </plugin>
             <plugin>
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/bolt/SolrUpdateBolt.java b/external/storm-solr/src/main/java/org/apache/storm/solr/bolt/SolrUpdateBolt.java
index f6c7c60..6706634 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/bolt/SolrUpdateBolt.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/bolt/SolrUpdateBolt.java
@@ -60,9 +60,9 @@
         this.solrConfig = solrConfig;
         this.solrMapper = solrMapper;
         this.commitStgy = commitStgy;
-        LOG.debug("Created {} with the following configuration: " +
-                  "[SolrConfig = {}], [SolrMapper = {}], [CommitStgy = {}]",
-                  this.getClass().getSimpleName(), solrConfig, solrMapper, commitStgy);
+        LOG.debug("Created {} with the following configuration: "
+                        + "[SolrConfig = {}], [SolrMapper = {}], [CommitStgy = {}]",
+                this.getClass().getSimpleName(), solrConfig, solrMapper, commitStgy);
     }
 
     @Override
@@ -78,9 +78,9 @@
 
     private int capacity() {
         final int defArrListCpcty = 10;
-        return (commitStgy instanceof CountBasedCommit) ?
-            ((CountBasedCommit) commitStgy).getThreshold() :
-            defArrListCpcty;
+        return (commitStgy instanceof CountBasedCommit)
+                ? ((CountBasedCommit) commitStgy).getThreshold()
+                : defArrListCpcty;
     }
 
     @Override
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/config/CountBasedCommit.java b/external/storm-solr/src/main/java/org/apache/storm/solr/config/CountBasedCommit.java
index 1a9c967..3fdcc89 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/config/CountBasedCommit.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/config/CountBasedCommit.java
@@ -21,7 +21,7 @@
     private int count;
 
     /**
-     * Initializes a count based commit strategy with the specified threshold
+     * Initializes a count based commit strategy with the specified threshold.
      *
      * @param threshold  The commit threshold, defining when SolrInputDocuments should be committed to Solr
      * */
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/config/SolrConfig.java b/external/storm-solr/src/main/java/org/apache/storm/solr/config/SolrConfig.java
index bd05e9d..8b36bf2 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/config/SolrConfig.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/config/SolrConfig.java
@@ -25,6 +25,7 @@
     private final boolean enableKerberos;
 
     /**
+     * Constructor.
      * @param zkHostString Zookeeper host string as defined in the {@link CloudSolrClient} constructor
      * */
     public SolrConfig(String zkHostString) {
@@ -32,6 +33,7 @@
     }
 
     /**
+     * Constructor.
      * @param zkHostString Zookeeper host string as defined in the {@link CloudSolrClient} constructor
      * @param tickTupleInterval interval for tick tuples
      * */
@@ -40,6 +42,7 @@
     }
 
     /**
+     * Constructor.
      * @param zkHostString Zookeeper host string as defined in the {@link CloudSolrClient} constructor
      * @param tickTupleInterval interval for tick tuples
      * @param enableKerberos true to enable kerberos else false
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrFieldsMapper.java b/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrFieldsMapper.java
index 1ff7aa3..f8950c0 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrFieldsMapper.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrFieldsMapper.java
@@ -12,6 +12,8 @@
 
 package org.apache.storm.solr.mapper;
 
+import static org.apache.storm.solr.schema.SolrFieldTypeFinder.FieldTypeWrapper;
+
 import java.util.LinkedList;
 import java.util.List;
 import org.apache.solr.client.solrj.SolrClient;
@@ -27,8 +29,6 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import static org.apache.storm.solr.schema.SolrFieldTypeFinder.FieldTypeWrapper;
-
 public class SolrFieldsMapper implements SolrMapper {
     private static final Logger log = LoggerFactory.getLogger(SolrFieldsMapper.class);
     private String collection;
@@ -119,11 +119,11 @@
 
     @Override
     public String toString() {
-        return "SolrFieldsMapper{" +
-               "collection='" + collection + '\'' +
-               ", typeFinder=" + typeFinder +
-               ", multiValueFieldToken='" + multiValueFieldToken + '\'' +
-               '}';
+        return "SolrFieldsMapper{"
+                + "collection='" + collection + '\''
+                + ", typeFinder=" + typeFinder
+                + ", multiValueFieldToken='" + multiValueFieldToken + '\''
+                + '}';
     }
 
     public static class Builder {
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrJsonMapper.java b/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrJsonMapper.java
index 2334a0e..3141b4b 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrJsonMapper.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/mapper/SolrJsonMapper.java
@@ -44,9 +44,7 @@
     }
 
     /**
-     *
-     * @param tuples
-     * @return
+     * Maps to Solr request.
      * @throws SolrMapperException if the tuple does not contain the
      */
     @Override
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/CopyField.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/CopyField.java
index 2281c8f..1ecffe6 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/CopyField.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/CopyField.java
@@ -36,9 +36,9 @@
 
     @Override
     public String toString() {
-        return "CopyField{" +
-               "source='" + source + '\'' +
-               ", dest='" + dest + '\'' +
-               '}';
+        return "CopyField{"
+                + "source='" + source + '\''
+                + ", dest='" + dest + '\''
+                + '}';
     }
 }
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Field.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Field.java
index 43b0110..dffca2a 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Field.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Field.java
@@ -36,9 +36,9 @@
 
     @Override
     public String toString() {
-        return "Field{" +
-               "name='" + name + '\'' +
-               ", type='" + type + '\'' +
-               '}';
+        return "Field{"
+                + "name='" + name + '\''
+                + ", type='" + type + '\''
+                + '}';
     }
 }
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/FieldType.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/FieldType.java
index 240af4b..94282b4 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/FieldType.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/FieldType.java
@@ -47,10 +47,10 @@
 
     @Override
     public String toString() {
-        return "FieldType{" +
-               "name='" + name + '\'' +
-               ", clazz='" + clazz + '\'' +
-               ", multiValued=" + multiValued +
-               '}';
+        return "FieldType{"
+                + "name='" + name + '\''
+                + ", clazz='" + clazz + '\''
+                + ", multiValued=" + multiValued
+                + '}';
     }
 }
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Schema.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Schema.java
index d9cdbac..c32549b 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Schema.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/Schema.java
@@ -88,15 +88,15 @@
 
     @Override
     public String toString() {
-        return "Schema{" +
-               "name='" + name + '\'' +
-               ", version='" + version + '\'' +
-               ", uniqueKey='" + uniqueKey + '\'' +
-               ", fieldTypes=" + fieldTypes +
-               ", fields=" + fields +
-               ", dynamicFields=" + dynamicFields +
-               ", copyFields=" + copyFields +
-               '}';
+        return "Schema{"
+                + "name='" + name + '\''
+                + ", version='" + version + '\''
+                + ", uniqueKey='" + uniqueKey + '\''
+                + ", fieldTypes=" + fieldTypes
+                + ", fields=" + fields
+                + ", dynamicFields=" + dynamicFields
+                + ", copyFields=" + copyFields
+                + '}';
     }
 
     // Wrapper class handy for the client code to use the JSON parser to build to use with JSON parser
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/SolrFieldTypeFinder.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/SolrFieldTypeFinder.java
index aa6802e..1e8f547 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/SolrFieldTypeFinder.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/SolrFieldTypeFinder.java
@@ -157,7 +157,7 @@
     }
 
     /**
-     * Class wrapping all the information for fields and types
+     * Class wrapping all the information for fields and types.
      * */
     public static class FieldTypeWrapper implements Serializable {
         Field field;
@@ -178,10 +178,10 @@
 
         @Override
         public String toString() {
-            return "FieldTypeWrapper{" +
-                   "field=" + field +
-                   ", type=" + type +
-                   '}';
+            return "FieldTypeWrapper{"
+                    + "field=" + field
+                    + ", type=" + type
+                    + '}';
         }
     }
 }
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilder.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilder.java
index 405429c..072775c 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilder.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilder.java
@@ -32,7 +32,9 @@
     private Schema schema;
 
 
-    /** Urls with the form http://localhost:8983/solr/gettingstarted/schema/ returns the schema in JSON format */
+    /**
+     * Urls with the form http://localhost:8983/solr/gettingstarted/schema/ return the schema in JSON format.
+     */
     public RestJsonSchemaBuilder(String solrHost, String solrPort, String collection) throws IOException {
         this(new URL("http://" + solrHost + ":" + solrPort + "/solr/" + collection + "/schema/"));
     }
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilderV2.java b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilderV2.java
index aff8550..ac5f950 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilderV2.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/schema/builder/RestJsonSchemaBuilderV2.java
@@ -32,7 +32,7 @@
 import org.slf4j.LoggerFactory;
 
 /**
- * Class that builds the {@link Schema} object from the schema returned by the SchemaRequest
+ * Class that builds the {@link Schema} object from the schema returned by the SchemaRequest.
  */
 public class RestJsonSchemaBuilderV2 implements SchemaBuilder {
     private static final Logger logger = LoggerFactory.getLogger(RestJsonSchemaBuilderV2.class);
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrState.java b/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrState.java
index 14b5717..46fafff 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrState.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrState.java
@@ -47,10 +47,10 @@
     }
 
     @Override
-    public void beginCommit(Long aLong) { }
+    public void beginCommit(Long someLong) { }
 
     @Override
-    public void commit(Long aLong) { }
+    public void commit(Long someLong) { }
 
     public void updateState(List<TridentTuple> tuples) {
         try {
diff --git a/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrStateFactory.java b/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrStateFactory.java
index b0ae4a2..8582bca 100644
--- a/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrStateFactory.java
+++ b/external/storm-solr/src/main/java/org/apache/storm/solr/trident/SolrStateFactory.java
@@ -29,7 +29,7 @@
     }
 
     @Override
-    public State makeState(Map<String, Object> map, IMetricsContext iMetricsContext, int partitionIndex, int numPartitions) {
+    public State makeState(Map<String, Object> map, IMetricsContext metricsContext, int partitionIndex, int numPartitions) {
         SolrState state = new SolrState(solrConfig, solrMapper);
         state.prepare();
         return state;