Fix code warnings - closes apache/incubator-pirk#62
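
This patch clears compiler and IDE warnings without changing behavior. The recurring fixes, all visible in the hunks below, are:

- Replace the decorative /***...***/ license banners with plain /* ... */ block comments.
- Narrow field and constant visibility (public to package-private, package-private to private) where nothing outside the class or package uses them.
- Use the diamond operator instead of repeating generic type arguments.
- Drop unused fields (e.g. the cached BroadcastVars references), redundant "= null" initializers on locals that are always assigned before use, a redundant Element cast, a redundant throws clause, and a redundant interface method modifier.
- Tidy Javadoc: remove trailing-whitespace lines, describe bare @throws clauses, and fix the {@code} spacing in BaseInputFormat.
- Convert an if/else-if chain on the input format to a switch, and remove the stray semicolon at the end of a try-with-resources resource list.

As a minimal standalone sketch of the recurring cleanups (illustrative only; the class name and file argument here are hypothetical, not part of this patch):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.util.Map;
    import java.util.Properties;
    import java.util.TreeMap;

    public class WarningFixSketch
    {
      public static void main(String[] args) throws IOException
      {
        // Diamond operator: type arguments are inferred from the declaration.
        Map<Integer,String> values = new TreeMap<>();
        values.put(1, "one");

        // No redundant "= null" initializer: props is assigned on every path that reads it.
        Properties props;

        // try-with-resources without a trailing ';' after the last resource.
        try (InputStream stream = new FileInputStream(args[0]))
        {
          props = new Properties();
          props.load(stream);
        }

        System.out.println(values + " " + props.stringPropertyNames());
      }
    }
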
diff --git a/src/main/java/org/apache/pirk/benchmark/BenchmarkDriver.java b/src/main/java/org/apache/pirk/benchmark/BenchmarkDriver.java
index d953bb4..a24d0da 100644
--- a/src/main/java/org/apache/pirk/benchmark/BenchmarkDriver.java
+++ b/src/main/java/org/apache/pirk/benchmark/BenchmarkDriver.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 
 package org.apache.pirk.benchmark;
 
diff --git a/src/main/java/org/apache/pirk/encryption/Paillier.java b/src/main/java/org/apache/pirk/encryption/Paillier.java
index 6c62fe8..bd5f992 100644
--- a/src/main/java/org/apache/pirk/encryption/Paillier.java
+++ b/src/main/java/org/apache/pirk/encryption/Paillier.java
@@ -95,15 +95,15 @@
     }
   }
 
-  BigInteger p = null; // large prime
-  BigInteger q = null; // large prime
-  BigInteger N = null; // N=pq, RSA modulus
+  private BigInteger p = null; // large prime
+  private BigInteger q = null; // large prime
+  private BigInteger N = null; // N=pq, RSA modulus
 
-  BigInteger NSquared = null; // NSquared = N^2
-  BigInteger lambdaN = null; // lambda(N) = lcm(p-1,q-1), Carmichael function of N
-  BigInteger w = null; // lambda(N)^-1 mod N
+  private BigInteger NSquared = null; // NSquared = N^2
+  private BigInteger lambdaN = null; // lambda(N) = lcm(p-1,q-1), Carmichael function of N
+  private BigInteger w = null; // lambda(N)^-1 mod N
 
-  int bitLength = 0; // bit length of the modulus N
+  private int bitLength = 0; // bit length of the modulus N
 
   /**
    * Constructor with all parameters p,q, and bitLengthInput specified
@@ -139,7 +139,7 @@
    * <p>
    * The probability that the new BigInteger values represents primes will exceed (1 - (1/2)^certainty). The execution time of this constructor is proportional
    * to the value of this parameter.
-   * 
+   *
    */
   public Paillier(int bitLengthInput, int certainty) throws PIRException
   {
@@ -153,7 +153,7 @@
    * <p>
    * The probability that the new BigInteger values represents primes will exceed (1 - (1/2)^certainty). The execution time of this constructor is proportional
    * to the value of this parameter.
-   * 
+   *
    */
   public Paillier(int bitLengthInput, int certainty, int ensureBitSet) throws PIRException
   {
@@ -250,7 +250,7 @@
 
   /**
    * Encrypt - generate r
-   * 
+   *
    */
   public BigInteger encrypt(BigInteger m) throws PIRException
   {
@@ -266,7 +266,7 @@
 
   /**
    * Encrypt - use provided r
-   * 
+   *
    */
   public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException
   {
diff --git a/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java b/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
index 1445432..59256ec 100644
--- a/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
+++ b/src/main/java/org/apache/pirk/inputformat/hadoop/BaseInputFormat.java
@@ -23,7 +23,7 @@
 /**
  * All input formats used must extend this class
  * <p>
- * MapWritable{@code<K,V>} should be of the form MapWritable{@code<Text,Writable>}
+ * MapWritable{@code <K,V>} should be of the form MapWritable{@code <Text,Writable>}
  * <p>
  * If V is an array type, it must be of the form ArrayWritable
  *
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
index 52d3c54..2beed14 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.querier.wideskies;
 
 import java.io.Serializable;
@@ -28,7 +28,6 @@
 
 /**
  * Class to hold the information necessary for the PIR querier to perform decryption
- * 
  */
 public class Querier implements Serializable, Storable
 {
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
index d703737..23993cc 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierProps.java
@@ -35,30 +35,30 @@
   private static final Logger logger = LoggerFactory.getLogger(QuerierProps.class);
 
   // General properties
-  public static final String ACTION = "querier.action";
-  public static final String INPUTFILE = "querier.inputFile";
+  static final String ACTION = "querier.action";
+  static final String INPUTFILE = "querier.inputFile";
   public static final String OUTPUTFILE = "querier.outputFile";
-  public static final String QUERYTYPE = "querier.queryType";
-  public static final String NUMTHREADS = "querier.numThreads";
+  static final String QUERYTYPE = "querier.queryType";
+  static final String NUMTHREADS = "querier.numThreads";
 
   // Encryption properties
-  public static final String HASHBITSIZE = "querier.hashBitSize";
-  public static final String HASHKEY = "querier.hashKey";
-  public static final String DATAPARTITIONSIZE = "querier.dataPartitionBitSize";
-  public static final String PAILLIERBITSIZE = "querier.paillierBitSize";
-  public static final String BITSET = "querier.bitSet";
-  public static final String CERTAINTY = "querier.certainty";
-  public static final String QUERYSCHEMAS = "querier.querySchemas";
-  public static final String DATASCHEMAS = "querier.dataSchemas";
-  public static final String EMBEDSELECTOR = "querier.embedSelector";
-  public static final String USEMEMLOOKUPTABLE = "querier.memLookupTable";
-  public static final String USEHDFSLOOKUPTABLE = "querier.useHDFSLookupTable";
-  public static final String SR_ALGORITHM = "pallier.secureRandom.algorithm";
-  public static final String SR_PROVIDER = "pallier.secureRandom.provider";
-  public static final String EMBEDQUERYSCHEMA = "pir.embedQuerySchema";
+  static final String HASHBITSIZE = "querier.hashBitSize";
+  static final String HASHKEY = "querier.hashKey";
+  static final String DATAPARTITIONSIZE = "querier.dataPartitionBitSize";
+  static final String PAILLIERBITSIZE = "querier.paillierBitSize";
+  static final String BITSET = "querier.bitSet";
+  static final String CERTAINTY = "querier.certainty";
+  static final String QUERYSCHEMAS = "querier.querySchemas";
+  static final String DATASCHEMAS = "querier.dataSchemas";
+  static final String EMBEDSELECTOR = "querier.embedSelector";
+  static final String USEMEMLOOKUPTABLE = "querier.memLookupTable";
+  static final String USEHDFSLOOKUPTABLE = "querier.useHDFSLookupTable";
+  static final String SR_ALGORITHM = "pallier.secureRandom.algorithm";
+  static final String SR_PROVIDER = "pallier.secureRandom.provider";
+  static final String EMBEDQUERYSCHEMA = "pir.embedQuerySchema";
 
   // Decryption properties
-  public static final String QUERIERFILE = "querier.querierFile";
+  static final String QUERIERFILE = "querier.querierFile";
 
   static final List<String> PROPSLIST = Arrays.asList(ACTION, INPUTFILE, OUTPUTFILE, QUERYTYPE, NUMTHREADS, EMBEDQUERYSCHEMA, HASHBITSIZE, HASHKEY,
       DATAPARTITIONSIZE, PAILLIERBITSIZE, BITSET, CERTAINTY, QUERYSCHEMAS, DATASCHEMAS, EMBEDSELECTOR, USEMEMLOOKUPTABLE, USEHDFSLOOKUPTABLE, SR_ALGORITHM,
@@ -66,7 +66,7 @@
 
   /**
    * Validates the querier properties
-   * 
+   *
    */
   public static boolean validateQuerierProperties()
   {
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
index f4852f8..1ce62ec 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/decrypt/DecryptResponse.java
@@ -43,7 +43,6 @@
 
 /**
  * Class to perform PIR decryption
- * 
  */
 public class DecryptResponse
 {
@@ -81,7 +80,7 @@
    * D^k_r = D^k_r,0 || D^k_r,1 || ... || D^k_r,(numPartitionsPerDataElement - 1)
    * <p>
    * where D^k_r,l = Y_{r*numPartitionsPerDataElement + l} & (2^{r*numPartitionsPerDataElement} * (2^numBitsPerDataElement - 1))
-   * 
+   *
    */
   public void decrypt(int numThreads) throws InterruptedException, PIRException
   {
@@ -104,7 +103,7 @@
     BigInteger twoBI = BigInteger.valueOf(2);
     for (String selector : selectors)
     {
-      resultMap.put(selector, new ArrayList<QueryResponseJSON>());
+      resultMap.put(selector, new ArrayList<>());
 
       // 2^{selectorNum*dataPartitionBitSize}(2^{dataPartitionBitSize} - 1)
       BigInteger mask = twoBI.pow(selectorNum * dataPartitionBitSize).multiply((twoBI.pow(dataPartitionBitSize).subtract(BigInteger.ONE)));
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
index 565560b..c705c5c 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/encrypt/EncryptQueryRunnable.java
@@ -29,7 +29,6 @@
 
 /**
  * Runnable class for multithreaded PIR encryption
- *
  */
 public class EncryptQueryRunnable implements Runnable
 {
@@ -55,7 +54,7 @@
     start = startInput;
     stop = stopInput;
 
-    encryptedValues = new TreeMap<Integer,BigInteger>();
+    encryptedValues = new TreeMap<>();
   }
 
   /**
diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
index 6a3bb15..362d26f 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
@@ -31,7 +31,6 @@
  * <p>
  * Note that the hash key is specific to the query. If we have hash collisions over our selector set, we will append integers to the key starting with 0 until
  * we no longer have collisions
- * 
  */
 public class QueryInfo implements Serializable, Cloneable
 {
@@ -61,7 +60,7 @@
   // false positive rate for variable length selectors and a zero false positive rate
   // for selectors of fixed size < 32 bits
 
-  QuerySchema qSchema = null;
+  private QuerySchema qSchema = null;
 
   public QueryInfo(int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput,
       boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput)
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java b/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
index 6a92f63..273aaae 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/ResponderCLI.java
@@ -44,7 +44,7 @@
 
   /**
    * Create and parse allowable options
-   * 
+   *
    */
   public ResponderCLI(String[] args)
   {
@@ -80,7 +80,7 @@
 
   /**
    * Determine if an option was provided by the user via the CLI
-   * 
+   *
    * @param option
    *          - the option of interest
    * @return true if option was provided, false otherwise
@@ -92,7 +92,7 @@
 
   /**
    * Obtain the argument of the option provided by the user via the CLI
-   * 
+   *
    * @param option
    *          - the option of interest
    * @return value of the argument of the option
@@ -104,12 +104,12 @@
 
   /**
    * Method to parse and validate the options provided
-   * 
+   *
    * @return - true if valid, false otherwise
    */
   private boolean parseOptions()
   {
-    boolean valid = true;
+    boolean valid;
 
     // If we have a local.querier.properties file specified, load it
     if (hasOption(LOCALPROPFILE))
@@ -136,7 +136,7 @@
 
   /**
    * Create the options available for the DistributedTestDriver
-   * 
+   *
    * @return Apache's CLI Options object
    */
   private Options createOptions()
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
index dd20f87..c9ed966 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/mapreduce/HashSelectorsAndPartitionDataMapper.java
@@ -133,7 +133,7 @@
     if (passFilter)
     {
       // Extract the selector, compute the hash, and partition the data element according to query type
-      Tuple2<Integer,BytesArrayWritable> returnTuple = null;
+      Tuple2<Integer,BytesArrayWritable> returnTuple;
       try
       {
         returnTuple = HashSelectorAndPartitionData.hashSelectorAndFormPartitions(value, qSchema, dSchema, queryInfo);
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
index 4a98e69..2de4a2a 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/ComputeResponse.java
@@ -222,18 +222,17 @@
   {
     logger.info("Performing query: ");
 
-    JavaRDD<MapWritable> inputRDD = null;
-    if (dataInputFormat.equals(InputFormatConst.BASE_FORMAT))
+    JavaRDD<MapWritable> inputRDD;
+    switch (dataInputFormat)
     {
-      inputRDD = readData();
-    }
-    else if (dataInputFormat.equals(InputFormatConst.ES))
-    {
-      inputRDD = readDataES();
-    }
-    else
-    {
-      throw new PIRException("Unknown data input format " + dataInputFormat);
+      case InputFormatConst.BASE_FORMAT:
+        inputRDD = readData();
+        break;
+      case InputFormatConst.ES:
+        inputRDD = readDataES();
+        break;
+      default:
+        throw new PIRException("Unknown data input format " + dataInputFormat);
     }
 
     performQuery(inputRDD);
@@ -243,7 +242,7 @@
    * Method to read in the data from an allowed input format, filter, and return a RDD of MapWritable data elements
    */
   @SuppressWarnings("unchecked")
-  public JavaRDD<MapWritable> readData() throws ClassNotFoundException, Exception
+  public JavaRDD<MapWritable> readData() throws Exception
   {
     logger.info("Reading data ");
 
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
index cc0a5ac..0e860dd 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalc.java
@@ -48,7 +48,6 @@
   private static final Logger logger = LoggerFactory.getLogger(EncRowCalc.class);
 
   private Accumulators accum = null;
-  private BroadcastVars bVars = null;
 
   private Query query = null;
   private QueryInfo queryInfo = null;
@@ -60,16 +59,15 @@
   public EncRowCalc(Accumulators accumIn, BroadcastVars bvIn)
   {
     accum = accumIn;
-    bVars = bvIn;
 
-    query = bVars.getQuery();
-    queryInfo = bVars.getQueryInfo();
-    if (bVars.getUseLocalCache().equals("true"))
+    query = bvIn.getQuery();
+    queryInfo = bvIn.getQueryInfo();
+    if (bvIn.getUseLocalCache().equals("true"))
     {
       useLocalCache = true;
     }
-    limitHitsPerSelector = bVars.getLimitHitsPerSelector();
-    maxHitsPerSelector = bVars.getMaxHitsPerSelector();
+    limitHitsPerSelector = bvIn.getLimitHitsPerSelector();
+    maxHitsPerSelector = bvIn.getMaxHitsPerSelector();
 
     logger.info("Initialized EncRowCalc - limitHitsPerSelector = " + limitHitsPerSelector + " maxHitsPerSelector = " + maxHitsPerSelector);
   }
@@ -84,7 +82,7 @@
 
     if (queryInfo.getUseHDFSExpLookupTable())
     {
-      FileSystem fs = null;
+      FileSystem fs;
       try
       {
         fs = FileSystem.get(new Configuration());
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java
index 4d11ef7..c7610f8 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/EncRowCalcPrecomputedCache.java
@@ -41,7 +41,6 @@
   private static final Logger logger = LoggerFactory.getLogger(EncRowCalcPrecomputedCache.class);
 
   private Accumulators accum = null;
-  private BroadcastVars bVars = null;
 
   Query query = null;
 
@@ -53,12 +52,11 @@
   public EncRowCalcPrecomputedCache(Accumulators accumIn, BroadcastVars bvIn)
   {
     accum = accumIn;
-    bVars = bvIn;
 
-    query = bVars.getQuery();
+    query = bvIn.getQuery();
 
-    limitHitsPerSelector = bVars.getLimitHitsPerSelector();
-    maxHitsPerSelector = bVars.getMaxHitsPerSelector();
+    limitHitsPerSelector = bvIn.getLimitHitsPerSelector();
+    maxHitsPerSelector = bvIn.getMaxHitsPerSelector();
 
     expTable = new HashMap<>();
 
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/ExpKeyFilenameMap.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/ExpKeyFilenameMap.java
index 9e505b4..cb95b5f 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/ExpKeyFilenameMap.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/ExpKeyFilenameMap.java
@@ -53,7 +53,7 @@
   @Override
   public Iterable<Tuple2<Integer,String>> call(Iterator<Tuple2<Integer,Iterable<Tuple2<Integer,BigInteger>>>> iter) throws Exception
   {
-    ArrayList<Tuple2<Integer,String>> keyFileList = new ArrayList<Tuple2<Integer,String>>();
+    ArrayList<Tuple2<Integer,String>> keyFileList = new ArrayList<>();
 
     FileSystem fs = FileSystem.get(new Configuration());
 
@@ -73,7 +73,7 @@
       int queryHash = expTuple._1;
 
       // Record the queryHash -> fileName
-      keyFileList.add(new Tuple2<Integer,String>(queryHash, fileName));
+      keyFileList.add(new Tuple2<>(queryHash, fileName));
 
       // Write the partition elements to the corresponding exp table file
       // each line: queryHash,<power>-<element^power mod N^2>
diff --git a/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java b/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java
index 087031e..6e9c715 100644
--- a/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java
+++ b/src/main/java/org/apache/pirk/responder/wideskies/spark/HashSelectorsAndPartitionData.java
@@ -43,21 +43,15 @@
 
   private static final Logger logger = LoggerFactory.getLogger(HashSelectorsAndPartitionData.class);
 
-  Accumulators accum = null;
-  BroadcastVars bVars = null;
-
   private QueryInfo queryInfo = null;
   private QuerySchema qSchema = null;
   private DataSchema dSchema = null;
 
   public HashSelectorsAndPartitionData(Accumulators accumIn, BroadcastVars bvIn)
   {
-    accum = accumIn;
-    bVars = bvIn;
-
-    queryInfo = bVars.getQueryInfo();
-    qSchema = bVars.getQuerySchema();
-    dSchema = bVars.getDataSchema();
+    queryInfo = bvIn.getQueryInfo();
+    qSchema = bvIn.getQuerySchema();
+    dSchema = bvIn.getDataSchema();
 
     logger.info("Initialized HashSelectorsAndPartitionData");
   }
diff --git a/src/main/java/org/apache/pirk/schema/data/DataSchemaLoader.java b/src/main/java/org/apache/pirk/schema/data/DataSchemaLoader.java
index bc93827..dc07787 100644
--- a/src/main/java/org/apache/pirk/schema/data/DataSchemaLoader.java
+++ b/src/main/java/org/apache/pirk/schema/data/DataSchemaLoader.java
@@ -93,7 +93,7 @@
   /**
    * Initializes the static {@link DataSchemaRegistry} with a list of available data schema names.
    * 
-   * @throws Exception
+   * @throws Exception - failed to initialize
    */
   public static void initialize() throws Exception
   {
@@ -108,7 +108,7 @@
    *          If true, specifies that the data schema is an hdfs file; if false, that it is a regular file.
    * @param fs
    *          Used only when {@code hdfs} is true; the {@link FileSystem} handle for the hdfs in which the data schema exists
-   * @throws Exception
+   * @throws Exception - failed to initialize
    */
   public static void initialize(boolean hdfs, FileSystem fs) throws Exception
   {
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
index 9099376..cdc223e 100644
--- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
@@ -45,7 +45,7 @@
 
   // Element names from the data schema to include in the response.
   // Order matters for packing/unpacking.
-  private final List<String> elementNames = new ArrayList<String>();
+  private final List<String> elementNames = new ArrayList<>();
 
   // Name of class to use in data filtering.
   private final String filterTypeName;
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java b/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
index 39a4fbb..2d4c6b5 100644
--- a/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
@@ -96,7 +96,7 @@
   /**
    * Initializes the static {@link QuerySchemaRegistry} with a list of query schema names.
    * 
-   * @throws Exception
+   * @throws Exception - failed to initialize
    */
   public static void initialize() throws Exception
   {
@@ -111,7 +111,7 @@
    *          If true, specifies that the query schema is an hdfs file; if false, that it is a regular file.
    * @param fs
    *          Used only when {@code hdfs} is true; the {@link FileSystem} handle for the hdfs in which the query schema exists
-   * @throws Exception
+   * @throws Exception - failed to initialize
    */
   public static void initialize(boolean hdfs, FileSystem fs) throws Exception
   {
@@ -223,7 +223,7 @@
       if (nNode.getNodeType() == Node.ELEMENT_NODE)
       {
         // Pull the name
-        String queryElementName = ((Element) nNode).getFirstChild().getNodeValue().trim();
+        String queryElementName = nNode.getFirstChild().getNodeValue().trim();
         if (!dataSchema.containsElement(queryElementName))
         {
           throw new PIRException("dataSchema = " + dataSchemaName + " does not contain requested element name = " + queryElementName);
@@ -318,7 +318,7 @@
         if (nNode.getNodeType() == Node.ELEMENT_NODE)
         {
           // Pull the name and add to the set.
-          String name = ((Element) nNode).getFirstChild().getNodeValue().trim();
+          String name = nNode.getFirstChild().getNodeValue().trim();
           filteredNamesSet.add(name);
 
           logger.info("filterName = " + name);
diff --git a/src/main/java/org/apache/pirk/schema/query/filter/DataFilter.java b/src/main/java/org/apache/pirk/schema/query/filter/DataFilter.java
index 35c64e0..16b6478 100644
--- a/src/main/java/org/apache/pirk/schema/query/filter/DataFilter.java
+++ b/src/main/java/org/apache/pirk/schema/query/filter/DataFilter.java
@@ -35,6 +35,6 @@
    * <p>
    * Returns true if we are to filter out the element, false otherwise
    */
-  public boolean filterDataElement(MapWritable dataElement, DataSchema dSchema);
+  boolean filterDataElement(MapWritable dataElement, DataSchema dSchema);
 
 }
diff --git a/src/main/java/org/apache/pirk/serialization/JavaSerializer.java b/src/main/java/org/apache/pirk/serialization/JavaSerializer.java
index 695ea6e..a4d1694 100644
--- a/src/main/java/org/apache/pirk/serialization/JavaSerializer.java
+++ b/src/main/java/org/apache/pirk/serialization/JavaSerializer.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.serialization;
 
 import java.io.IOException;
diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java
index 704a5cb..6071c60 100644
--- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java
+++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java
@@ -1,4 +1,4 @@
-/*******************************************************************************
+/*
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -15,7 +15,7 @@
  * KIND, either express or implied.  See the License for the
  * specific language governing permissions and limitations
  * under the License.
- *******************************************************************************/
+ */
 package org.apache.pirk.serialization;
 
 import java.io.IOException;
@@ -28,12 +28,32 @@
 {
   private ObjectMapper objectMapper = new ObjectMapper();
 
+  /**
+   * Stores the given object on the output stream as JSON.
+   *
+   * @param outputStream
+   *          The stream on which to store the object.
+   * @param obj
+   *          The object to be stored.
+   * @throws IOException
+   *           If a problem occurs storing the object on the given stream.
+   */
   @Override
   public void write(OutputStream outputStream, Storable obj) throws IOException
   {
     objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj);
   }
 
+  /**
+   * Reads a JSON string from the given input stream and returns the Object representation.
+   *
+   * @param inputStream
+   *          The stream from which to read the object.
+   * @param classType
+   *          The type of object being retrieved.
+   * @throws IOException
+   *           If a problem occurs reading the object from the stream.
+   */
   @Override
   public <T> T read(InputStream inputStream, Class<T> classType) throws IOException
   {
diff --git a/src/main/java/org/apache/pirk/utils/FileIOUtils.java b/src/main/java/org/apache/pirk/utils/FileIOUtils.java
index 32cba40..8924a86 100644
--- a/src/main/java/org/apache/pirk/utils/FileIOUtils.java
+++ b/src/main/java/org/apache/pirk/utils/FileIOUtils.java
@@ -46,7 +46,7 @@
 
   public static ArrayList<String> readToArrayList(String filepath)
   {
-    return (ArrayList<String>) read(filepath, new ArrayList<String>(), new Callable<String>()
+    return (ArrayList<String>) read(filepath, new ArrayList<>(), new Callable<String>()
     {
       @Override
       public String call(String line)
@@ -58,12 +58,12 @@
 
   public static ArrayList<String> readToArrayList(String filepath, Callable<String> function)
   {
-    return (ArrayList<String>) read(filepath, new ArrayList<String>(), function);
+    return (ArrayList<String>) read(filepath, new ArrayList<>(), function);
   }
 
   public static HashSet<String> readToHashSet(String filepath)
   {
-    return (HashSet<String>) read(filepath, new HashSet<String>(), new Callable<String>()
+    return (HashSet<String>) read(filepath, new HashSet<>(), new Callable<String>()
     {
       @Override
       public String call(String line)
@@ -75,7 +75,7 @@
 
   public static HashSet<String> readToHashSet(String filepath, Callable<String> function)
   {
-    return (HashSet<String>) read(filepath, new HashSet<String>(), function);
+    return (HashSet<String>) read(filepath, new HashSet<>(), function);
   }
 
   public static AbstractCollection<String> read(String filepath, AbstractCollection<String> collection, Callable<String> function)
diff --git a/src/main/java/org/apache/pirk/utils/QueryParserUtils.java b/src/main/java/org/apache/pirk/utils/QueryParserUtils.java
index 6f27438..617e6dc 100644
--- a/src/main/java/org/apache/pirk/utils/QueryParserUtils.java
+++ b/src/main/java/org/apache/pirk/utils/QueryParserUtils.java
@@ -383,7 +383,7 @@
             }
             else if (!(value).equals(itemTokens[1])) // Single value match
             {
-              logger.debug("We do not have a single value match: stringValue " + (String) value + " != itemTokens[1] = " + itemTokens[1]);
+              logger.debug("We do not have a single value match: stringValue " + value + " != itemTokens[1] = " + itemTokens[1]);
               satisfiesQuery = false;
             }
           }
diff --git a/src/main/java/org/apache/pirk/utils/SystemConfiguration.java b/src/main/java/org/apache/pirk/utils/SystemConfiguration.java
index 84bcc4b..4146e5b 100755
--- a/src/main/java/org/apache/pirk/utils/SystemConfiguration.java
+++ b/src/main/java/org/apache/pirk/utils/SystemConfiguration.java
@@ -90,7 +90,7 @@
    * 
    * @param propertyName
    *          The name of the requested string property value.
-   * @param defaultvalue
+   * @param defaultValue
    *          The value to return if the property is undefined.
    * @return The value of the requested property, or the default value if the property is undefined.
    */
@@ -127,7 +127,7 @@
    */
   public static boolean getBooleanProperty(String propertyName, boolean defaultValue)
   {
-    return (isSetTrue(propertyName)) ? true : defaultValue;
+    return isSetTrue(propertyName) || defaultValue;
   }
 
   /**
@@ -237,7 +237,7 @@
   {
     if (file.exists())
     {
-      try (InputStream stream = new FileInputStream(file);)
+      try (InputStream stream = new FileInputStream(file))
       {
         logger.info("Loading properties file '" + file.getAbsolutePath() + "'");
         props.load(stream);