PIRK-45 Remove dependency on Java serialization - closes apache/incubator-pirk#107
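
This change replaces Java object serialization (and the Jackson-based JsonSerializer) with Gson: the Storable types mark their serialized state with @Expose and are rebuilt through custom deserializers. As a rough illustration of the resulting API (the roundTrip helper, the query.json path, and the query instance are assumptions for the example, not part of this patch; needs java.io.*, org.apache.pirk.query.wideskies.Query and org.apache.pirk.serialization.JsonSerializer imports):

    // Sketch only: exercises the Gson-backed JsonSerializer introduced below.
    static Query roundTrip(Query query) throws IOException
    {
      JsonSerializer serializer = new JsonSerializer();
      try (OutputStream out = new FileOutputStream("query.json"))
      {
        serializer.write(out, query);            // writes only @Expose'd fields as JSON
      }
      try (InputStream in = new FileInputStream("query.json"))
      {
        return serializer.read(in, Query.class); // dispatched to QueryDeserializer
      }
    }
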
diff --git a/pom.xml b/pom.xml
index 5c46283..80e9a2b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -410,7 +410,8 @@
                             <exclude>eclipse*.xml</exclude> <!-- Exclude eclipse* xml -->
                             <exclude>docs/*</exclude> <!-- Exclude docs -->
                             <exclude>logs/*</exclude> <!-- Exclude logs -->
-                            <exclude>**/m2.conf</exclude> <!-- Exclude Maven conf which gets installed on travis and fails RAT check -->
+                            <exclude>**/m2.conf
+                            </exclude> <!-- Exclude Maven conf which gets installed on travis and fails RAT check -->
                             <exclude>src/main/resources/META-INF/**</exclude>
                         </excludes>
                     </configuration>
@@ -549,9 +550,11 @@
                                 <shadedArtifactAttached>true</shadedArtifactAttached>
                                 <shadedClassifierName>exe</shadedClassifierName>
                                 <transformers>
-                                    <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
+                                    <transformer
+                                            implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
                                     </transformer>
-                                    <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
+                                    <transformer
+                                            implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer">
                                     </transformer>
                                 </transformers>
                                 <filters>
diff --git a/src/main/java/org/apache/pirk/encryption/Paillier.java b/src/main/java/org/apache/pirk/encryption/Paillier.java
index 72f59b2..da14bba 100644
--- a/src/main/java/org/apache/pirk/encryption/Paillier.java
+++ b/src/main/java/org/apache/pirk/encryption/Paillier.java
@@ -23,6 +23,7 @@
 import java.security.GeneralSecurityException;
 import java.security.SecureRandom;
 
+import com.google.gson.annotations.Expose;
 import org.apache.pirk.utils.PIRException;
 import org.apache.pirk.utils.SystemConfiguration;
 import org.slf4j.Logger;
@@ -95,27 +96,24 @@
     }
   }
 
-  private BigInteger p; // large prime
-  private BigInteger q; // large prime
+  @Expose private BigInteger p; // large prime
+  @Expose private BigInteger q; // large prime
   private BigInteger N; // N=pq, RSA modulus
 
   private BigInteger NSquared; // NSquared = N^2
-  private BigInteger lambdaN; // lambda(N) = lcm(p-1,q-1), Carmichael function of N
-  private BigInteger w; // lambda(N)^-1 mod N
 
-  private final int bitLength; // bit length of the modulus N
+  private BigInteger lambdaN; // lambda(N) = lcm(p-1,q-1), Carmichael function of N
+
+  private BigInteger w; // lambda(N)^-1 mod N
+  @Expose private final int bitLength; // bit length of the modulus N
 
   /**
    * Creates a Paillier algorithm with all parameters specified.
-   * 
-   * @param p
-   *          First large prime.
-   * @param q
-   *          Second large prime.
-   * @param bitLength
-   *          Bit length of the modulus {@code N}.
-   * @throws IllegalArgumentException
-   *           If {@code p} or {@code q} do not satisfy primality constraints.
+   *
+   * @param p         First large prime.
+   * @param q         Second large prime.
+   * @param bitLength Bit length of the modulus {@code N}.
+   * @throws IllegalArgumentException If {@code p} or {@code q} do not satisfy primality constraints.
    */
   public Paillier(BigInteger p, BigInteger q, int bitLength)
   {
@@ -146,13 +144,10 @@
    * <p>
    * The probability that the generated keys represent primes will exceed (1 - (1/2)<sup>{@code certainty}</sup>). The execution time of this constructor is
    * proportional to the value of this parameter.
-   * 
-   * @param bitLength
-   *          The bit length of the resulting modulus {@code N}.
-   * @param certainty
-   *          The probability that the new {@code p} and {@code q} represent prime numbers.
-   * @throws IllegalArgumentException
-   *           If the {@code certainty} is less than the system allowed lower bound.
+   *
+   * @param bitLength The bit length of the resulting modulus {@code N}.
+   * @param certainty The probability that the new {@code p} and {@code q} represent prime numbers.
+   * @throws IllegalArgumentException If the {@code certainty} is less than the system allowed lower bound.
    */
   public Paillier(int bitLength, int certainty)
   {
@@ -168,15 +163,11 @@
    * proportional to the value of this parameter.
    * <p>
    * When ensureBitSet > -1 the value of bit "{@code ensureBitSet}" in modulus {@code N} will be set.
-   * 
-   * @param bitLength
-   *          The bit length of the resulting modulus {@code N}.
-   * @param certainty
-   *          The probability that the new {@code p} and {@code q} represent prime numbers.
-   * @param ensureBitSet
-   *          index of bit in {@code N} to ensure is set.
-   * @throws IllegalArgumentException
-   *           If the {@code certainty} is less than the system allowed lower bound, or the index of {@code ensureBitSet} is greater than the {@code bitLength}.
+   *
+   * @param bitLength    The bit length of the resulting modulus {@code N}.
+   * @param certainty    The probability that the new {@code p} and {@code q} represent prime numbers.
+   * @param ensureBitSet index of bit in {@code N} to ensure is set.
+   * @throws IllegalArgumentException If the {@code certainty} is less than the system allowed lower bound, or the index of {@code ensureBitSet} is greater than the {@code bitLength}.
    */
   public Paillier(int bitLength, int certainty, int ensureBitSet)
   {
@@ -198,7 +189,7 @@
 
   /**
    * Returns the value of the large prime {@code p}.
-   * 
+   *
    * @return p.
    */
   public BigInteger getP()
@@ -208,7 +199,7 @@
 
   /**
    * Returns the value of the large prime {@code q}.
-   * 
+   *
    * @return q.
    */
   public BigInteger getQ()
@@ -218,7 +209,7 @@
 
   /**
    * Returns the RSA modulus value {@code N}.
-   * 
+   *
    * @return N, the product of {@code p} and {@code q}.
    */
   public BigInteger getN()
@@ -228,7 +219,7 @@
 
   /**
    * Returns the value of {@code N}<sup>2</sup>.
-   * 
+   *
    * @return N squared.
    */
   public BigInteger getNSquared()
@@ -240,7 +231,7 @@
    * Returns the value of Carmichael's function at {@code N}.
    * <p>
    * The Carmichael function of {@code N} is the least common multiple of {@code p-1} and {@code q-1},
-   * 
+   *
    * @return Carmichael's function at {@code N}.
    */
   public BigInteger getLambdaN()
@@ -250,7 +241,7 @@
 
   /**
    * Returns the bit length of the modulus {@code N}.
-   * 
+   *
    * @return the bit length, as an integer.
    */
   public int getBitLength()
@@ -296,11 +287,9 @@
   /**
    * Returns the encrypted value of {@code m} using a generated random value.
    *
-   * @param m
-   *          the value to be encrypted.
+   * @param m the value to be encrypted.
    * @return the encrypted value
-   * @throws PIRException
-   *           If {@code m} is not less than @{code N}.
+   * @throws PIRException If {@code m} is not less than {@code N}.
    */
   public BigInteger encrypt(BigInteger m) throws PIRException
   {
@@ -316,14 +305,11 @@
 
   /**
    * Returns the ciphertext of a message using the given random value.
-   * 
-   * @param m
-   *          the value to be encrypted.
-   * @param r
-   *          the random value to use in the Pailler encryption.
+   *
+   * @param m the value to be encrypted.
+   * @param r the random value to use in the Paillier encryption.
    * @return the encrypted value.
-   * @throws PIRException
-   *           If {@code m} is not less than @{code N}.
+   * @throws PIRException If {@code m} is not less than {@code N}.
    */
   public BigInteger encrypt(BigInteger m, BigInteger r) throws PIRException
   {
@@ -341,9 +327,8 @@
 
   /**
    * Returns the plaintext message for a given ciphertext.
-   * 
-   * @param c
-   *          an encrypted value.
+   *
+   * @param c an encrypted value.
    * @return the corresponding plaintext value.
    */
   public BigInteger decrypt(BigInteger c)
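
Note on the Paillier changes above: only p, q and bitLength carry @Expose, so a serialized Paillier contains just those three values; N, NSquared, lambdaN and w are recomputed when QuerierDeserializer rebuilds the object through the Paillier(p, q, bitLength) constructor. A sketch of that recomputation, inferred from the field comments (illustrative, not the constructor's literal code):

    // Derived values, reconstructed from the two primes (assumes valid p and q).
    BigInteger N = p.multiply(q);                                // N = pq, RSA modulus
    BigInteger NSquared = N.pow(2);                              // N^2
    BigInteger pM1 = p.subtract(BigInteger.ONE);
    BigInteger qM1 = q.subtract(BigInteger.ONE);
    BigInteger lambdaN = pM1.multiply(qM1).divide(pM1.gcd(qM1)); // lcm(p-1, q-1)
    BigInteger w = lambdaN.modInverse(N);                        // lambda(N)^-1 mod N
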
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java b/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java
index ab7637d..1f7d2cd 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/EncryptionPropertiesBuilder.java
@@ -22,7 +22,20 @@
 
 import java.util.Properties;
 
-import static org.apache.pirk.querier.wideskies.QuerierProps.*;
+import static org.apache.pirk.querier.wideskies.QuerierProps.BITSET;
+import static org.apache.pirk.querier.wideskies.QuerierProps.CERTAINTY;
+import static org.apache.pirk.querier.wideskies.QuerierProps.DATAPARTITIONSIZE;
+import static org.apache.pirk.querier.wideskies.QuerierProps.EMBEDSELECTOR;
+import static org.apache.pirk.querier.wideskies.QuerierProps.HASHBITSIZE;
+import static org.apache.pirk.querier.wideskies.QuerierProps.HASHKEY;
+import static org.apache.pirk.querier.wideskies.QuerierProps.NUMTHREADS;
+import static org.apache.pirk.querier.wideskies.QuerierProps.PAILLIERBITSIZE;
+import static org.apache.pirk.querier.wideskies.QuerierProps.QUERYTYPE;
+import static org.apache.pirk.querier.wideskies.QuerierProps.USEHDFSLOOKUPTABLE;
+import static org.apache.pirk.querier.wideskies.QuerierProps.USEMEMLOOKUPTABLE;
+import static org.apache.pirk.querier.wideskies.QuerierProps.setEncryptionDefaults;
+import static org.apache.pirk.querier.wideskies.QuerierProps.setGeneralDefaults;
+import static org.apache.pirk.querier.wideskies.QuerierProps.validateQuerierEncryptionProperties;
 
 /**
  * Holds the various parameters related to creating a {@link Querier}.
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
index b63e06e..efb5a11 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
@@ -22,6 +22,7 @@
 import java.util.List;
 import java.util.Map;
 
+import com.google.gson.annotations.Expose;
 import org.apache.pirk.encryption.Paillier;
 import org.apache.pirk.query.wideskies.Query;
 import org.apache.pirk.serialization.Storable;
@@ -31,18 +32,20 @@
  */
 public class Querier implements Serializable, Storable
 {
-  private static final long serialVersionUID = 1L;
+  public static final long querierSerialVersionUID = 1L;
 
-  private Query query = null; // contains the query vectors and functionality
+  @Expose public final long querierVersion = querierSerialVersionUID;
 
-  private Paillier paillier = null; // Paillier encryption functionality
+  @Expose private Query query = null; // contains the query vectors and functionality
 
-  private List<String> selectors = null; // selectors
+  @Expose private Paillier paillier = null; // Paillier encryption functionality
+
+  @Expose private List<String> selectors = null; // selectors
 
   // map to check the embedded selectors in the results for false positives;
   // if the selector is a fixed size < 32 bits, it is included as is
   // if the selector is of variable lengths
-  private Map<Integer,String> embedSelectorMap = null;
+  @Expose private Map<Integer,String> embedSelectorMap = null;
 
   public Querier(List<String> selectorsInput, Paillier paillierInput, Query queryInput, Map<Integer,String> embedSelectorMapInput)
   {
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java
index 826c577..7cf25b9 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierCLI.java
@@ -18,7 +18,12 @@
  */
 package org.apache.pirk.querier.wideskies;
 
-import org.apache.commons.cli.*;
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.Options;
 import org.apache.pirk.schema.data.DataSchemaLoader;
 import org.apache.pirk.schema.query.QuerySchemaLoader;
 import org.apache.pirk.utils.SystemConfiguration;
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java
new file mode 100644
index 0000000..6971a26
--- /dev/null
+++ b/src/main/java/org/apache/pirk/querier/wideskies/QuerierDeserializer.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.querier.wideskies;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.reflect.TypeToken;
+import org.apache.pirk.encryption.Paillier;
+import org.apache.pirk.query.wideskies.Query;
+
+import java.lang.reflect.Type;
+import java.math.BigInteger;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Custom deserializer for Querier class for Gson.
+ */
+public class QuerierDeserializer implements JsonDeserializer<Querier>
+{
+
+  private static final Gson gson = new Gson();
+
+  @Override public Querier deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException
+  {
+    JsonObject jsonObject = jsonElement.getAsJsonObject();
+    // Check the version number.
+    long querierVersion = jsonObject.get("querierVersion").getAsLong();
+    if (querierVersion != Querier.querierSerialVersionUID)
+    {
+      throw new JsonParseException(
+          "Attempt to deserialize unsupported query version. Supported: " + Querier.querierSerialVersionUID + "; Received: " + querierVersion);
+    }
+    // Then deserialize the Query Info
+    Query query = gson.fromJson(jsonObject.get("query").toString(), Query.class);
+
+    // Now Paillier
+    Paillier paillier = deserializePaillier(jsonObject.get("paillier").getAsJsonObject());
+
+    List<String> selectors = gson.fromJson(jsonObject.get("selectors").toString(), new TypeToken<List<String>>()
+    {
+    }.getType());
+    Map<Integer,String> embedSelectorMap = gson.fromJson(jsonObject.get("embedSelectorMap").toString(), new TypeToken<Map<Integer,String>>()
+    {
+    }.getType());
+
+    return new Querier(selectors, paillier, query, embedSelectorMap);
+  }
+
+  /**
+   * Deserializes a Paillier JsonObject.
+   *
+   * @param paillier A JsonObject at the root of a serialized Paillier object.
+   * @return A Paillier object of the deserialized Json.
+   */
+  private Paillier deserializePaillier(JsonObject paillier)
+  {
+    BigInteger p = new BigInteger(paillier.get("p").getAsString());
+    BigInteger q = new BigInteger(paillier.get("q").getAsString());
+    int bitLength = paillier.get("bitLength").getAsInt();
+    return new Paillier(p, q, bitLength);
+  }
+
+}
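
The deserializer above only takes effect once it is registered for the Querier type; the JsonSerializer changes later in this patch perform that registration. A standalone sketch (the json string is assumed to hold a previously serialized Querier):

    // Sketch: register the custom deserializer and parse a Querier from JSON.
    Gson gson = new GsonBuilder()
        .registerTypeAdapter(Querier.class, new QuerierDeserializer())
        .excludeFieldsWithoutExposeAnnotation()
        .create();
    Querier querier = gson.fromJson(json, Querier.class);
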
diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java
index 51e6bb4..4922d9d 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/Query.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java
@@ -26,6 +26,7 @@
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.function.Consumer;
 
+import com.google.gson.annotations.Expose;
 import org.apache.pirk.encryption.ModPowAbstraction;
 import org.apache.pirk.serialization.Storable;
 import org.slf4j.Logger;
@@ -33,17 +34,20 @@
 
 /**
  * Class to hold the PIR query vectors
- *
  */
+
 public class Query implements Serializable, Storable
 {
-  private static final long serialVersionUID = 1L;
+  public static final long querySerialVersionUID = 1L;
+
+  // So that we can serialize the version number in gson.
+  @Expose public final long queryVersion = querySerialVersionUID;
 
   private static final Logger logger = LoggerFactory.getLogger(Query.class);
 
-  private final QueryInfo queryInfo; // holds all query info
+  @Expose private final QueryInfo queryInfo; // holds all query info
 
-  private final SortedMap<Integer,BigInteger> queryElements; // query elements - ordered on insertion
+  @Expose private final SortedMap<Integer,BigInteger> queryElements; // query elements - ordered on insertion
 
   // lookup table for exponentiation of query vectors - based on dataPartitionBitSize
   // element -> <power, element^power mod N^2>
@@ -51,16 +55,22 @@
 
   // File based lookup table for modular exponentiation
   // element hash -> filename containing it's <power, element^power mod N^2> modular exponentiations
-  private Map<Integer,String> expFileBasedLookup = new HashMap<>();
+  @Expose private Map<Integer,String> expFileBasedLookup = new HashMap<>();
 
-  private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements
-  private final BigInteger NSquared;
+  @Expose private final BigInteger N; // N=pq, RSA modulus for the Paillier encryption associated with the queryElements
+
+  @Expose private final BigInteger NSquared;
 
   public Query(QueryInfo queryInfo, BigInteger N, SortedMap<Integer,BigInteger> queryElements)
   {
+    this(queryInfo, N, N.pow(2), queryElements);
+  }
+
+  public Query(QueryInfo queryInfo, BigInteger N, BigInteger NSquared, SortedMap<Integer,BigInteger> queryElements)
+  {
     this.queryInfo = queryInfo;
     this.N = N;
-    NSquared = N.pow(2);
+    this.NSquared = NSquared;
     this.queryElements = queryElements;
   }
 
@@ -114,8 +124,7 @@
 
     queryElements.values().parallelStream().forEach(new Consumer<BigInteger>()
     {
-      @Override
-      public void accept(BigInteger element)
+      @Override public void accept(BigInteger element)
       {
         Map<Integer,BigInteger> powMap = new HashMap<>(maxValue); // <power, element^power mod N^2>
         for (int i = 0; i <= maxValue; ++i)
diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java
new file mode 100644
index 0000000..92fdfa0
--- /dev/null
+++ b/src/main/java/org/apache/pirk/query/wideskies/QueryDeserializer.java
@@ -0,0 +1,167 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.query.wideskies;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.reflect.TypeToken;
+import org.apache.pirk.schema.query.QuerySchema;
+import org.apache.pirk.schema.query.filter.DataFilter;
+import org.apache.pirk.schema.query.filter.FilterFactory;
+import org.apache.pirk.utils.PIRException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.lang.reflect.Type;
+import java.math.BigInteger;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.SortedMap;
+import java.util.UUID;
+
+/**
+ * Custom deserializer for Query class for Gson.
+ */
+public class QueryDeserializer implements JsonDeserializer<Query>
+{
+
+  private static final Logger logger = LoggerFactory.getLogger(QueryDeserializer.class);
+
+  private static final Gson gson = new Gson();
+
+  @Override public Query deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException
+  {
+    JsonObject jsonObject = jsonElement.getAsJsonObject();
+    logger.info("Got query json:" + jsonObject.toString());
+    // Check the version number.
+    long queryVersion = jsonObject.get("queryVersion").getAsLong();
+    if (queryVersion != Query.querySerialVersionUID)
+    {
+      throw new JsonParseException(
+          "Attempt to deserialize unsupported query version. Supported: " + Query.querySerialVersionUID + "; Received: " + queryVersion);
+    }
+    // Then deserialize the Query Info
+    QueryInfo queryInfo = deserializeInfo(jsonObject.get("queryInfo").getAsJsonObject());
+    SortedMap<Integer,BigInteger> queryElements = gson.fromJson(jsonObject.get("queryElements"), new TypeToken<SortedMap<Integer,BigInteger>>()
+    {
+    }.getType());
+    BigInteger N = new BigInteger(jsonObject.get("n").getAsString());
+    BigInteger NSquared = new BigInteger(jsonObject.get("nsquared").getAsString());
+    Map<Integer,String> expFileBasedLookup = gson.fromJson(jsonObject.get("expFileBasedLookup"), new TypeToken<Map<Integer,String>>()
+    {
+    }.getType());
+
+    Query query = new Query(queryInfo, N, NSquared, queryElements);
+    query.setExpFileBasedLookup(expFileBasedLookup);
+    return query;
+  }
+
+  /**
+   * Deserializes a QueryInfo JsonObject
+   *
+   * @param queryInfoJson A JsonObject at the root of a serialized QueryInfo object.
+   * @return A QueryInfo object of the deserialized Json.
+   * @throws JsonParseException If the query info version is unsupported or the JSON is malformed.
+   */
+  public static QueryInfo deserializeInfo(JsonObject queryInfoJson) throws JsonParseException
+  {
+    // First check the version.
+    long infoVersion = queryInfoJson.get("queryInfoVersion").getAsLong();
+    if (infoVersion != QueryInfo.queryInfoSerialVersionUID)
+    {
+      throw new JsonParseException(
+          "Attempt to deserialize unsupported query info version. Supported: " + QueryInfo.queryInfoSerialVersionUID + "; Received: " + infoVersion);
+    }
+    // Deserialize the QuerySchema next, accounting for the possibility that it is null.
+    QuerySchema querySchema;
+    if (queryInfoJson.get("qSchema").isJsonNull())
+    {
+      querySchema = null;
+    }
+    else
+    {
+      querySchema = deserializeSchema(queryInfoJson.get("qSchema").getAsJsonObject());
+    }
+    // Now start making the QueryInfo object.
+    QueryInfo info = new QueryInfo(UUID.fromString(queryInfoJson.get("identifier").getAsString()), queryInfoJson.get("numSelectors").getAsInt(),
+        queryInfoJson.get("hashBitSize").getAsInt(), queryInfoJson.get("hashKey").getAsString(), queryInfoJson.get("dataPartitionBitSize").getAsInt(),
+        queryInfoJson.get("queryType").getAsString(), queryInfoJson.get("useExpLookupTable").getAsBoolean(), queryInfoJson.get("embedSelector").getAsBoolean(),
+        queryInfoJson.get("useHDFSExpLookupTable").getAsBoolean(), queryInfoJson.get("numBitsPerDataElement").getAsInt(), querySchema);
+    return info;
+  }
+
+  /**
+   * Deserializes a QuerySchema JsonObject
+   *
+   * @param querySchemaJson A JsonObject at the root of a serialized QuerySchema object.
+   * @return A QuerySchema object of the deserialized Json.
+   * @throws JsonParseException If the query schema version is unsupported or the JSON is malformed.
+   */
+  private static QuerySchema deserializeSchema(JsonObject querySchemaJson) throws JsonParseException
+  {
+    // Deserialize The Query Schema First.
+    long schemaVersion = querySchemaJson.get("querySchemaVersion").getAsLong();
+    if (schemaVersion != QuerySchema.querySchemaSerialVersionUID)
+    {
+      throw new JsonParseException(
+          "Attempt to deserialize unsupported query info version. Supported: " + QueryInfo.queryInfoSerialVersionUID + "; Received: " + schemaVersion);
+    }
+    String dataFilterName = querySchemaJson.get("filterTypeName").getAsString();
+    Set<String> filteredElementNames;
+    try
+    {
+      filteredElementNames = gson.fromJson(querySchemaJson.get("filteredElementNames"), new TypeToken<Set<String>>()
+      {
+      }.getType());
+    } catch (Exception e)
+    {
+      logger.warn("No filtered element names for Query Schema deserialization.");
+      filteredElementNames = null;
+    }
+    // Set up the data filter
+    DataFilter dataFilter;
+    try
+    {
+      dataFilter = FilterFactory.getFilter(dataFilterName, filteredElementNames);
+    } catch (IOException | PIRException e)
+    {
+      logger.error("Error trying to create data filter from JSON.", e);
+      throw new JsonParseException(e);
+    }
+
+    QuerySchema querySchema = new QuerySchema(querySchemaJson.get("schemaName").getAsString(), querySchemaJson.get("dataSchemaName").getAsString(),
+        querySchemaJson.get("selectorName").getAsString(), dataFilterName, dataFilter, querySchemaJson.get("dataElementSize").getAsInt());
+    List<String> elementNames = gson.fromJson(querySchemaJson.get("elementNames"), new TypeToken<List<String>>()
+    {
+    }.getType());
+    querySchema.getElementNames().addAll(elementNames);
+    HashMap<String,String> additionalFields = gson.fromJson(querySchemaJson.get("additionalFields"), new TypeToken<HashMap<String,String>>()
+    {
+    }.getType());
+    querySchema.getAdditionalFields().putAll(additionalFields);
+    return querySchema;
+  }
+}
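
The keys looked up in this deserializer must match the @Expose'd field names exactly: the serializing Gson instance uses its default identity field-naming policy, so a serialized Query is keyed by "queryVersion", "queryInfo", "queryElements", "expFileBasedLookup", "N" and "NSquared". A small sketch of that case-sensitive lookup (serializedQuery is an assumed JSON string):

    // Sketch: JsonObject lookups are case-sensitive and use the exact field names.
    JsonObject obj = new JsonParser().parse(serializedQuery).getAsJsonObject();
    BigInteger N = new BigInteger(obj.get("N").getAsString());              // field is "N", not "n"
    BigInteger NSquared = new BigInteger(obj.get("NSquared").getAsString());
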
diff --git a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
index 06bfa28..20fbb36 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/QueryInfo.java
@@ -19,16 +19,17 @@
 
 package org.apache.pirk.query.wideskies;
 
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.UUID;
-
+import com.google.gson.annotations.Expose;
 import org.apache.pirk.schema.query.QuerySchema;
 import org.apache.pirk.schema.query.QuerySchemaRegistry;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.UUID;
+
 /**
  * Class to hold all of the basic information regarding a query
  * <p>
@@ -37,33 +38,39 @@
  */
 public class QueryInfo implements Serializable, Cloneable
 {
-  private static final long serialVersionUID = 1L;
+  public static final long queryInfoSerialVersionUID = 1L;
+
+  // So that we can serialize the version number in gson.
+  @Expose public final long queryInfoVersion = queryInfoSerialVersionUID;
 
   private static final Logger logger = LoggerFactory.getLogger(QueryInfo.class);
 
-  private UUID identifier; // the identifier of the query
-  private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize}
+  @Expose private UUID identifier; // the identifier of the query
 
-  private String queryType = null; // QueryType string const
+  @Expose private int numSelectors = 0; // the number of selectors in the query, given by \floor{paillerBitSize/dataPartitionBitSize}
 
-  private int hashBitSize = 0; // Bit size of the keyed hash function
-  private String hashKey; // Key for the keyed hash function
+  @Expose private String queryType = null; // QueryType string const
 
-  private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type
-  private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now
-  private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element
+  @Expose private int hashBitSize = 0; // Bit size of the keyed hash function
 
-  private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute
+  @Expose private String hashKey; // Key for the keyed hash function
 
-  private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS
+  @Expose private int numBitsPerDataElement = 0; // total num bits per returned data value - defined relative to query type
+
+  @Expose private int dataPartitionBitSize = 0; // num of bits for each partition of an incoming data element, must be < 32 right now
+
+  @Expose private int numPartitionsPerDataElement = 0; // num partitions of size dataPartitionBitSize per data element
+
+  @Expose private boolean useExpLookupTable = false; // whether or not to generate and use the expLookupTable for encryption, it is very expensive to compute
+
+  @Expose private boolean useHDFSExpLookupTable = false; // whether or not to use the expLookupTable stored in HDFS
   // if it doesn't yet exist, it will be created within the cluster and stored in HDFS
 
-  private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low
+  @Expose private boolean embedSelector = true; // whether or not to embed the selector in the results - results in a very low
 
   // false positive rate for variable length selectors and a zero false positive rate
   // for selectors of fixed size < 32 bits
-
-  private QuerySchema qSchema = null;
+  @Expose private QuerySchema qSchema = null;
 
   public QueryInfo(int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput,
       boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput)
@@ -86,7 +93,6 @@
     useExpLookupTable = useExpLookupTableInput;
     useHDFSExpLookupTable = useHDFSExpLookupTableInput;
     embedSelector = embedSelectorInput;
-
     numBitsPerDataElement = QuerySchemaRegistry.get(queryType).getDataElementSize();
     dataPartitionBitSize = dataPartitionBitSizeInput;
     numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSizeInput;
@@ -99,6 +105,36 @@
     printQueryInfo();
   }
 
+  public QueryInfo(UUID identifierInput, int numSelectorsInput, int hashBitSizeInput, String hashKeyInput, int dataPartitionBitSizeInput, String queryTypeInput,
+      boolean useExpLookupTableInput, boolean embedSelectorInput, boolean useHDFSExpLookupTableInput, int numBitsPerDataElementInput,
+      QuerySchema querySchemaInput)
+  {
+    identifier = identifierInput;
+    queryType = queryTypeInput;
+
+    numSelectors = numSelectorsInput;
+
+    hashBitSize = hashBitSizeInput;
+    hashKey = hashKeyInput;
+
+    useExpLookupTable = useExpLookupTableInput;
+    useHDFSExpLookupTable = useHDFSExpLookupTableInput;
+    embedSelector = embedSelectorInput;
+
+    numBitsPerDataElement = numBitsPerDataElementInput;
+    dataPartitionBitSize = dataPartitionBitSizeInput;
+    numPartitionsPerDataElement = numBitsPerDataElement / dataPartitionBitSizeInput;
+
+    if (embedSelectorInput)
+    {
+      numPartitionsPerDataElement += 4; // using an 8-bit partition size and a 32-bit embedded selector
+    }
+
+    addQuerySchema(querySchemaInput);
+
+    printQueryInfo();
+  }
+
   public QueryInfo(Map queryInfoMap)
   {
     // The Storm Config serializes the map as a json and reads back in with numeric values as longs.
@@ -217,8 +253,7 @@
         + useHDFSExpLookupTable + " embedSelector = " + embedSelector);
   }
 
-  @Override
-  public QueryInfo clone()
+  @Override public QueryInfo clone()
   {
     try
     {
diff --git a/src/main/java/org/apache/pirk/response/wideskies/Response.java b/src/main/java/org/apache/pirk/response/wideskies/Response.java
index e3fdad1..b94b977 100644
--- a/src/main/java/org/apache/pirk/response/wideskies/Response.java
+++ b/src/main/java/org/apache/pirk/response/wideskies/Response.java
@@ -22,6 +22,7 @@
 import java.math.BigInteger;
 import java.util.TreeMap;
 
+import com.google.gson.annotations.Expose;
 import org.apache.pirk.query.wideskies.QueryInfo;
 import org.apache.pirk.serialization.Storable;
 
@@ -29,15 +30,16 @@
  * Class to hold the encrypted response elements for the PIR query
  * <p>
  * Serialized and returned to the querier for decryption
- * 
  */
 public class Response implements Serializable, Storable
 {
-  private static final long serialVersionUID = 1L;
+  public static final long responseSerialVersionUID = 1L;
 
-  private QueryInfo queryInfo = null; // holds all query info
+  @Expose public final long responseVersion = responseSerialVersionUID;
 
-  private TreeMap<Integer,BigInteger> responseElements = null; // encrypted response columns, colNum -> column
+  @Expose private QueryInfo queryInfo = null; // holds all query info
+
+  @Expose private TreeMap<Integer,BigInteger> responseElements = null; // encrypted response columns, colNum -> column
 
   public Response(QueryInfo queryInfoInput)
   {
diff --git a/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java
new file mode 100644
index 0000000..f1588c6
--- /dev/null
+++ b/src/main/java/org/apache/pirk/response/wideskies/ResponseDeserializer.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.response.wideskies;
+
+import com.google.gson.Gson;
+import com.google.gson.JsonDeserializationContext;
+import com.google.gson.JsonDeserializer;
+import com.google.gson.JsonElement;
+import com.google.gson.JsonObject;
+import com.google.gson.JsonParseException;
+import com.google.gson.reflect.TypeToken;
+import org.apache.pirk.query.wideskies.QueryDeserializer;
+import org.apache.pirk.query.wideskies.QueryInfo;
+
+import java.lang.reflect.Type;
+import java.math.BigInteger;
+import java.util.TreeMap;
+
+/**
+ * Custom deserializer for Response class for Gson.
+ */
+public class ResponseDeserializer implements JsonDeserializer<Response>
+{
+
+  private static final Gson gson = new Gson();
+
+  @Override public Response deserialize(JsonElement jsonElement, Type type, JsonDeserializationContext jsonDeserializationContext) throws JsonParseException
+  {
+    final JsonObject jsonObject = jsonElement.getAsJsonObject();
+    long responseVersion = jsonObject.get("responseVersion").getAsLong();
+    if (responseVersion != Response.responseSerialVersionUID)
+    {
+      throw new JsonParseException("\"Attempt to deserialize unsupported query version. Supported: \"\n"
+          + "          + Response.responseSerialVersionUID + \"; Received: \" + responseVersion");
+    }
+    QueryInfo queryInfo = QueryDeserializer.deserializeInfo(jsonObject.get("queryInfo").getAsJsonObject());
+    Response response = new Response(queryInfo);
+    TreeMap<Integer,BigInteger> responseElements = gson.fromJson(jsonObject.get("responseElements"), new TypeToken<TreeMap<Integer,BigInteger>>()
+    {
+    }.getType());
+    response.setResponseElements(responseElements);
+    return response;
+  }
+}
+
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
index 6fa4dd5..c22c384 100644
--- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
@@ -18,6 +18,9 @@
  */
 package org.apache.pirk.schema.query;
 
+import com.google.gson.annotations.Expose;
+import org.apache.pirk.schema.query.filter.DataFilter;
+
 import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.HashMap;
@@ -25,45 +28,45 @@
 import java.util.List;
 import java.util.Set;
 
-import org.apache.pirk.schema.query.filter.DataFilter;
-
 /**
  * Class to hold a query schema
- *
  */
 public class QuerySchema implements Serializable
 {
-  private static final long serialVersionUID = 1L;
+  public static final long querySchemaSerialVersionUID = 1L;
+
+  // So that we can serialize the version number in gson.
+  @Expose public final long querySchemaVersion = querySchemaSerialVersionUID;
 
   // This schema's name.
-  private final String schemaName;
+  @Expose private final String schemaName;
 
   // Name of the data schema associated with this query schema.
-  private final String dataSchemaName;
+  @Expose private final String dataSchemaName;
 
   // Name of element in the dataSchema to be used as the selector.
-  private final String selectorName;
+  @Expose private final String selectorName;
 
   // Element names from the data schema to include in the response.
   // Order matters for packing/unpacking.
-  private final List<String> elementNames = new ArrayList<>();
+  @Expose private final List<String> elementNames = new ArrayList<>();
 
   // Name of class to use in data filtering.
-  private final String filterTypeName;
+  @Expose private final String filterTypeName;
 
   // Instance of the filterTypeName.
   private final DataFilter filter;
 
   // Set of data schema element names on which to apply filtering.
-  private final Set<String> filteredElementNames = new HashSet<>();
+  @Expose private final Set<String> filteredElementNames = new HashSet<>();
 
   // Total number of bits to be returned for each data element hit.
-  private final int dataElementSize;
+  @Expose private final int dataElementSize;
 
-  // Additional fields by key,value
-  private final HashMap<String,String> additionalFields = new HashMap<>();
+  // Additional fields by key,value
+  @Expose private final HashMap<String,String> additionalFields = new HashMap<>();
 
-  QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize)
+  public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize)
   {
     this.schemaName = schemaName;
     this.dataSchemaName = dataSchemaName;
@@ -75,7 +78,7 @@
 
   /**
    * Returns the name of this schema.
-   * 
+   *
    * @return The schema name.
    */
   public String getSchemaName()
@@ -99,7 +102,7 @@
    * Returns the element names to include in the response.
    * <p>
    * The element names are defined by the data schema associated with this query.
-   * 
+   *
    * @return The ordered list of query element names.
    */
   public List<String> getElementNames()
@@ -111,7 +114,7 @@
    * Returns the element name used as the selector.
    * <p>
    * The element names are defined by the data schema associated with this query.
-   * 
+   *
    * @return The element names being selected.
    */
   public String getSelectorName()
@@ -126,9 +129,9 @@
 
   /**
    * Returns the name of the filter class for this query.
-   * 
+   * <p>
    * The filter class name is the fully qualified name of a Java class that implements the {@link DataFilter} interface.
-   * 
+   *
    * @return The type name of the query filter, or <code>null</code> if there is no filter defined.
    */
   public String getFilterTypeName()
@@ -138,7 +141,7 @@
 
   /**
    * Returns the set of element names on which to apply the filter.
-   * 
+   *
    * @return The possibly empty set of data schema element names.
    */
   public Set<String> getFilteredElementNames()
@@ -150,7 +153,7 @@
    * Returns the data element filter for this query.
    * <p>
    * The data filter is applied to the {@link QuerySchema#getFilteredElementNames()} data elements.
-   * 
+   *
    * @return The data filter, or <code>null</code> if no filter has been specified for this query.
    */
   public DataFilter getFilter()
@@ -162,7 +165,7 @@
    * Returns the map of additional field keys and values
    * <p>
    * Note that additional fields are optional, thus the map may be empty
-   * 
+   *
    * @return The additionalFields HashMap
    */
   public HashMap<String,String> getAdditionalFields()
@@ -172,7 +175,7 @@
 
   /**
    * Returns the value from the additionalFields mapping corresponding to the given key
-   * 
+   *
    * @param key
    * @return value from the additionalFields mapping corresponding to the given key
    */
diff --git a/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java b/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java
index 38239cc..667218b 100644
--- a/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java
+++ b/src/main/java/org/apache/pirk/serialization/HadoopFileSystemStore.java
@@ -37,7 +37,7 @@
   }
 
   /**
-   * Creates a new storage service on the given HDFS file system using default Java serialization.
+   * Creates a new storage service on the given HDFS file system using default JSON serialization.
    */
   public HadoopFileSystemStore(FileSystem fs)
   {
diff --git a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java
index 6071c60..ea4cabd 100644
--- a/src/main/java/org/apache/pirk/serialization/JsonSerializer.java
+++ b/src/main/java/org/apache/pirk/serialization/JsonSerializer.java
@@ -18,46 +18,55 @@
  */
 package org.apache.pirk.serialization;
 
+import com.google.gson.Gson;
+import com.google.gson.GsonBuilder;
+import org.apache.pirk.querier.wideskies.Querier;
+import org.apache.pirk.querier.wideskies.QuerierDeserializer;
+import org.apache.pirk.query.wideskies.QueryDeserializer;
+import org.apache.pirk.response.wideskies.Response;
+import org.apache.pirk.response.wideskies.ResponseDeserializer;
+
+import org.apache.pirk.query.wideskies.Query;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.InputStreamReader;
 import java.io.OutputStream;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
+import java.io.OutputStreamWriter;
+import java.io.Reader;
+import java.io.Writer;
 
 public class JsonSerializer extends SerializationService
 {
-  private ObjectMapper objectMapper = new ObjectMapper();
+  public static final Gson gson = new GsonBuilder().registerTypeAdapter(Response.class, new ResponseDeserializer())
+      .registerTypeAdapter(Query.class, new QueryDeserializer()).registerTypeAdapter(Querier.class, new QuerierDeserializer()).setPrettyPrinting()
+      .excludeFieldsWithoutExposeAnnotation().serializeNulls().create();
 
   /**
    * Stores the given object on the output stream as JSON.
    *
-   * @param outputStream
-   *          The stream on which to store the object.
-   * @param obj
-   *          The object to be stored.
-   * @throws IOException
-   *           If a problem occurs storing the object on the given stream.
+   * @param outputStream The stream on which to store the object.
+   * @param obj          The object to be stored.
+   * @throws IOException If a problem occurs storing the object on the given stream.
    */
-  @Override
-  public void write(OutputStream outputStream, Storable obj) throws IOException
+  @Override public void write(OutputStream outputStream, Storable obj) throws IOException
   {
-    objectMapper.writerWithDefaultPrettyPrinter().writeValue(outputStream, obj);
+    Writer writer = new OutputStreamWriter(outputStream);
+    gson.toJson(obj, obj.getClass(), writer);
+    writer.close();
   }
 
   /**
    * Read a JSON string from the given input stream and returns the Object representation.
    *
-   * @param inputStream
-   *          The stream from which to read the object.
-   * @param classType
-   *          The type of object being retrieved.
-   * @throws IOException
-   *           If a problem occurs reading the object from the stream.
+   * @param inputStream The stream from which to read the object.
+   * @param classType   The type of object being retrieved.
+   * @throws IOException If a problem occurs reading the object from the stream.
    */
-  @Override
-  public <T> T read(InputStream inputStream, Class<T> classType) throws IOException
+  @Override public <T> T read(InputStream inputStream, Class<T> classType) throws IOException
   {
-    return objectMapper.readValue(inputStream, classType);
+    Reader reader = new InputStreamReader(inputStream);
+    return gson.fromJson(reader, classType);
+
   }
 
 }
diff --git a/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java b/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java
index ac9cf2c..741f944 100644
--- a/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java
+++ b/src/main/java/org/apache/pirk/serialization/LocalFileSystemStore.java
@@ -26,7 +26,7 @@
 public class LocalFileSystemStore extends StorageService
 {
   /**
-   * Creates a new storage service on the local file system using default Java serialization.
+   * Creates a new storage service on the local file system using default JSON serialization.
    */
   public LocalFileSystemStore()
   {
diff --git a/src/main/java/org/apache/pirk/serialization/SerializationService.java b/src/main/java/org/apache/pirk/serialization/SerializationService.java
index 2764fc8..01dbdcd 100644
--- a/src/main/java/org/apache/pirk/serialization/SerializationService.java
+++ b/src/main/java/org/apache/pirk/serialization/SerializationService.java
@@ -23,7 +23,7 @@
 import java.io.InputStream;
 import java.io.OutputStream;
 
-/*
+/**
  * Ability to read and write objects to/from a stream.
  */
 public abstract class SerializationService
diff --git a/src/main/java/org/apache/pirk/serialization/StorageService.java b/src/main/java/org/apache/pirk/serialization/StorageService.java
index a4910df..74f7f03 100644
--- a/src/main/java/org/apache/pirk/serialization/StorageService.java
+++ b/src/main/java/org/apache/pirk/serialization/StorageService.java
@@ -18,7 +18,7 @@
  *******************************************************************************/
 package org.apache.pirk.serialization;
 
-/* 
+/**
  * Common supertype for types that can store objects using serialization.
  */
 abstract class StorageService
@@ -27,7 +27,7 @@
 
   StorageService()
   {
-    this.setSerializer(new JavaSerializer());
+    this.setSerializer(new JsonSerializer());
   }
 
   StorageService(SerializationService service)