PIRK-78 Create a QuerySchema builder for assembling new schemas - closes apache/incubator-pirk#112
diff --git a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
index 7a69580..260dd76 100644
--- a/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
+++ b/src/main/java/org/apache/pirk/querier/wideskies/Querier.java
@@ -58,20 +58,29 @@
@Override public boolean equals(Object o)
{
if (this == o)
+ {
return true;
+ }
if (o == null || getClass() != o.getClass())
+ {
return false;
+ }
Querier querier = (Querier) o;
if (!query.equals(querier.query))
+ {
return false;
+ }
if (!paillier.equals(querier.paillier))
+ {
return false;
+ }
if (!selectors.equals(querier.selectors))
+ {
return false;
+ }
return embedSelectorMap != null ? embedSelectorMap.equals(querier.embedSelectorMap) : querier.embedSelectorMap == null;
-
}
@Override public int hashCode()
diff --git a/src/main/java/org/apache/pirk/query/wideskies/Query.java b/src/main/java/org/apache/pirk/query/wideskies/Query.java
index dd18297..044df2d 100644
--- a/src/main/java/org/apache/pirk/query/wideskies/Query.java
+++ b/src/main/java/org/apache/pirk/query/wideskies/Query.java
@@ -158,24 +158,37 @@
@Override public boolean equals(Object o)
{
if (this == o)
+ {
return true;
+ }
if (o == null || getClass() != o.getClass())
+ {
return false;
+ }
Query query = (Query) o;
if (!queryInfo.equals(query.queryInfo))
+ {
return false;
+ }
if (!queryElements.equals(query.queryElements))
+ {
return false;
+ }
if (expTable != null ? !expTable.equals(query.expTable) : query.expTable != null)
+ {
return false;
+ }
if (expFileBasedLookup != null ? !expFileBasedLookup.equals(query.expFileBasedLookup) : query.expFileBasedLookup != null)
+ {
return false;
+ }
if (!N.equals(query.N))
+ {
return false;
+ }
return NSquared.equals(query.NSquared);
-
}
@Override public int hashCode()
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
index dc2588f..3993453 100644
--- a/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchema.java
@@ -18,19 +18,23 @@
*/
package org.apache.pirk.schema.query;
-import com.google.gson.annotations.Expose;
-import org.apache.pirk.schema.query.filter.DataFilter;
-
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
+import java.util.Map;
import java.util.Objects;
import java.util.Set;
+import org.apache.pirk.schema.query.filter.DataFilter;
+
+import com.google.gson.annotations.Expose;
+
/**
- * Class to hold a query schema
+ * Class to hold information about a query schema.
+ * <p>
+ * The query schema is designed to be instantiated via the {@link QuerySchemaBuilder} or a loader.
*/
public class QuerySchema implements Serializable
{
@@ -74,11 +78,11 @@
@Expose
private final int dataElementSize;
- // Addiional fields by key,value
+ // Additional fields by key,value
@Expose
- private final HashMap<String,String> additionalFields = new HashMap<>();
+ private final Map<String,String> additionalFields = new HashMap<>();
- public QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize)
+ QuerySchema(String schemaName, String dataSchemaName, String selectorName, String filterTypeName, DataFilter filter, int dataElementSize)
{
this.schemaName = schemaName;
this.dataSchemaName = dataSchemaName;
@@ -180,7 +184,7 @@
*
- * @return The additionalFields HashMap
+ * @return The additionalFields map
*/
- public HashMap<String,String> getAdditionalFields()
+ public Map<String,String> getAdditionalFields()
{
return additionalFields;
}
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchemaBuilder.java b/src/main/java/org/apache/pirk/schema/query/QuerySchemaBuilder.java
new file mode 100644
index 0000000..4a6c6da
--- /dev/null
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchemaBuilder.java
@@ -0,0 +1,345 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.pirk.schema.query;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.pirk.schema.data.DataSchema;
+import org.apache.pirk.schema.data.DataSchemaRegistry;
+import org.apache.pirk.schema.data.partitioner.DataPartitioner;
+import org.apache.pirk.schema.query.filter.DataFilter;
+import org.apache.pirk.schema.query.filter.FilterFactory;
+import org.apache.pirk.utils.PIRException;
+import org.apache.pirk.utils.SystemConfiguration;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * The query schema builder is used to compose a new {@link QuerySchema}.
+ * <p>
+ * Each schema is required to have a name, a primary selector name, a reference to a data schema that has already been registered, and a set of element names
+ * in that data schema that will be the subject of the query.
+ * <p>
+ * Optionally, a query schema can define a filter to apply to data elements before performing the encrypted query.
+ * <p>
+ * A builder is used to define each of these characteristics before calling the {@code build()} method to create the query schema.
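+ * <p>
+ * A minimal sketch of intended usage is shown below; the schema, data schema, and element names are purely illustrative, and the named data schema is
+ * assumed to have already been registered in the {@code DataSchemaRegistry}. Note that {@code build()} declares {@code IOException} and
+ * {@code PIRException}, which callers must handle.
+ *
+ * <pre>
+ * {@code
+ * Set<String> elements = new HashSet<>(Arrays.asList("name", "age"));
+ *
+ * QuerySchema schema = new QuerySchemaBuilder()
+ *     .setName("exampleQuerySchema")          // name of the new query schema
+ *     .setDataSchemaName("exampleDataSchema") // previously registered data schema
+ *     .setSelectorName("name")                // element used as the primary selector
+ *     .setQueryElementNames(elements)         // elements returned by the query
+ *     .build();
+ * }
+ * </pre>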
+ */
+public class QuerySchemaBuilder
+{
+ private static final Logger logger = LoggerFactory.getLogger(QuerySchemaBuilder.class);
+
+ private static final String NO_FILTER = "noFilter";
+
+ private String name;
+ private String dataSchemaName;
+ private String selectorName;
+ private Set<String> queryElementNames = Collections.emptySet();
+ private String filterTypeName = NO_FILTER;
+ private Set<String> filteredElementNames = Collections.emptySet();
+ private Map<String,String> additionalFields = Collections.emptyMap();
+
+ /**
+ * Creates a new query schema builder.
+ */
+ public QuerySchemaBuilder()
+ {
+ // Default constructor
+ }
+
+ /**
+ * Builds a new query schema using the information set on the builder.
+ *
+ * @return The query schema.
+ * @throws IOException
+ * If an error occurs while creating the defined query element filter.
+ * @throws PIRException
+ * If the query schema definition is invalid. The exception message will give the details of the problem.
+ */
+ public QuerySchema build() throws IOException, PIRException
+ {
+ verifySchemaProperties();
+ verifyDataSchemaProperties();
+ verifyQueryElements();
+
+ DataFilter filter = instantiateFilter(filterTypeName, filteredElementNames);
+ QuerySchema schema = new QuerySchema(name, dataSchemaName, selectorName, filterTypeName, filter, computeDataElementSize());
+ schema.getElementNames().addAll(queryElementNames);
+ schema.getFilteredElementNames().addAll(filteredElementNames);
+ schema.getAdditionalFields().putAll(additionalFields);
+
+ return schema;
+ }
+
+ /**
+ * Returns the name of the query schema being created.
+ *
+ * @return The query schema name.
+ */
+ public String getName()
+ {
+ return name;
+ }
+
+ /**
+ * Sets the name of the query schema being created.
+ *
+ * @param name
+ * The schema name.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setName(String name)
+ {
+ this.name = name;
+ return this;
+ }
+
+ /**
+ * Returns the name of the data schema associated with this query schema.
+ *
+ * @return The name of the data schema associated with this query schema.
+ */
+ public String getDataSchemaName()
+ {
+ return dataSchemaName;
+ }
+
+ /**
+ * Sets the data schema associated with this query schema.
+ *
+ * @param dataSchemaName
+ * The name of the data schema to set on the query schema.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setDataSchemaName(String dataSchemaName)
+ {
+ this.dataSchemaName = dataSchemaName;
+ return this;
+ }
+
+ /**
+ * Returns the names of the data schema elements that are the subject of this query schema.
+ *
+ * @return A possibly empty set of data element names to return as part of the query.
+ */
+ public Set<String> getQueryElementNames()
+ {
+ return queryElementNames;
+ }
+
+ /**
+ * Sets the names of the data schema elements that are the subject of this query schema.
+ *
+ * @param elementNames
+ * The set of data element names to return as part of the query.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setQueryElementNames(Set<String> elementNames)
+ {
+ this.queryElementNames = elementNames;
+ return this;
+ }
+
+ /**
+ * Returns the element used as the primary selector for the query.
+ *
+ * @return The name of the element in the data schema that is to be used as the primary selector for the query.
+ */
+ public String getSelectorName()
+ {
+ return selectorName;
+ }
+
+ /**
+ * Sets the element used as the primary selector for the query.
+ *
+ * @param selectorName
+ * The name of the element in the data schema that is to be used as the primary selector for the query.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setSelectorName(String selectorName)
+ {
+ this.selectorName = selectorName;
+ return this;
+ }
+
+ /**
+ * Returns the name of a filter to use with this query schema.
+ *
+ * @return The fully qualified class name of a type that implements {@link DataFilter}.
+ */
+ public String getFilterTypeName()
+ {
+ return filterTypeName;
+ }
+
+ /**
+ * Sets the name of a filter to use with this query schema.
+ *
+ * @param filterTypeName
+ * The fully qualified class name of a type that implements {@link DataFilter}.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setFilterTypeName(String filterTypeName)
+ {
+ this.filterTypeName = filterTypeName;
+ return this;
+ }
+
+ /**
+ * Returns the set of names to be filtered when using this query schema.
+ *
+ * @return A possibly empty set of data schema element names to be filtered.
+ */
+ public Set<String> getFilteredElementNames()
+ {
+ return filteredElementNames;
+ }
+
+ /**
+ * Sets the names to be filtered when using this query schema.
+ *
+ * @param filteredElementNames
+ * The set of data schema element names to be filtered.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setFilteredElementNames(Set<String> filteredElementNames)
+ {
+ this.filteredElementNames = filteredElementNames;
+ return this;
+ }
+
+ /**
+ * Returns the additional key:value pairs to be set on the query schema.
+ *
+ * @return A map of arbitrary key:value pairs.
+ */
+ public Map<String,String> getAdditionalFields()
+ {
+ return additionalFields;
+ }
+
+ /**
+ * Sets the additional key:value pairs to be set on the query schema.
+ *
+ * @param additionalFields
+ * The map of key:value pairs to be defined on the query schema.
+ * @return This builder.
+ */
+ public QuerySchemaBuilder setAdditionalFields(Map<String,String> additionalFields)
+ {
+ this.additionalFields = additionalFields;
+ return this;
+ }
+
+ private DataSchema getDataSchema() throws PIRException
+ {
+ if (dataSchemaName == null)
+ {
+ throw new PIRException("Required data schema name is not set");
+ }
+
+ DataSchema dataSchema = DataSchemaRegistry.get(dataSchemaName);
+ if (dataSchema == null)
+ {
+ throw new PIRException("Loaded DataSchema does not exist for dataSchemaName = " + dataSchemaName);
+ }
+ return dataSchema;
+ }
+
+ private int computeDataElementSize() throws PIRException
+ {
+ DataSchema dataSchema = getDataSchema();
+ int dataElementSize = 0;
+ for (String elementName : queryElementNames)
+ {
+ // Compute the number of bits for this element.
+ DataPartitioner partitioner = dataSchema.getPartitionerForElement(elementName);
+ int bits = partitioner.getBits(dataSchema.getElementType(elementName));
+
+ // Multiply by the number of array elements allowed, if applicable.
+ if (dataSchema.isArrayElement(elementName))
+ {
+ bits *= Integer.parseInt(SystemConfiguration.getProperty("pir.numReturnArrayElements"));
+ }
+ dataElementSize += bits;
+
+ logger.info("name = " + elementName + " bits = " + bits + " dataElementSize = " + dataElementSize);
+ }
+
+ return dataElementSize;
+ }
+
+ /**
+ * Instantiate the specified filter.
+ *
+ * Exceptions derive from the call to the {@code getFilter} method of {@link FilterFactory}.
+ *
+ * @param filterTypeName
+ * The name of the filter class to instantiate.
+ * @param filteredElementNames
+ * The set of names of elements of the data schema upon which the filter will act.
+ * @return An instantiation of the filter, set up to filter upon the specified names.
+ * @throws IOException
+ * If there was a failure reading input needed to create the filter.
+ * @throws PIRException
+ * If the filter could not be instantiated.
+ */
+ private DataFilter instantiateFilter(String filterTypeName, Set<String> filteredElementNames) throws IOException, PIRException
+ {
+ return filterTypeName.equals(NO_FILTER) ? null : FilterFactory.getFilter(filterTypeName, filteredElementNames);
+ }
+
+ private void verifyDataSchemaProperties() throws PIRException
+ {
+ // We must have a matching data schema for this query.
+ DataSchema dataSchema = getDataSchema();
+
+ // Ensure the selectorName matches an element in the data schema.
+ if (!dataSchema.containsElement(selectorName))
+ {
+ throw new PIRException("dataSchema = " + dataSchemaName + " does not contain selector = " + selectorName);
+ }
+ }
+
+ private void verifyQueryElements() throws PIRException
+ {
+ DataSchema dataSchema = getDataSchema();
+
+ for (String elementName : queryElementNames)
+ {
+ if (!dataSchema.containsElement(elementName))
+ {
+ throw new PIRException("dataSchema = " + dataSchemaName + " does not contain requested element name = " + elementName);
+ }
+ logger.info("name = " + elementName + " partitionerName = " + dataSchema.getPartitionerTypeName(elementName));
+ }
+ }
+
+ private void verifySchemaProperties() throws PIRException
+ {
+ if (name == null)
+ {
+ throw new PIRException("Required property query schema name is not set");
+ }
+ // Other required properties are checked by other helpers.
+ }
+}
diff --git a/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java b/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
index 949396b..230e0ba 100644
--- a/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
+++ b/src/main/java/org/apache/pirk/schema/query/QuerySchemaLoader.java
@@ -24,6 +24,7 @@
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
+import java.util.Map;
import java.util.Set;
import javax.xml.parsers.DocumentBuilder;
@@ -32,11 +33,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.pirk.schema.data.DataSchema;
-import org.apache.pirk.schema.data.DataSchemaRegistry;
-import org.apache.pirk.schema.data.partitioner.DataPartitioner;
-import org.apache.pirk.schema.query.filter.DataFilter;
-import org.apache.pirk.schema.query.filter.FilterFactory;
import org.apache.pirk.utils.PIRException;
import org.apache.pirk.utils.SystemConfiguration;
import org.slf4j.Logger;
@@ -82,8 +78,6 @@
{
private static final Logger logger = LoggerFactory.getLogger(QuerySchemaLoader.class);
- private static final String NO_FILTER = "noFilter";
-
static
{
logger.info("Loading query schemas: ");
@@ -194,28 +188,23 @@
// Read in and parse the XML file.
Document doc = parseXMLDocument(stream);
+ // Used to build the final schema.
+ QuerySchemaBuilder schemaBuilder = new QuerySchemaBuilder();
+
// Extract the schemaName.
String schemaName = extractValue(doc, "schemaName");
+ schemaBuilder.setName(schemaName);
logger.info("schemaName = " + schemaName);
// Extract the dataSchemaName.
String dataSchemaName = extractValue(doc, "dataSchemaName");
+ schemaBuilder.setDataSchemaName(dataSchemaName);
logger.info("dataSchemaName = " + dataSchemaName);
- // We must have a matching data schema for this query.
- DataSchema dataSchema = DataSchemaRegistry.get(dataSchemaName);
- if (dataSchema == null)
- {
- throw new PIRException("Loaded DataSchema does not exist for dataSchemaName = " + dataSchemaName);
- }
-
- // Extract the selectorName, and ensure it matches an element in the data schema.
+ // Extract the selectorName.
String selectorName = extractValue(doc, "selectorName");
+ schemaBuilder.setSelectorName(selectorName);
logger.info("selectorName = " + selectorName);
- if (!dataSchema.containsElement(selectorName))
- {
- throw new PIRException("dataSchema = " + dataSchemaName + " does not contain selectorName = " + selectorName);
- }
// Extract the query elements.
NodeList elementsList = doc.getElementsByTagName("elements");
@@ -226,50 +215,28 @@
Element elements = (Element) elementsList.item(0);
LinkedHashSet<String> elementNames = new LinkedHashSet<>();
- int dataElementSize = 0;
NodeList nList = elements.getElementsByTagName("name");
for (int i = 0; i < nList.getLength(); i++)
{
Node nNode = nList.item(i);
if (nNode.getNodeType() == Node.ELEMENT_NODE)
{
- // Pull the name
- String queryElementName = nNode.getFirstChild().getNodeValue().trim();
- if (!dataSchema.containsElement(queryElementName))
- {
- throw new PIRException("dataSchema = " + dataSchemaName + " does not contain requested element name = " + queryElementName);
- }
- elementNames.add(queryElementName);
- logger.info("name = " + queryElementName + " partitionerName = " + dataSchema.getPartitionerTypeName(queryElementName));
-
- // Compute the number of bits for this element.
- DataPartitioner partitioner = dataSchema.getPartitionerForElement(queryElementName);
- int bits = partitioner.getBits(dataSchema.getElementType(queryElementName));
-
- // Multiply by the number of array elements allowed, if applicable.
- if (dataSchema.isArrayElement(queryElementName))
- {
- bits *= Integer.parseInt(SystemConfiguration.getProperty("pir.numReturnArrayElements"));
- }
- dataElementSize += bits;
-
- logger.info("name = " + queryElementName + " bits = " + bits + " dataElementSize = " + dataElementSize);
+ elementNames.add(nNode.getFirstChild().getNodeValue().trim());
}
}
+ schemaBuilder.setQueryElementNames(elementNames);
// Extract the filter, if it exists
- String filterTypeName = NO_FILTER;
if (doc.getElementsByTagName("filter").item(0) != null)
{
- filterTypeName = doc.getElementsByTagName("filter").item(0).getTextContent().trim();
+ schemaBuilder.setFilterTypeName(doc.getElementsByTagName("filter").item(0).getTextContent().trim());
}
// Create a filter over the query elements.
- Set<String> filteredNamesSet = extractFilteredElementNames(doc);
- DataFilter filter = instantiateFilter(filterTypeName, filteredNamesSet);
+ schemaBuilder.setFilteredElementNames(extractFilteredElementNames(doc));
// Extract the additional fields, if they exists
- HashMap<String,String> additionalFields = new HashMap<String,String>();
+ Map<String,String> additionalFields = new HashMap<>();
if (doc.getElementsByTagName("additional").item(0) != null)
{
NodeList fieldList = doc.getElementsByTagName("field");
@@ -285,13 +252,10 @@
additionalFields.put(getNodeValue("key", kv), getNodeValue("value", kv));
}
}
+ schemaBuilder.setAdditionalFields(additionalFields);
// Create and return the query schema object.
- QuerySchema querySchema = new QuerySchema(schemaName, dataSchemaName, selectorName, filterTypeName, filter, dataElementSize);
- querySchema.getElementNames().addAll(elementNames);
- querySchema.getFilteredElementNames().addAll(filteredNamesSet);
- querySchema.getAdditionalFields().putAll(additionalFields);
- return querySchema;
+ return schemaBuilder.build();
}
/**
@@ -407,24 +371,4 @@
}
return value;
}
-
- /**
- * Instantiate the specified filter.
- *
- * Exceptions derive from call to the {@code getFilter} method of {@link FilterFactory}
- *
- * @param filterTypeName
- * The name of the filter class we are instantiating
- * @param filteredElementNames
- * The set of names of elements of the data schema up which the filter will act.
- * @return An instantiation of the filter, set up to filter upon the specified names.
- * @throws IOException
- * - failed to read input
- * @throws PIRException
- * - File could not be instantiated
- */
- private DataFilter instantiateFilter(String filterTypeName, Set<String> filteredElementNames) throws IOException, PIRException
- {
- return filterTypeName.equals(NO_FILTER) ? null : FilterFactory.getFilter(filterTypeName, filteredElementNames);
- }
}
diff --git a/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java b/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java
index da3ccbe..94e5ba4 100644
--- a/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java
+++ b/src/test/java/org/apache/pirk/schema/query/LoadQuerySchemaTest.java
@@ -19,6 +19,8 @@
package org.apache.pirk.schema.query;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.File;
@@ -27,6 +29,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
@@ -56,17 +59,17 @@
{
private static final Logger logger = LoggerFactory.getLogger(LoadQuerySchemaTest.class);
- private String querySchemaFile = "querySchemaFile";
- private String dataSchemaName = "fakeDataSchema";
- private String querySchemaName = "fakeQuerySchema";
+ private final String querySchemaFile = "querySchemaFile";
+ private final String dataSchemaName = "fakeDataSchema";
+ private final String querySchemaName = "fakeQuerySchema";
- private String element1 = "elementName1";
- private String element2 = "elementName2";
- private String element3 = "elementName3";
- private String element4 = "elementName4";
+ private final String element1 = "elementName1";
+ private final String element2 = "elementName2";
+ private final String element3 = "elementName3";
+ private final String element4 = "elementName4";
- private List<String> queryElements = Arrays.asList(element1, element2, element3);
- private List<String> filterElements = Collections.singletonList(element2);
+ private final List<String> queryElements = Arrays.asList(element1, element2, element3);
+ private final List<String> filterElements = Collections.singletonList(element2);
@Test
public void testGeneralSchemaLoad() throws Exception
@@ -82,26 +85,11 @@
createStopListFile();
// Create the data schema used and force it to load
- try
- {
- createDataSchema("dataSchemaFile");
- } catch (Exception e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ createDataSchema("dataSchemaFile");
DataSchemaLoader.initialize();
// Create the query schema used and force it to load
- try
- {
- TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, StopListFilter.class.getName());
-
- } catch (IOException e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, StopListFilter.class.getName());
QuerySchemaLoader.initialize();
// Check the entries
@@ -112,26 +100,18 @@
assertEquals(element4, qSchema.getSelectorName());
assertEquals(StopListFilter.class.getName(), qSchema.getFilterTypeName());
- if (!(qSchema.getFilter() instanceof StopListFilter))
- {
- fail("Filter class instance must be StopListFilter");
- }
+ assertTrue("Filter class instance must be StopListFilter", qSchema.getFilter() instanceof StopListFilter);
assertEquals(3, qSchema.getElementNames().size());
for (String item : qSchema.getElementNames())
{
- if (!(item.equals(element1) || item.equals(element2) || item.equals(element3)))
- {
- fail("elementNames: item = " + item + " must equal one of: " + element1 + ", " + element2 + ", or " + element3);
- }
+ assertTrue("elementNames: item = " + item + " must equal one of: " + element1 + ", " + element2 + ", or " + element3,
+ item.equals(element1) || item.equals(element2) || item.equals(element3));
}
assertEquals(1, qSchema.getFilteredElementNames().size());
for (String item : qSchema.getFilteredElementNames())
{
- if (!item.equals(element2))
- {
- fail("filterElementNames: item = " + item + " must equal " + element2);
- }
+ assertEquals("filterElementNames: item = " + item + " must equal " + element2, item, element2);
}
// one string, array IPs, array integers
@@ -169,42 +149,24 @@
String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
// Create the data schema used and force it to load
- try
- {
- createDataSchema("dataSchemaFile");
- } catch (Exception e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ createDataSchema("dataSchemaFile");
DataSchemaLoader.initialize();
// Create the additionalFields
- HashMap<String,String> additionalFields = new HashMap<String,String>();
+ HashMap<String,String> additionalFields = new HashMap<>();
additionalFields.put("key1", "value1");
additionalFields.put("key2", "value2");
// Create the query schema used and force it to load
- try
- {
- TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, null, true, null, false,
- additionalFields);
-
- } catch (IOException e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, element4, queryElements, filterElements, null, true, null, false,
+ additionalFields);
QuerySchemaLoader.initialize();
// Check the entries
QuerySchema qSchema = QuerySchemaRegistry.get(querySchemaName);
- if (qSchema == null)
- {
- logger.info("qSchema is null");
- }
+ assertNotNull("qSchema is null", qSchema);
- HashMap<String,String> schemaAdditionalFields = qSchema.getAdditionalFields();
+ Map<String,String> schemaAdditionalFields = qSchema.getAdditionalFields();
assertEquals(schemaAdditionalFields.size(), 2);
assertEquals(schemaAdditionalFields.get("key1"), "value1");
assertEquals(schemaAdditionalFields.get("key2"), "value2");
@@ -235,32 +197,20 @@
String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
// Create the data schema used and force it to load
- try
- {
- createDataSchema("dataSchemaFile");
- } catch (Exception e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ createDataSchema("dataSchemaFile");
DataSchemaLoader.initialize();
// Create the query schema used and force it to load
- try
- {
- TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements, "bogusFilterClass");
+ TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements, "bogusFilterClass");
- } catch (IOException e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
try
{
QuerySchemaLoader.initialize();
fail("QuerySchemaLoader did not throw exception for bogus filter class");
- } catch (Exception ignore)
- {}
+ } catch (PIRException ignore)
+ {
+ // Expected
+ }
// Reset original query and data schema properties
SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
@@ -289,21 +239,15 @@
String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
// Create the query schema used and force it to load
- try
- {
- TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName + "bogus", element4, queryElements, filterElements, null);
-
- } catch (IOException e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName + "bogus", element4, queryElements, filterElements, null);
try
{
QuerySchemaLoader.initialize();
fail("QuerySchemaLoader did not throw exception for non-existent DataSchema");
- } catch (Exception ignore)
- {}
+ } catch (PIRException ignore)
+ {
+ // Expected
+ }
// Reset original query properties and force to load
SystemConfiguration.setProperty("query.schemas", querySchemasProp);
@@ -325,33 +269,21 @@
String querySchemasProp = SystemConfiguration.getProperty("query.schemas", "none");
// Create the data schema used and force it to load
- try
- {
- createDataSchema("dataSchemaFile");
- } catch (Exception e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
+ createDataSchema("dataSchemaFile");
DataSchemaLoader.initialize();
// Create the query schema used and force it to load
- try
- {
- TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements,
- StopListFilter.class.getName());
+ TestUtils.createQuerySchema(querySchemaFile, querySchemaName, dataSchemaName, "nonExistentElement", queryElements, filterElements,
+ StopListFilter.class.getName());
- } catch (IOException e)
- {
- e.printStackTrace();
- fail(e.toString());
- }
try
{
QuerySchemaLoader.initialize();
fail("QuerySchemaLoader did not throw exception for non-existent selectorName");
} catch (Exception ignore)
- {}
+ {
+ // Expected
+ }
// Reset original query and data schema properties
SystemConfiguration.setProperty("data.schemas", dataSchemasProp);
@@ -380,7 +312,7 @@
}
// Create the test data schema file
- private void createDataSchema(String schemaFile) throws IOException
+ private void createDataSchema(String schemaFile) throws Exception
{
// Create a temporary file for the test schema, set in the properties
File file = File.createTempFile(schemaFile, ".xml");
@@ -389,49 +321,42 @@
SystemConfiguration.setProperty("data.schemas", file.toString());
// Write to the file
- try
- {
- DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
- DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
- Document doc = dBuilder.newDocument();
+ DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
+ DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
+ Document doc = dBuilder.newDocument();
- // root element
- Element rootElement = doc.createElement("schema");
- doc.appendChild(rootElement);
+ // root element
+ Element rootElement = doc.createElement("schema");
+ doc.appendChild(rootElement);
- // Add the schemaName
- Element schemaNameElement = doc.createElement("schemaName");
- schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
- rootElement.appendChild(schemaNameElement);
+ // Add the schemaName
+ Element schemaNameElement = doc.createElement("schemaName");
+ schemaNameElement.appendChild(doc.createTextNode(dataSchemaName));
+ rootElement.appendChild(schemaNameElement);
- // Add the elements
- // element1 -- single String
- TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", PrimitiveTypePartitioner.class.getName());
+ // Add the elements
+ // element1 -- single String
+ TestUtils.addElement(doc, rootElement, element1, PrimitiveTypePartitioner.STRING, "false", PrimitiveTypePartitioner.class.getName());
- // element2 - -- array of Integers
- TestUtils.addElement(doc, rootElement, element2, PrimitiveTypePartitioner.INT, "true", PrimitiveTypePartitioner.class.getName());
+ // element2 -- array of Integers
+ TestUtils.addElement(doc, rootElement, element2, PrimitiveTypePartitioner.INT, "true", PrimitiveTypePartitioner.class.getName());
- // element3 -- array of IP addresses
- TestUtils.addElement(doc, rootElement, element3, PrimitiveTypePartitioner.STRING, "true", IPDataPartitioner.class.getName());
+ // element3 -- array of IP addresses
+ TestUtils.addElement(doc, rootElement, element3, PrimitiveTypePartitioner.STRING, "true", IPDataPartitioner.class.getName());
- // element4 -- single byte type
- TestUtils.addElement(doc, rootElement, element4, PrimitiveTypePartitioner.BYTE, "false", PrimitiveTypePartitioner.class.getName());
+ // element4 -- single byte type
+ TestUtils.addElement(doc, rootElement, element4, PrimitiveTypePartitioner.BYTE, "false", PrimitiveTypePartitioner.class.getName());
- // Write to a xml file
- TransformerFactory transformerFactory = TransformerFactory.newInstance();
- Transformer transformer = transformerFactory.newTransformer();
- DOMSource source = new DOMSource(doc);
- StreamResult result = new StreamResult(file);
- transformer.transform(source, result);
+ // Write to an XML file
+ TransformerFactory transformerFactory = TransformerFactory.newInstance();
+ Transformer transformer = transformerFactory.newTransformer();
+ DOMSource source = new DOMSource(doc);
+ StreamResult result = new StreamResult(file);
+ transformer.transform(source, result);
- // Output for testing
- StreamResult consoleResult = new StreamResult(System.out);
- transformer.transform(source, consoleResult);
- System.out.println();
-
- } catch (Exception e)
- {
- e.printStackTrace();
- }
+ // Output for testing
+ StreamResult consoleResult = new StreamResult(System.out);
+ transformer.transform(source, consoleResult);
+ System.out.println();
}
}
diff --git a/src/test/java/org/apache/pirk/serialization/SerializationTest.java b/src/test/java/org/apache/pirk/serialization/SerializationTest.java
index a44b7a9..3c35949 100644
--- a/src/test/java/org/apache/pirk/serialization/SerializationTest.java
+++ b/src/test/java/org/apache/pirk/serialization/SerializationTest.java
@@ -167,7 +167,7 @@
// Deserialize Querier
Querier deserializedQuerier = service.read(new FileInputStream(fileQuerier), Querier.class);
// Check
- Assert.assertTrue(querier.equals(deserializedQuerier));
+ Assert.assertEquals(querier, deserializedQuerier);
} catch (IOException e)
{
logger.error("File operation error: ", e);