[EAGLE-1081] Checkstyle fixes for eagle-entity-base module

<!--
{% comment %}
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements.  See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License.  You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
{% endcomment %}
-->

Be sure to do all of the following to help us incorporate your contribution
quickly and easily:

 - [ ] Make sure the PR title is formatted like:
   `[EAGLE-<Jira issue #>] Description of pull request`
 - [ ] Make sure tests pass via `mvn clean verify`. (Even better, enable
       Travis-CI on your fork and ensure the whole test matrix passes).
 - [ ] Replace `<Jira issue #>` in the title with the actual Jira issue
       number, if there is one.
 - [ ] If this contribution is large, please file an Apache
       [Individual Contributor License Agreement](https://www.apache.org/licenses/icla.txt).

---

Author: Colm O hEigeartaigh <coheigea@apache.org>

Closes #985 from coheigea/EAGLE-1081.
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java
index 17b3fdb..14245df 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityContext.java
@@ -20,21 +20,21 @@
 import java.util.Map;
 
 public class EntityContext {
-	private Map<String, Object> context;
+    private Map<String, Object> context;
 
-	public Map<String, Object> getContext() {
-		return context;
-	}
-	
-	public EntityContext() {
-		this.context = new HashMap<>();
-	}
-	
-	protected EntityContext(EntityContext context) {
-		this.context = new HashMap<>(context.context);
-	}
-	
-	public EntityContext cloneEntity() {
-		return new EntityContext(this);
-	}
+    public Map<String, Object> getContext() {
+        return context;
+    }
+
+    public EntityContext() {
+        this.context = new HashMap<>();
+    }
+
+    protected EntityContext(EntityContext context) {
+        this.context = new HashMap<>(context.context);
+    }
+
+    public EntityContext cloneEntity() {
+        return new EntityContext(this);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java
index fb86fa6..c291528 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/EntityJsonModule.java
@@ -21,7 +21,7 @@
 import java.util.Map;
 
 public class EntityJsonModule extends SimpleModule {
-    public EntityJsonModule(){
-        addSerializer(Map.Entry.class,new MapEntrySerializer());
+    public EntityJsonModule() {
+        addSerializer(Map.Entry.class, new MapEntrySerializer());
     }
-}
\ No newline at end of file
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java
index 4cebbf6..8a80cf0 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/MapEntrySerializer.java
@@ -29,10 +29,11 @@
     private static final String VALUE_FIELD = "value";
 
     @Override
-    public void serialize(Map.Entry entry, JsonGenerator jsonGenerator, SerializerProvider serializerProvider) throws IOException, JsonProcessingException {
+    public void serialize(Map.Entry entry, JsonGenerator jsonGenerator, SerializerProvider serializerProvider)
+        throws IOException, JsonProcessingException {
         jsonGenerator.writeStartObject();
-        jsonGenerator.writeObjectField(KEY_FIELD,entry.getKey());
-        jsonGenerator.writeObjectField(VALUE_FIELD,entry.getValue());
+        jsonGenerator.writeObjectField(KEY_FIELD, entry.getKey());
+        jsonGenerator.writeObjectField(VALUE_FIELD, entry.getValue());
         jsonGenerator.writeEndObject();
     }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java
index 3304bea..658c20a 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/NoSuchRowException.java
@@ -16,14 +16,14 @@
  */
 package org.apache.eagle.log.base.taggedlog;
 
-public class NoSuchRowException extends RuntimeException{
-	static final long serialVersionUID = -4538233994503905943L;
+public class NoSuchRowException extends RuntimeException {
+    static final long serialVersionUID = -4538233994503905943L;
 
-	public NoSuchRowException(){
-		super();
-	}
-	
-	public NoSuchRowException(String s){
-		super(s);
-	}
+    public NoSuchRowException() {
+        super();
+    }
+
+    public NoSuchRowException(String s) {
+        super(s);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java
index d72c35a..3c4166c 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/RowkeyAPIEntity.java
@@ -25,57 +25,72 @@
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
-@XmlType(propOrder = {"success", "exception", "prefixHashCode", "timestamp", "humanTime", "tagNameHashValueHashMap", "fieldNameValueMap"})
+@XmlType(propOrder = {
+                      "success", "exception", "prefixHashCode", "timestamp", "humanTime",
+                      "tagNameHashValueHashMap", "fieldNameValueMap"
+    })
 public class RowkeyAPIEntity {
-	boolean success;
-	String exception;
-	int prefixHashCode;
-	long timestamp;
-	String humanTime;
-	Map<Integer, Integer> tagNameHashValueHashMap;
-	Map<String, String> fieldNameValueMap;
-	
-	public boolean isSuccess() {
-		return success;
-	}
-	public void setSuccess(boolean success) {
-		this.success = success;
-	}
-	public String getException() {
-		return exception;
-	}
-	public void setException(String exception) {
-		this.exception = exception;
-	}
-	public String getHumanTime() {
-		return humanTime;
-	}
-	public void setHumanTime(String humanTime) {
-		this.humanTime = humanTime;
-	}
-	public int getPrefixHashCode() {
-		return prefixHashCode;
-	}
-	public void setPrefixHashCode(int prefixHashcode) {
-		this.prefixHashCode = prefixHashcode;
-	}
-	public long getTimestamp() {
-		return timestamp;
-	}
-	public void setTimestamp(long timestamp) {
-		this.timestamp = timestamp;
-	}
-	public Map<Integer, Integer> getTagNameHashValueHashMap() {
-		return tagNameHashValueHashMap;
-	}
-	public void setTagNameHashValueHashMap(
-			Map<Integer, Integer> tagNameHashValueHashMap) {
-		this.tagNameHashValueHashMap = tagNameHashValueHashMap;
-	}
-	public Map<String, String> getFieldNameValueMap() {
-		return fieldNameValueMap;
-	}
-	public void setFieldNameValueMap(Map<String, String> fieldNameValueMap) {
-		this.fieldNameValueMap = fieldNameValueMap;
-	}
+    boolean success;
+    String exception;
+    int prefixHashCode;
+    long timestamp;
+    String humanTime;
+    Map<Integer, Integer> tagNameHashValueHashMap;
+    Map<String, String> fieldNameValueMap;
+
+    public boolean isSuccess() {
+        return success;
+    }
+
+    public void setSuccess(boolean success) {
+        this.success = success;
+    }
+
+    public String getException() {
+        return exception;
+    }
+
+    public void setException(String exception) {
+        this.exception = exception;
+    }
+
+    public String getHumanTime() {
+        return humanTime;
+    }
+
+    public void setHumanTime(String humanTime) {
+        this.humanTime = humanTime;
+    }
+
+    public int getPrefixHashCode() {
+        return prefixHashCode;
+    }
+
+    public void setPrefixHashCode(int prefixHashcode) {
+        this.prefixHashCode = prefixHashcode;
+    }
+
+    public long getTimestamp() {
+        return timestamp;
+    }
+
+    public void setTimestamp(long timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public Map<Integer, Integer> getTagNameHashValueHashMap() {
+        return tagNameHashValueHashMap;
+    }
+
+    public void setTagNameHashValueHashMap(Map<Integer, Integer> tagNameHashValueHashMap) {
+        this.tagNameHashValueHashMap = tagNameHashValueHashMap;
+    }
+
+    public Map<String, String> getFieldNameValueMap() {
+        return fieldNameValueMap;
+    }
+
+    public void setFieldNameValueMap(Map<String, String> fieldNameValueMap) {
+        this.fieldNameValueMap = fieldNameValueMap;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java
index b396b06..8e6d314 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogAPIEntity.java
@@ -42,10 +42,8 @@
 import java.util.Set;
 
 /**
- * rowkey: prefix + timestamp + tagNameValues
- * as of now, all tags will be persisted as a column in hbase table
- * tag name is column qualifier name
- * tag value is column value.
+ * rowkey: prefix + timestamp + tagNameValues as of now, all tags will be persisted as a column in hbase table
+ * tag name is column qualifier name tag value is column value.
  */
 @JsonFilter(TaggedLogAPIEntity.PropertyBeanFilterName)
 public class TaggedLogAPIEntity implements PropertyChangeListener, Serializable {
@@ -63,17 +61,14 @@
     }
 
     /**
-     * Extra dynamic attributes.
-     * TODO: can we move exp, serializeAlias, serializeVerbose to a wrapper class?
+     * Extra dynamic attributes. TODO: can we move exp, serializeAlias, serializeVerbose to a wrapper class?
      */
     private Map<String, Object> exp;
 
     private String encodedRowkey;
     // track what qualifiers are changed
     private Set<String> modifiedProperties = new HashSet<String>();
-    protected PropertyChangeSupport pcs
-            = new PropertyChangeSupport(this);
-
+    protected PropertyChangeSupport pcs = new PropertyChangeSupport(this);
 
     public Map<String, String> getSerializeAlias() {
         return serializeAlias;
@@ -135,6 +130,7 @@
         pcs.firePropertyChange(fieldModified, null, null);
     }
 
+    @Override
     public void propertyChange(PropertyChangeEvent evt) {
         modifiedProperties.add(evt.getPropertyName());
     }
@@ -143,6 +139,7 @@
         return this.modifiedProperties;
     }
 
+    @Override
     public String toString() {
         StringBuffer sb = new StringBuffer();
         sb.append("prefix:");
@@ -189,22 +186,26 @@
         };
 
         @Override
-        public void serializeAsField(Object pojo, JsonGenerator jgen, SerializerProvider provider, PropertyWriter writer) throws Exception {
+        public void serializeAsField(Object pojo, JsonGenerator jgen, SerializerProvider provider,
+                                     PropertyWriter writer)
+            throws Exception {
             if (pojo instanceof TaggedLogAPIEntity) {
-                TaggedLogAPIEntity entity = (TaggedLogAPIEntity) pojo;
+                TaggedLogAPIEntity entity = (TaggedLogAPIEntity)pojo;
                 Set<String> modified = entity.modifiedQualifiers();
                 Set<String> basePropertyNames = getPropertyNames();
                 String writerName = writer.getName();
                 if (modified.contains(writerName) || basePropertyNames.contains(writerName)) {
                     if ((!entity.isSerializeVerbose() && verboseFields.contains(writerName))
-                            || (timestamp.equals(writerName) && !EntityDefinitionManager.isTimeSeries(entity.getClass()))) {
+                        || (timestamp.equals(writerName)
+                            && !EntityDefinitionManager.isTimeSeries(entity.getClass()))) {
                         // log skip
                         if (LOG.isDebugEnabled()) {
                             LOG.debug("skip field");
                         }
                     } else {
                         // if serializeAlias is not null and exp is not null
-                        if (exp.equals(writerName) && entity.getSerializeAlias() != null && entity.getExp() != null) {
+                        if (exp.equals(writerName) && entity.getSerializeAlias() != null
+                            && entity.getExp() != null) {
                             Map<String, Object> _exp = new HashMap<String, Object>();
                             for (Map.Entry<String, Object> entry : entity.getExp().entrySet()) {
                                 String alias = entity.getSerializeAlias().get(entry.getKey());
@@ -248,4 +249,4 @@
         mapper.setFilters(TaggedLogAPIEntity.getFilterProvider());
         return mapper;
     }
-}
\ No newline at end of file
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java
index 1df1c0d..1b712c4 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/base/taggedlog/TaggedLogObjectMapper.java
@@ -19,17 +19,20 @@
 import java.util.Map;
 
 public interface TaggedLogObjectMapper {
-	/**
-	 * when read, business logic should convert schema-less key/value into business object based on its own schema
-	 * @param entity
-	 * @param qualifierValues
-	 */
-	public void populateQualifierValues(TaggedLogAPIEntity entity, Map<String, byte[]> qualifierValues);
-	
-	/**
-	 * when write, business logic should convert business object to schema-less key value
-	 * @param entity
-	 * @return
-	 */
-	public Map<String, byte[]> createQualifierValues(TaggedLogAPIEntity entity);	
+    /**
+     * when read, business logic should convert schema-less key/value into business object based on its own
+     * schema
+     * 
+     * @param entity
+     * @param qualifierValues
+     */
+    public void populateQualifierValues(TaggedLogAPIEntity entity, Map<String, byte[]> qualifierValues);
+
+    /**
+     * when write, business logic should convert business object to schema-less key value
+     * 
+     * @param entity
+     * @return
+     */
+    public Map<String, byte[]> createQualifierValues(TaggedLogAPIEntity entity);
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java
index 916706f..3823d62 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/AbstractHBaseLogReader.java
@@ -33,208 +33,213 @@
 import java.util.Map;
 
 /**
- * HBase Log Reader basic initialization:
+ * HBase Log Reader basic initialization.
  * <ol>
- *   <li>Open HBase connection to target HBase table</li>
- *   <li>Generate HBase filter,start and stop row key, output qualifier and Scan </li>
- *   <li><code>onOpen(HTableInterface,Scan)</code>: Callback abstract method </li>
- *   <li><code>close</code>: Close HBase connection</li>
+ * <li>Open HBase connection to target HBase table</li>
+ * <li>Generate HBase filter,start and stop row key, output qualifier and Scan</li>
+ * <li><code>onOpen(HTableInterface,Scan)</code>: Callback abstract method</li>
+ * <li><code>close</code>: Close HBase connection</li>
  * </ol>
  *
  * @param <T> Reader entity class type
- *
  */
 public abstract class AbstractHBaseLogReader<T> implements LogReader<T> {
-	private static Logger LOG = LoggerFactory.getLogger(AbstractHBaseLogReader.class);
+    private static Logger LOG = LoggerFactory.getLogger(AbstractHBaseLogReader.class);
 
-	protected byte[][] qualifiers;
-	private HTableInterface tbl;
-	private byte[] startKey;
-	private byte[] stopKey;
-	protected Map<String, List<String>> searchTags;
-	private Filter filter;
-	private Date startTime;
-	private Date endTime;
+    protected byte[][] qualifiers;
+    private HTableInterface tbl;
+    private byte[] startKey;
+    private byte[] stopKey;
+    protected Map<String, List<String>> searchTags;
+    private Filter filter;
+    private Date startTime;
+    private Date endTime;
 
-//	protected ResultScanner rs;
-	private boolean isOpen = false;
+    // protected ResultScanner rs;
+    private boolean isOpen = false;
 
-	/**
-	 * TODO it's ugly that both _ed and prefix fields can hold prefix information,
-	 * prefix field should be in precedence over _ed
-	 */
-	private String _prefix;
-	protected EntityDefinition _ed;
+    /**
+     * TODO it's ugly that both ed and prefix fields can hold prefix information, prefix field should be in
+     * precedence over ed.
+     */
+    private String prefix;
+    protected EntityDefinition ed;
 
-	public AbstractHBaseLogReader(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime,
-	                              Filter filter, String lastScanKey, byte[][] outputQualifiers){
-		this(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, null);
-	}
-	/**
-	 * This constructor supports partition.
-	 *
-	 * @param ed entity definition
-	 * @param partitions partition values, which is sorted in partition definition order. TODO: in future we need to support
-	 * multiple values for one partition field
-	 * @param startTime start time of the query
-	 * @param endTime end time of the query
-	 * @param filter filter for the hbase scan
-	 * @param lastScanKey the key of last scan
-	 * @param outputQualifiers the bytes of output qualifier names
-	 * @param prefix can be populated from outside world specifically for generic metric reader
-	 */
-	public AbstractHBaseLogReader(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime,
-	                              Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix){
-		this.startTime = startTime;
-		this.endTime = endTime;
-		this._ed = ed;
-		if (_ed.getPartitions() != null) {
-			if (partitions == null || _ed.getPartitions().length != partitions.size()) {
-				throw new IllegalArgumentException("Invalid argument. Entity " + ed.getClass().getSimpleName() + " defined "
-						+ "partitions, but argument partitions is null or number of partition values are different!");
-			}
-		}
-		/**
-		 * decide prefix field value
-		 */
-		if(prefix == null || prefix.isEmpty()){
-			this._prefix = _ed.getPrefix();
-		}else{
-			this._prefix = prefix;
-		}
-		this.qualifiers = outputQualifiers;
-		this.filter = filter;
+    public AbstractHBaseLogReader(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime,
+                                  Filter filter, String lastScanKey, byte[][] outputQualifiers) {
+        this(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, null);
+    }
 
-		this.startKey = buildRowKey(this._prefix, partitions, startTime);
-		
-		
-		/**
-		 * startTime should be inclusive, -128 is max value for hbase Bytes comparison, see PureJavaComparer.compareTo
-		 * as an alternative, we can use startTime-1000 and endTime-1000 to make sure startTime is inclusive and endTime is exclusive
-		 */
-		this.startKey = ByteUtil.concat(this.startKey, new byte[] {-1, -1,-1,-1});
-		if (lastScanKey == null) {
-			this.stopKey = buildRowKey(this._prefix, partitions, endTime);
-			// endTime should be exclusive
-			this.stopKey = ByteUtil.concat(this.stopKey, new byte[] {-1,-1,-1,-1,-1});
-		} else {
-			// build stop key
-			this.stopKey = EagleBase64Wrapper.decode(lastScanKey);
-			// TODO to-be-fixed, probably it's an issue because contacting 1 is not
-			// enough for lexicographical sorting
-			this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { 1 });
-		}
-	}
-	
-	/**
-	 * TODO If the required field is null for a row, then this row will not be fetched. That could be a problem for counting
-	 * Need another version of read to strictly get the number of rows which will return all the columns for a column family
-	 */
-	@Override
-	public void open() throws IOException {
-		if (isOpen)
-			return; // silently return
-		try {
-			tbl = EagleConfigFactory.load().getHTable(_ed.getTable());
-		} catch (RuntimeException ex) {
-			throw new IOException(ex);
-		}
+    /**
+     * This constructor supports partition.
+     *
+     * @param ed entity definition
+     * @param partitions partition values, which is sorted in partition definition order. TODO: in future we
+     *            need to support multiple values for one partition field
+     * @param startTime start time of the query
+     * @param endTime end time of the query
+     * @param filter filter for the hbase scan
+     * @param lastScanKey the key of last scan
+     * @param outputQualifiers the bytes of output qualifier names
+     * @param prefix can be populated from outside world specifically for generic metric reader
+     */
+    public AbstractHBaseLogReader(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime,
+                                  Filter filter, String lastScanKey, byte[][] outputQualifiers,
+                                  String prefix) {
+        this.startTime = startTime;
+        this.endTime = endTime;
+        this.ed = ed;
+        if (ed.getPartitions() != null) {
+            if (partitions == null || ed.getPartitions().length != partitions.size()) {
+                throw new IllegalArgumentException("Invalid argument. Entity " + ed.getClass().getSimpleName()
+                                                   + " defined "
+                                                   + "partitions, but argument partitions is null or number of partition values are different!");
+            }
+        }
+        /**
+         * decide prefix field value
+         */
+        if (prefix == null || prefix.isEmpty()) {
+            this.prefix = ed.getPrefix();
+        } else {
+            this.prefix = prefix;
+        }
+        this.qualifiers = outputQualifiers;
+        this.filter = filter;
 
-		Scan s1 = new Scan();
-		// reverse timestamp, startRow is stopKey, and stopRow is startKey
-		s1.setStartRow(stopKey);
-		s1.setStopRow(startKey);
-		s1.setFilter(filter);
-		// TODO the # of cached rows should be minimum of (pagesize and 100)
-		int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize();
-		s1.setCaching(cs);
-		// TODO not optimized for all applications
-		s1.setCacheBlocks(true)
-		;
-		// scan specified columnfamily and qualifiers
-		if(this.qualifiers == null) {
-			// Filter all
-			s1.addFamily(_ed.getColumnFamily().getBytes());
-		}else{
-			for (byte[] qualifier : qualifiers) {
-				s1.addColumn(_ed.getColumnFamily().getBytes(), qualifier);
-			}
-		}
-		// TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should use SingleColumnValueExcludeFilter, 
-		// but it's complicated in current implementation. 
-		workaroundHBASE2198(s1, filter);
-		if (LOG.isDebugEnabled()) {
-			LOG.debug(s1.toString());
-		}
-//		rs = tbl.getScanner(s1);
-		this.onOpen(tbl,s1);
-		isOpen = true;
-	}
+        this.startKey = buildRowKey(this.prefix, partitions, startTime);
 
-	/**
-	 * HBase table connection callback function
-	 *
-	 * @param tbl   HBase table connection
-	 * @param scan  HBase scan
-	 * @throws IOException
-	 */
-	protected abstract void onOpen(HTableInterface tbl,Scan scan) throws IOException;
+        /**
+         * startTime should be inclusive, -128 is max value for hbase Bytes comparison, see
+         * PureJavaComparer.compareTo as an alternative, we can use startTime-1000 and endTime-1000 to make
+         * sure startTime is inclusive and endTime is exclusive
+         */
+        this.startKey = ByteUtil.concat(this.startKey, new byte[] { -1, -1, -1, -1 });
+        if (lastScanKey == null) {
+            this.stopKey = buildRowKey(this.prefix, partitions, endTime);
+            // endTime should be exclusive
+            this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { -1, -1, -1, -1, -1 });
+        } else {
+            // build stop key
+            this.stopKey = EagleBase64Wrapper.decode(lastScanKey);
+            // TODO to-be-fixed, probably it's an issue because contacting 1 is not
+            // enough for lexicographical sorting
+            this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { 1 });
+        }
+    }
 
-	/**
-	 * <h2>History</h2>
-	 * <ul>
-	 * 	<li><b>Nov 19th, 2014</b>: Fix for out put all qualifiers</li>
-	 * </ul>
-	 * @param s1
-	 * @param filter
-	 */
-	protected void workaroundHBASE2198(Scan s1, Filter filter) {
-		if (filter instanceof SingleColumnValueFilter) {
-			if(this.qualifiers == null){
-				s1.addFamily(((SingleColumnValueFilter) filter).getFamily());
-			}else {
-				s1.addColumn(((SingleColumnValueFilter) filter).getFamily(), ((SingleColumnValueFilter) filter).getQualifier());
-			}
-			return;
-		}
-		if (filter instanceof FilterList) {
-			for (Filter f : ((FilterList)filter).getFilters()) {
-				workaroundHBASE2198(s1, f);
-			}
-		}
-	}
+    /**
+     * TODO If the required field is null for a row, then this row will not be fetched. That could be a
+     * problem for counting Need another version of read to strictly get the number of rows which will return
+     * all the columns for a column family
+     */
+    @Override
+    public void open() throws IOException {
+        if (isOpen) {
+            return; // silently return
+        }
+        try {
+            tbl = EagleConfigFactory.load().getHTable(ed.getTable());
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
 
-	/**
-	 * <h2>Close:</h2>
-	 * 1. release current table connection
-	 *
-	 * @throws IOException
-	 */
-	@Override
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-//		if(rs != null){
-//			rs.close();
-//		}
-	}
+        Scan s1 = new Scan();
+        // reverse timestamp, startRow is stopKey, and stopRow is startKey
+        s1.setStartRow(stopKey);
+        s1.setStopRow(startKey);
+        s1.setFilter(filter);
+        // TODO the # of cached rows should be minimum of (pagesize and 100)
+        int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize();
+        s1.setCaching(cs);
+        // TODO not optimized for all applications
+        s1.setCacheBlocks(true);
+        // scan specified columnfamily and qualifiers
+        if (this.qualifiers == null) {
+            // Filter all
+            s1.addFamily(ed.getColumnFamily().getBytes());
+        } else {
+            for (byte[] qualifier : qualifiers) {
+                s1.addColumn(ed.getColumnFamily().getBytes(), qualifier);
+            }
+        }
+        // TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation
+        // should use SingleColumnValueExcludeFilter,
+        // but it's complicated in current implementation.
+        workaroundHBASE2198(s1, filter);
+        if (LOG.isDebugEnabled()) {
+            LOG.debug(s1.toString());
+        }
+        // rs = tbl.getScanner(s1);
+        this.onOpen(tbl, s1);
+        isOpen = true;
+    }
 
-	private static byte[] buildRowKey(String prefix, List<String> partitions, Date t){
-		final int length = (partitions == null) ? (4 + 8) : (4 + 8 + partitions.size() * 4);
-		final byte[] key = new byte[length];
-		int offset = 0;
-		ByteUtil.intToBytes(prefix.hashCode(), key, offset);
-		offset += 4;
-		if (partitions != null) {
-			for (String partition : partitions) {
-				ByteUtil.intToBytes(partition.hashCode(), key, offset);
-				offset += 4;
-			}
-		}
-		// reverse timestamp
-		long ts = Long.MAX_VALUE - t.getTime();
-		ByteUtil.longToBytes(ts, key, offset);
-		return key;
-	}
+    /**
+     * HBase table connection callback function.
+     *
+     * @param tbl HBase table connection
+     * @param scan HBase scan
+     * @throws IOException
+     */
+    protected abstract void onOpen(HTableInterface tbl, Scan scan) throws IOException;
+
+    /**
+     * <h2>History</h2>.
+     * <ul>
+     * <li><b>Nov 19th, 2014</b>: Fix for out put all qualifiers</li>
+     * </ul>
+     *
+     * @param s1
+     * @param filter
+     */
+    protected void workaroundHBASE2198(Scan s1, Filter filter) {
+        if (filter instanceof SingleColumnValueFilter) {
+            if (this.qualifiers == null) {
+                s1.addFamily(((SingleColumnValueFilter)filter).getFamily());
+            } else {
+                s1.addColumn(((SingleColumnValueFilter)filter).getFamily(),
+                             ((SingleColumnValueFilter)filter).getQualifier());
+            }
+            return;
+        }
+        if (filter instanceof FilterList) {
+            for (Filter f : ((FilterList)filter).getFilters()) {
+                workaroundHBASE2198(s1, f);
+            }
+        }
+    }
+
+    /**
+     * <h2>Close:</h2> 1. release current table connection
+     *
+     * @throws IOException
+     */
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+        // if(rs != null){
+        // rs.close();
+        // }
+    }
+
+    private static byte[] buildRowKey(String prefix, List<String> partitions, Date t) {
+        final int length = (partitions == null) ? (4 + 8) : (4 + 8 + partitions.size() * 4);
+        final byte[] key = new byte[length];
+        int offset = 0;
+        ByteUtil.intToBytes(prefix.hashCode(), key, offset);
+        offset += 4;
+        if (partitions != null) {
+            for (String partition : partitions) {
+                ByteUtil.intToBytes(partition.hashCode(), key, offset);
+                offset += 4;
+            }
+        }
+        // reverse timestamp
+        long ts = Long.MAX_VALUE - t.getTime();
+        ByteUtil.longToBytes(ts, key, offset);
+        return key;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java
index 71253da..f7de525 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/BaseEntityRepository.java
@@ -18,9 +18,9 @@
 
 import org.apache.eagle.log.entity.repo.EntityRepository;
 
-public class BaseEntityRepository  extends EntityRepository {
+public class BaseEntityRepository extends EntityRepository {
 
-	public BaseEntityRepository() {
-		entitySet.add(GenericMetricEntity.class);
-	}
+    public BaseEntityRepository() {
+        entitySet.add(GenericMetricEntity.class);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java
index 4ad8959..37b163c 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityCreationListener.java
@@ -19,8 +19,8 @@
 import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
 
 /**
- * Interface to notify creation event of an entity 
+ * Interface to notify creation event of an entity
  */
 public interface EntityCreationListener {
-	public void entityCreated(TaggedLogAPIEntity entity) throws Exception;
+    public void entityCreated(TaggedLogAPIEntity entity) throws Exception;
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java
index 6e5cb5c..4747760 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityQualifierUtils.java
@@ -34,248 +34,274 @@
 import java.util.regex.Pattern;
 
 public class EntityQualifierUtils {
-	private final static Logger LOG = LoggerFactory.getLogger(EntityQualifierUtils.class);
+    private static final Logger LOG = LoggerFactory.getLogger(EntityQualifierUtils.class);
 
-	public static Map<String,Object> keyValuesToMap(List<KeyValue> row,EntityDefinition ed){
-		Map<String,Object> result = new HashMap<String,Object>();
-		for(KeyValue kv:row){
-			String qualifierName = new String(kv.getQualifier());
-			if(!ed.isTag(qualifierName)){
-				Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName);
-				if(qualifier == null){
-					qualifier = ed.getQualifierNameMap().get(qualifierName);
-				}
-				qualifierName = qualifier.getDisplayName();
-				Object value = qualifier.getSerDeser().deserialize(kv.getValue());
-				result.put(qualifierName,value);
-			}else{
-				result.put(qualifierName,new String(kv.getValue()));
-			}
-		}
-		return result;
-	}
+    public static Map<String, Object> keyValuesToMap(List<KeyValue> row, EntityDefinition ed) {
+        Map<String, Object> result = new HashMap<String, Object>();
+        for (KeyValue kv : row) {
+            String qualifierName = new String(kv.getQualifier());
+            if (!ed.isTag(qualifierName)) {
+                Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName);
+                if (qualifier == null) {
+                    qualifier = ed.getQualifierNameMap().get(qualifierName);
+                }
+                qualifierName = qualifier.getDisplayName();
+                Object value = qualifier.getSerDeser().deserialize(kv.getValue());
+                result.put(qualifierName, value);
+            } else {
+                result.put(qualifierName, new String(kv.getValue()));
+            }
+        }
+        return result;
+    }
 
-	public static Map<String,Double> keyValuesToDoubleMap(List<KeyValue> row,EntityDefinition ed){
-		Map<String,Double> result = new HashMap<String,Double>();
-		for(KeyValue kv:row){
-			String qualifierName = new String(kv.getQualifier());
-			if(!ed.isTag(qualifierName)){
-				Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName);
-				if(qualifier == null){
-					qualifier = ed.getQualifierNameMap().get(qualifierName);
-				}
-				qualifierName = qualifier.getDisplayName();
-				Object value = qualifier.getSerDeser().deserialize(kv.getValue());
-				result.put(qualifierName,convertObjToDouble(value));
-			}else{
-				result.put(qualifierName,Double.NaN);
-			}
-		}
-		return result;
-	}
+    public static Map<String, Double> keyValuesToDoubleMap(List<KeyValue> row, EntityDefinition ed) {
+        Map<String, Double> result = new HashMap<String, Double>();
+        for (KeyValue kv : row) {
+            String qualifierName = new String(kv.getQualifier());
+            if (!ed.isTag(qualifierName)) {
+                Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName);
+                if (qualifier == null) {
+                    qualifier = ed.getQualifierNameMap().get(qualifierName);
+                }
+                qualifierName = qualifier.getDisplayName();
+                Object value = qualifier.getSerDeser().deserialize(kv.getValue());
+                result.put(qualifierName, convertObjToDouble(value));
+            } else {
+                result.put(qualifierName, Double.NaN);
+            }
+        }
+        return result;
+    }
 
-	/**
-	 * Map[Display Name,Double Value]
-	 *
-	 * @param map
-	 * @param ed
-	 * @return
-	 */
-	public static Map<String,Double> bytesMapToDoubleMap(Map<String,byte[]> map,EntityDefinition ed){
-		Map<String,Double> result = new HashMap<String,Double>();
-		for(Map.Entry<String,byte[]> entry:map.entrySet()){
-			String qualifierName = entry.getKey();
-			Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName);
-			if(qualifier == null) qualifier = ed.getQualifierNameMap().get(qualifierName);
-			if(qualifier!=null && entry.getValue()!=null) {
-				qualifierName = qualifier.getDisplayName();
-				Object value = qualifier.getSerDeser().deserialize(entry.getValue());
-				result.put(qualifierName, convertObjToDouble(value));
-			}else{
-				result.put(qualifierName,null);
-			}
-		}
-		return result;
-	}
+    /**
+     * Map[Display Name,Double Value]
+     *
+     * @param map
+     * @param ed
+     * @return
+     */
+    public static Map<String, Double> bytesMapToDoubleMap(Map<String, byte[]> map, EntityDefinition ed) {
+        Map<String, Double> result = new HashMap<String, Double>();
+        for (Map.Entry<String, byte[]> entry : map.entrySet()) {
+            String qualifierName = entry.getKey();
+            Qualifier qualifier = ed.getDisplayNameMap().get(qualifierName);
+            if (qualifier == null) {
+                qualifier = ed.getQualifierNameMap().get(qualifierName);
+            }
+            if (qualifier != null && entry.getValue() != null) {
+                qualifierName = qualifier.getDisplayName();
+                Object value = qualifier.getSerDeser().deserialize(entry.getValue());
+                result.put(qualifierName, convertObjToDouble(value));
+            } else {
+                result.put(qualifierName, null);
+            }
+        }
+        return result;
+    }
 
-	public static byte[] toBytes(EntityDefinition ed, String qualifierName, String qualifierValueInStr){
-		// Get field type from entity class
-		// and skip for not-found fields query expression
-		Object typedValue = null;
-		EntitySerDeser serDeser = null;
-		if(ed.isTag(qualifierName)){
-			typedValue = qualifierValueInStr;
-			serDeser = EntityDefinitionManager.getSerDeser(String.class);
-		}else{
-			try{
-				Field field = ed.getEntityClass().getDeclaredField(qualifierName);
-				Class<?> fieldType = field.getType();
-				serDeser =  EntityDefinitionManager.getSerDeser(fieldType);
-				if(serDeser == null){
-					throw new IllegalArgumentException("Can't find EntitySerDeser for field: "+ qualifierName +"'s type: "+fieldType
-							+", so the field is not supported to be filtered yet");
-				}
-				typedValue = convertStringToObject(qualifierValueInStr, fieldType);
-			} catch (NoSuchFieldException ex) {
-				// Handle the field not found exception in caller
-				LOG.error("Field " + qualifierName + " not found in " + ed.getEntityClass());
-				throw new IllegalArgumentException("Field "+qualifierName+" not found in "+ed.getEntityClass(),ex);
-			}
-		}
-		return serDeser.serialize(typedValue);
-	}
+    public static byte[] toBytes(EntityDefinition ed, String qualifierName, String qualifierValueInStr) {
+        // Get field type from entity class
+        // and skip for not-found fields query expression
+        Object typedValue = null;
+        EntitySerDeser serDeser = null;
+        if (ed.isTag(qualifierName)) {
+            typedValue = qualifierValueInStr;
+            serDeser = EntityDefinitionManager.getSerDeser(String.class);
+        } else {
+            try {
+                Field field = ed.getEntityClass().getDeclaredField(qualifierName);
+                Class<?> fieldType = field.getType();
+                serDeser = EntityDefinitionManager.getSerDeser(fieldType);
+                if (serDeser == null) {
+                    throw new IllegalArgumentException("Can't find EntitySerDeser for field: " + qualifierName
+                                                       + "'s type: " + fieldType
+                                                       + ", so the field is not supported to be filtered yet");
+                }
+                typedValue = convertStringToObject(qualifierValueInStr, fieldType);
+            } catch (NoSuchFieldException ex) {
+                // Handle the field not found exception in caller
+                LOG.error("Field " + qualifierName + " not found in " + ed.getEntityClass());
+                throw new IllegalArgumentException("Field " + qualifierName + " not found in "
+                                                   + ed.getEntityClass(), ex);
+            }
+        }
+        return serDeser.serialize(typedValue);
+    }
 
-	public static Class<?> getType(EntityDefinition ed, String qualifierName) {
-		Field field;
-		try {
-			field = ed.getEntityClass().getDeclaredField(qualifierName);
-		} catch (NoSuchFieldException e) {
-			if(LOG.isDebugEnabled()) LOG.debug("Field "+qualifierName+" not found in "+ed.getEntityClass());
-			return null;
-		}
-		return field.getType();
-	}
+    public static Class<?> getType(EntityDefinition ed, String qualifierName) {
+        Field field;
+        try {
+            field = ed.getEntityClass().getDeclaredField(qualifierName);
+        } catch (NoSuchFieldException e) {
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Field " + qualifierName + " not found in " + ed.getEntityClass());
+            }
+            return null;
+        }
+        return field.getType();
+    }
 
-	/**
-	 * Not support negative numeric value:
-	 * - http://en.wikipedia.org/wiki/Double-precision_floating-point_format
-	 *
-	 * @param value
-	 * @param type
-	 * @return
-	 */
-	public static Object convertStringToObject(String value, Class<?> type){
-		Object obj = null;
-		try{
-			if(String.class.equals(type)){
-				obj =  value;
-			}if(Long.class.equals(type) || long.class.equals(type)){
-				obj = Long.parseLong(value);
-				// if((Long) obj < 0) throw new IllegalArgumentException("Don't support negative Long yet: "+obj);
-			}else if(Integer.class.equals(type) || int.class.equals(type)){
-				obj = Integer.parseInt(value);
-				// if((Integer) obj < 0) throw new IllegalArgumentException("Don't support negative Integer yet: "+obj);
-			}else if(Double.class.equals(type) || double.class.equals(type)){
-				obj = Double.parseDouble(value);
-				// if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Double yet: "+obj);
-			}else if(Float.class.equals(type) || float.class.equals(type)){
-				obj = Float.parseFloat(value);
-				// if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Float yet: "+obj);
-			}else if(Boolean.class.equals(type) || boolean.class.equals(type)) {
-				obj = Boolean.valueOf(value);
-			}
-			if(obj != null) return obj;
-		}catch (NumberFormatException ex){
-			throw new IllegalArgumentException("Fail to convert string: "+value +" into type of "+type,ex);
-		}
+    /**
+     * Not support negative numeric value: -
+     * http://en.wikipedia.org/wiki/Double-precision_floating-point_format
+     *
+     * @param value
+     * @param type
+     * @return
+     */
+    public static Object convertStringToObject(String value, Class<?> type) {
+        Object obj = null;
+        try {
+            if (String.class.equals(type)) {
+                obj = value;
+            }
+            if (Long.class.equals(type) || long.class.equals(type)) {
+                obj = Long.parseLong(value);
+                // if((Long) obj < 0) throw new IllegalArgumentException("Don't support negative Long yet:
+                // "+obj);
+            } else if (Integer.class.equals(type) || int.class.equals(type)) {
+                obj = Integer.parseInt(value);
+                // if((Integer) obj < 0) throw new IllegalArgumentException("Don't support negative Integer
+                // yet: "+obj);
+            } else if (Double.class.equals(type) || double.class.equals(type)) {
+                obj = Double.parseDouble(value);
+                // if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Double yet:
+                // "+obj);
+            } else if (Float.class.equals(type) || float.class.equals(type)) {
+                obj = Float.parseFloat(value);
+                // if((Double) obj < 0) throw new IllegalArgumentException("Don't support negative Float yet:
+                // "+obj);
+            } else if (Boolean.class.equals(type) || boolean.class.equals(type)) {
+                obj = Boolean.valueOf(value);
+            }
+            if (obj != null) {
+                return obj;
+            }
+        } catch (NumberFormatException ex) {
+            throw new IllegalArgumentException("Fail to convert string: " + value + " into type of " + type,
+                                               ex);
+        }
 
-		throw new IllegalArgumentException("Fail to convert string: "+value +" into type of "+type+", illegal type: "+type);
-	}
+        throw new IllegalArgumentException("Fail to convert string: " + value + " into type of " + type
+                                           + ", illegal type: " + type);
+    }
 
-	/**
-	 *
-	 * @param obj
-	 * @return double value, otherwise Double.NaN
-	 */
-	public static double convertObjToDouble(Object obj){
-		if(Long.class.equals(obj.getClass()) || long.class.equals(obj.getClass())){
-			Long _value = (Long) obj;
-			return _value.doubleValue();
-		}else if(Integer.class.equals(obj.getClass()) || int.class.equals(obj.getClass())){
-			Integer _value = (Integer) obj;
-			return _value.doubleValue();
-		}else if(Double.class.equals(obj.getClass()) || double.class.equals(obj.getClass())) {
-			return (Double) obj;
-		}else if(Float.class.equals(obj.getClass()) || float.class.equals(obj.getClass())) {
-			Float _value = (Float) obj;
-			return _value.doubleValue();
-		}else if(Short.class.equals(obj.getClass()) || short.class.equals(obj.getClass())) {
-			Float _value = (Float) obj;
-			return _value.doubleValue();
-		}else if(Byte.class.equals(obj.getClass()) || byte.class.equals(obj.getClass())) {
-			Byte _value = (Byte) obj;
-			return _value.doubleValue();
-		}
-		LOG.warn("Failed to convert object " + obj.toString() + " in type of " + obj.getClass() + " to double");
-		return Double.NaN;
-	}
+    /**
+     * @param obj
+     * @return double value, otherwise Double.NaN
+     */
+    public static double convertObjToDouble(Object obj) {
+        if (Long.class.equals(obj.getClass()) || long.class.equals(obj.getClass())) {
+            Long _value = (Long)obj;
+            return _value.doubleValue();
+        } else if (Integer.class.equals(obj.getClass()) || int.class.equals(obj.getClass())) {
+            Integer _value = (Integer)obj;
+            return _value.doubleValue();
+        } else if (Double.class.equals(obj.getClass()) || double.class.equals(obj.getClass())) {
+            return (Double)obj;
+        } else if (Float.class.equals(obj.getClass()) || float.class.equals(obj.getClass())) {
+            Float _value = (Float)obj;
+            return _value.doubleValue();
+        } else if (Short.class.equals(obj.getClass()) || short.class.equals(obj.getClass())) {
+            Float _value = (Float)obj;
+            return _value.doubleValue();
+        } else if (Byte.class.equals(obj.getClass()) || byte.class.equals(obj.getClass())) {
+            Byte _value = (Byte)obj;
+            return _value.doubleValue();
+        }
+        LOG.warn("Failed to convert object " + obj.toString() + " in type of " + obj.getClass()
+                 + " to double");
+        return Double.NaN;
+    }
 
-	/**
-	 * Parse List String as Set without duplicate items
-	 *
-	 * <br></br>
-	 * Support:
-	 * <ul>
-	 * <li>normal string: ("a","b") => ["a","b"] </li>
-	 * <li>number: (1.5,"b") => [1.5,"b"] </li>
-	 * <li>inner string comma: ("va,lue","value",",") => ["va,lue","value",","]</li>
-	 * <li>inner escaped chars: ("va\"lue","value") => ["va\"lue","value"]</li>
-	 * <li>some bad formats list: ("va"lue","value") => ["va\"lue","value"]</li>
-	 * </ul>
-	 *
-	 * <b>Warning:</b> it will not throw exception if the format is not strictly valid
-	 *
-	 * @param listValue in format (item1,item2,...)
-	 * @return
-	 */
-	public static List<String> parseList(String listValue){
-		Matcher matcher = SET_PATTERN.matcher(listValue);
-		if(matcher.find()){
-			String content = matcher.group(1);
-			List<String> result = new ArrayList<String>();
-			StringBuilder str = null;
-			STATE state = null;
-			char last = 0;
-			for(char c: content.toCharArray()){
-				if(str == null) str = new StringBuilder();
-				if(c == DOUBLE_QUOTE && last != SLASH){
-					// Open or Close String
-					if(state == STATE.STRING)
-						state = null;
-					else state = STATE.STRING;
-				}else if(c == COMMA && state != STATE.STRING){
-					result.add(unescape(str.toString()));
-					str = null;
-					last = c;
-					continue;
-				}
-				last = c;
-				str.append(c);
-			}
-			if(str!=null) result.add(unescape(str.toString()));
-			return result;
-		}else{
-			LOG.error("Invalid list value: " + listValue);
-			throw new IllegalArgumentException("Invalid format of list value: "+listValue+", must be in format: (item1,item2,...)");
-		}
-	}
+    /**
+     * Parse List String as Set without duplicate items <br>
+     * <br>
+     * Support:
+     * <ul>
+     * <li>normal string: ("a","b") => ["a","b"]</li>
+     * <li>number: (1.5,"b") => [1.5,"b"]</li>
+     * <li>inner string comma: ("va,lue","value",",") => ["va,lue","value",","]</li>
+     * <li>inner escaped chars: ("va\"lue","value") => ["va\"lue","value"]</li>
+     * <li>some bad formats list: ("va"lue","value") => ["va\"lue","value"]</li>
+     * </ul>
+     * <b>Warning:</b> it will not throw exception if the format is not strictly valid
+     *
+     * @param listValue in format (item1,item2,...)
+     * @return
+     */
+    public static List<String> parseList(String listValue) {
+        Matcher matcher = SET_PATTERN.matcher(listValue);
+        if (matcher.find()) {
+            String content = matcher.group(1);
+            List<String> result = new ArrayList<String>();
+            StringBuilder str = null;
+            STATE state = null;
+            char last = 0;
+            for (char c : content.toCharArray()) {
+                if (str == null) {
+                    str = new StringBuilder();
+                }
+                if (c == DOUBLE_QUOTE && last != SLASH) {
+                    // Open or Close String
+                    if (state == STATE.STRING) {
+                        state = null;
+                    } else {
+                        state = STATE.STRING;
+                    }
+                } else if (c == COMMA && state != STATE.STRING) {
+                    result.add(unescape(str.toString()));
+                    str = null;
+                    last = c;
+                    continue;
+                }
+                last = c;
+                str.append(c);
+            }
+            if (str != null) {
+                result.add(unescape(str.toString()));
+            }
+            return result;
+        } else {
+            LOG.error("Invalid list value: " + listValue);
+            throw new IllegalArgumentException("Invalid format of list value: " + listValue
+                                               + ", must be in format: (item1,item2,...)");
+        }
+    }
 
-	private static String unescape(String str){
-		int start=0,end = str.length();
-		if(str.startsWith("\"")) start = start +1;
-		if(str.endsWith("\"")) end = end -1;
-		str = str.substring(start,end);
-		return StringEscapeUtils.unescapeJava(str);
-	}
+    private static String unescape(String str) {
+        int start = 0;
+        int end = str.length();
+        if (str.startsWith("\"")) {
+            start = start + 1;
+        }
+        if (str.endsWith("\"")) {
+            end = end - 1;
+        }
+        str = str.substring(start, end);
+        return StringEscapeUtils.unescapeJava(str);
+    }
 
-	private final static Pattern SET_PATTERN = Pattern.compile("^\\((.*)\\)$");
-	private final static char COMMA = ',';
-	private final static char DOUBLE_QUOTE = '"';
-	private final static char SLASH = '\\';
-	private static enum STATE{ STRING }
+    private static final Pattern SET_PATTERN = Pattern.compile("^\\((.*)\\)$");
+    private static final char COMMA = ',';
+    private static final char DOUBLE_QUOTE = '"';
+    private static final char SLASH = '\\';
 
+    private static enum STATE {
+        STRING
+    }
 
-
-//  TODO: NOT FINISHED
-//  private final static Map<String,String> ESCAPE_REGEXP=new HashMap<String,String>(){{
-//			this.put("\\.","\\\\.");
-//	}};
-//
-//	public static String escapeRegExp(String value) {
-//		String _value = value;
-//		for(Map.Entry<String,String> entry:ESCAPE_REGEXP.entrySet()){
-//			_value = _value.replace(entry.getKey(),entry.getValue());
-//		}
-//		return _value;
-//	}
-}
\ No newline at end of file
+    // TODO: NOT FINISHED
+    // private static final Map<String,String> ESCAPE_REGEXP=new HashMap<String,String>(){{
+    // this.put("\\.","\\\\.");
+    // }};
+    //
+    // public static String escapeRegExp(String value) {
+    // String _value = value;
+    // for(Map.Entry<String,String> entry:ESCAPE_REGEXP.entrySet()){
+    // _value = _value.replace(entry.getKey(),entry.getValue());
+    // }
+    // return _value;
+    // }
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java
index 36e1e0b..df75e33 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/EntityUniq.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 /**
- * 
+ *
  */
 package org.apache.eagle.log.entity;
 
@@ -23,45 +23,48 @@
 import java.util.Map;
 import java.util.Map.Entry;
 
-
 /**
  * @since Sep 12, 2014
  */
 public class EntityUniq {
-	
-	public Map<String, String> tags;
-	public Long timestamp;
-	public long createdTime; // for cache removal;
-	
-	public EntityUniq(Map<String, String> tags, long timestamp) {
-		this.tags = new HashMap<String, String>(tags);
-		this.timestamp = timestamp;
-		this.createdTime = System.currentTimeMillis();
-	}
-	
-	@Override	
-	public boolean equals(Object obj) {		
-		if (obj instanceof EntityUniq) {
-			EntityUniq au = (EntityUniq) obj;
-			if (tags.size() != au.tags.size()) return false;
-			for (Entry<String, String> keyValue : au.tags.entrySet()) {
-				boolean keyExist = tags.containsKey(keyValue.getKey());
-				if ( !keyExist || !tags.get(keyValue.getKey()).equals(keyValue.getValue())) {				
-					return false;
-				}
-			}
-			if (!timestamp.equals(au.timestamp)) return false;
-			return true;
-		}
-		return false;
-	}
-	
-	@Override
-	public int hashCode() {	
-		int hashCode = 0;
-		for (String value : tags.values()) {
-			hashCode ^= value.hashCode();	
-		}
-		return hashCode ^= timestamp.hashCode();
-	}
+
+    public Map<String, String> tags;
+    public Long timestamp;
+    public long createdTime; // for cache removal;
+
+    public EntityUniq(Map<String, String> tags, long timestamp) {
+        this.tags = new HashMap<String, String>(tags);
+        this.timestamp = timestamp;
+        this.createdTime = System.currentTimeMillis();
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj instanceof EntityUniq) {
+            EntityUniq au = (EntityUniq)obj;
+            if (tags.size() != au.tags.size()) {
+                return false;
+            }
+            for (Entry<String, String> keyValue : au.tags.entrySet()) {
+                boolean keyExist = tags.containsKey(keyValue.getKey());
+                if (!keyExist || !tags.get(keyValue.getKey()).equals(keyValue.getValue())) {
+                    return false;
+                }
+            }
+            if (!timestamp.equals(au.timestamp)) {
+                return false;
+            }
+            return true;
+        }
+        return false;
+    }
+
+    @Override
+    public int hashCode() {
+        int hashCode = 0;
+        for (String value : tags.values()) {
+            hashCode ^= value.hashCode();
+        }
+        return hashCode ^= timestamp.hashCode();
+    }
 }
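
As a quick reference outside the diff, here is a minimal sketch (not part of this patch) of how `EntityUniq` behaves as a de-duplication key: `equals()`/`hashCode()` compare only tags and timestamp, so `createdTime` never affects set membership. The tag values are illustrative.

```java
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import org.apache.eagle.log.entity.EntityUniq;

public class EntityUniqDedupExample {
    public static void main(String[] args) throws Exception {
        Map<String, String> tags = new HashMap<>();
        tags.put("site", "sandbox");
        tags.put("application", "hbase");

        EntityUniq first = new EntityUniq(tags, 1000L);
        Thread.sleep(5); // createdTime differs between the two instances
        EntityUniq second = new EntityUniq(tags, 1000L);

        Set<EntityUniq> seen = new HashSet<>();
        seen.add(first);
        seen.add(second);
        // Prints 1: same tags and timestamp collapse to one key; createdTime is only used for cache removal.
        System.out.println(seen.size());
    }
}
```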
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java
index e308bc3..e97ecbb 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericCreateAPIResponseEntity.java
@@ -28,30 +28,37 @@
 
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
-@XmlType(propOrder = {"success", "exception", "encodedRowkeys"})
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
-@JsonIgnoreProperties(ignoreUnknown=true)
-public class GenericCreateAPIResponseEntity{
-	private boolean success;
-	private String exception;
-	private List<String> encodedRowkeys;
-	
-	public List<String> getEncodedRowkeys() {
-		return encodedRowkeys;
-	}
-	public void setEncodedRowkeys(List<String> encodedRowkeys) {
-		this.encodedRowkeys = encodedRowkeys;
-	}
-	public boolean isSuccess() {
-		return success;
-	}
-	public void setSuccess(boolean success) {
-		this.success = success;
-	}
-	public String getException() {
-		return exception;
-	}
-	public void setException(String exception) {
-		this.exception = exception;
-	}
+@XmlType(propOrder = {
+                      "success", "exception", "encodedRowkeys"
+    })
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class GenericCreateAPIResponseEntity {
+    private boolean success;
+    private String exception;
+    private List<String> encodedRowkeys;
+
+    public List<String> getEncodedRowkeys() {
+        return encodedRowkeys;
+    }
+
+    public void setEncodedRowkeys(List<String> encodedRowkeys) {
+        this.encodedRowkeys = encodedRowkeys;
+    }
+
+    public boolean isSuccess() {
+        return success;
+    }
+
+    public void setSuccess(boolean success) {
+        this.success = success;
+    }
+
+    public String getException() {
+        return exception;
+    }
+
+    public void setException(String exception) {
+        this.exception = exception;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java
index 9c42ab2..71f27c2 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityBatchReader.java
@@ -23,36 +23,43 @@
 import java.util.ArrayList;
 import java.util.List;
 
-public class GenericEntityBatchReader implements EntityCreationListener{
-	private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class);
-	
-	private List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
-	private StreamReader reader;
-	
-	public GenericEntityBatchReader(String serviceName, SearchCondition condition) throws InstantiationException, IllegalAccessException{
-		reader = new GenericEntityStreamReader(serviceName, condition);
-		reader.register(this);
-	}
-	
-	public GenericEntityBatchReader(StreamReader reader) throws InstantiationException, IllegalAccessException{
-		this.reader = reader;
-		reader.register(this);
-	}
-	
-	public long getLastTimestamp() {
-		return reader.getLastTimestamp();
-	}
-	public long getFirstTimestamp(){ return reader.getFirstTimestamp();}
-	
-	@Override
-	public void entityCreated(TaggedLogAPIEntity entity){
-		entities.add(entity);
-	}
-	
-	@SuppressWarnings("unchecked")
-	public <T> List<T> read() throws Exception{
-		if(LOG.isDebugEnabled()) LOG.debug("Start reading as batch mode");
-		reader.readAsStream();
-		return (List<T>)entities;
-	}
+public class GenericEntityBatchReader implements EntityCreationListener {
+    private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class);
+
+    private List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
+    private StreamReader reader;
+
+    public GenericEntityBatchReader(String serviceName, SearchCondition condition)
+        throws InstantiationException, IllegalAccessException {
+        reader = new GenericEntityStreamReader(serviceName, condition);
+        reader.register(this);
+    }
+
+    public GenericEntityBatchReader(StreamReader reader)
+        throws InstantiationException, IllegalAccessException {
+        this.reader = reader;
+        reader.register(this);
+    }
+
+    public long getLastTimestamp() {
+        return reader.getLastTimestamp();
+    }
+
+    public long getFirstTimestamp() {
+        return reader.getFirstTimestamp();
+    }
+
+    @Override
+    public void entityCreated(TaggedLogAPIEntity entity) {
+        entities.add(entity);
+    }
+
+    @SuppressWarnings("unchecked")
+    public <T> List<T> read() throws Exception {
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Start reading as batch mode");
+        }
+        reader.readAsStream();
+        return (List<T>)entities;
+    }
 }
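
For readers unfamiliar with the reader API being reformatted here, a hedged usage sketch (not part of this patch): "AlertService" is a placeholder service name, the `SearchCondition` is assumed to be populated by the caller, and the import paths for `SearchCondition` and `TaggedLogAPIEntity` are assumed from this module's layout.

```java
import java.util.List;

import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.GenericEntityBatchReader;
import org.apache.eagle.log.entity.SearchCondition;

public class BatchReadExample {
    public static List<TaggedLogAPIEntity> readAll(SearchCondition condition) throws Exception {
        // Wraps a GenericEntityStreamReader and registers itself as the creation listener.
        GenericEntityBatchReader reader = new GenericEntityBatchReader("AlertService", condition);
        List<TaggedLogAPIEntity> entities = reader.read(); // drains the stream into an in-memory list
        System.out.println("read " + entities.size() + " entities, newest ts = " + reader.getLastTimestamp());
        return entities;
    }
}
```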
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java
index a9e03b3..6683c28 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityScanStreamReader.java
@@ -36,7 +36,8 @@
     private long lastTimestamp = 0;
     private long firstTimestamp = 0;
 
-    public GenericEntityScanStreamReader(String serviceName, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{
+    public GenericEntityScanStreamReader(String serviceName, SearchCondition condition, String prefix)
+        throws InstantiationException, IllegalAccessException {
         this.prefix = prefix;
         checkNotNull(serviceName, "serviceName");
         this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName);
@@ -44,7 +45,8 @@
         this.condition = condition;
     }
 
-    public GenericEntityScanStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{
+    public GenericEntityScanStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix)
+        throws InstantiationException, IllegalAccessException {
         this.prefix = prefix;
         checkNotNull(entityDef, "entityDef");
         this.entityDef = entityDef;
@@ -52,12 +54,13 @@
         this.condition = condition;
     }
 
+    @Override
     public long getLastTimestamp() {
         return lastTimestamp;
     }
 
-    private void checkNotNull(Object o, String message){
-        if(o == null){
+    private void checkNotNull(Object o, String message) {
+        if (o == null) {
             throw new IllegalArgumentException(message + " should not be null");
         }
     }
@@ -71,30 +74,33 @@
     }
 
     @Override
-    public void readAsStream() throws Exception{
+    public void readAsStream() throws Exception {
         Date start = null;
         Date end = null;
         // shortcut to avoid read when pageSize=0
-        if(condition.getPageSize() <= 0){
+        if (condition.getPageSize() <= 0) {
             return; // return nothing
         }
         // Process the time range if needed
-        if(entityDef.isTimeSeries()){
+        if (entityDef.isTimeSeries()) {
             start = new Date(condition.getStartTime());
             end = new Date(condition.getEndTime());
-        }else{
-            //start = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_START_HUMANTIME);
-            //end = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_END_HUMANTIME);
+        } else {
+            // start = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_START_HUMANTIME);
+            // end = DateTimeUtil.humanDateToDate(EntityConstants.FIXED_READ_END_HUMANTIME);
             start = new Date(EntityConstants.FIXED_READ_START_TIMESTAMP);
             end = new Date(EntityConstants.FIXED_READ_END_TIMESTAMP);
         }
         byte[][] outputQualifiers = null;
-        if(!condition.isOutputAll()) {
+        if (!condition.isOutputAll()) {
             // Generate the output qualifiers
-            outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields());
+            outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef,
+                                                                          condition.getOutputFields());
         }
-        HBaseLogReader2 reader = new HBaseLogReader2(entityDef, condition.getPartitionValues(), start, end, condition.getFilter(), condition.getStartRowkey(), outputQualifiers, this.prefix);
-        try{
+        HBaseLogReader2 reader = new HBaseLogReader2(entityDef, condition.getPartitionValues(), start, end,
+                                                     condition.getFilter(), condition.getStartRowkey(),
+                                                     outputQualifiers, this.prefix);
+        try {
             reader.open();
             InternalLog log;
             int count = 0;
@@ -103,23 +109,24 @@
                 if (lastTimestamp < entity.getTimestamp()) {
                     lastTimestamp = entity.getTimestamp();
                 }
-                if(firstTimestamp > entity.getTimestamp() || firstTimestamp == 0){
+                if (firstTimestamp > entity.getTimestamp() || firstTimestamp == 0) {
                     firstTimestamp = entity.getTimestamp();
                 }
 
                 entity.setSerializeVerbose(condition.isOutputVerbose());
                 entity.setSerializeAlias(condition.getOutputAlias());
 
-                for(EntityCreationListener l : _listeners){
+                for (EntityCreationListener l : listeners) {
                     l.entityCreated(entity);
                 }
-                if(++count == condition.getPageSize())
+                if (++count == condition.getPageSize()) {
                     break;
+                }
             }
-        }catch(IOException ioe){
+        } catch (IOException ioe) {
             LOG.error("Fail reading log", ioe);
             throw ioe;
-        }finally{
+        } finally {
             reader.close();
         }
     }
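
The scan reader above pushes each materialized row to its registered listeners and stops at `pageSize`. A hedged sketch of that listener contract (not part of this patch): the `throws Exception` clause and the import path of `EntityCreationListener` are assumed from the overrides in this module, and the wiring in the trailing comment uses a placeholder service name and a condition built elsewhere.

```java
import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.EntityCreationListener;

public class CountingListener implements EntityCreationListener {
    private long count;

    @Override
    public void entityCreated(TaggedLogAPIEntity entity) throws Exception {
        count++; // invoked once per row the reader materializes, at most pageSize times
    }

    public long getCount() {
        return count;
    }

    // Typical wiring (condition prepared by the caller):
    //   GenericEntityScanStreamReader reader =
    //       new GenericEntityScanStreamReader("AlertService", condition, null);
    //   reader.register(new CountingListener());
    //   reader.readAsStream(); // returns immediately when condition.getPageSize() <= 0
}
```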
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java
index c3d916e..6dfe27d 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReader.java
@@ -29,97 +29,103 @@
 import java.util.List;
 
 public class GenericEntityStreamReader extends StreamReader {
-	private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReader.class);
-	
-	private EntityDefinition entityDef;
-	private SearchCondition condition;
-	private String prefix;
-	private StreamReader readerAfterPlan;
+    private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReader.class);
 
-	public GenericEntityStreamReader(String serviceName, SearchCondition condition) throws InstantiationException, IllegalAccessException{
-		this(serviceName, condition, null);
-	}
+    private EntityDefinition entityDef;
+    private SearchCondition condition;
+    private String prefix;
+    private StreamReader readerAfterPlan;
 
-	public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition) throws InstantiationException, IllegalAccessException{
-		this(entityDef, condition, entityDef.getPrefix());
-	}
-	
-	public GenericEntityStreamReader(String serviceName, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{
-		this.prefix = prefix;
-		checkNotNull(serviceName, "serviceName");
-		this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName);
-		checkNotNull(entityDef, "EntityDefinition");
-		this.condition = condition;
-		this.readerAfterPlan = selectQueryReader();
-	}
+    public GenericEntityStreamReader(String serviceName, SearchCondition condition)
+        throws InstantiationException, IllegalAccessException {
+        this(serviceName, condition, null);
+    }
 
-	public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix) throws InstantiationException, IllegalAccessException{
-		this.prefix = prefix;
-		checkNotNull(entityDef, "entityDef");
-		this.entityDef = entityDef;
-		checkNotNull(entityDef, "EntityDefinition");
-		this.condition = condition;
-		this.readerAfterPlan = selectQueryReader();
-	}
+    public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition)
+        throws InstantiationException, IllegalAccessException {
+        this(entityDef, condition, entityDef.getPrefix());
+    }
 
-	private void checkNotNull(Object o, String message){
-		if(o == null){
-			throw new IllegalArgumentException(message + " should not be null");
-		}
-	}
-	
-	public EntityDefinition getEntityDefinition() {
-		return entityDef;
-	}
-	
-	public SearchCondition getSearchCondition() {
-		return condition;
-	}
-	
-	@Override
-	public void readAsStream() throws Exception{
-		readerAfterPlan._listeners.addAll(this._listeners);
-		readerAfterPlan.readAsStream();
-	}
-	
-	private StreamReader selectQueryReader() throws InstantiationException, IllegalAccessException {
-		final ORExpression query = condition.getQueryExpression();
-		IndexDefinition[] indexDefs = entityDef.getIndexes();
+    public GenericEntityStreamReader(String serviceName, SearchCondition condition, String prefix)
+        throws InstantiationException, IllegalAccessException {
+        this.prefix = prefix;
+        checkNotNull(serviceName, "serviceName");
+        this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName);
+        checkNotNull(entityDef, "EntityDefinition");
+        this.condition = condition;
+        this.readerAfterPlan = selectQueryReader();
+    }
+
+    public GenericEntityStreamReader(EntityDefinition entityDef, SearchCondition condition, String prefix)
+        throws InstantiationException, IllegalAccessException {
+        this.prefix = prefix;
+        checkNotNull(entityDef, "entityDef");
+        this.entityDef = entityDef;
+        checkNotNull(entityDef, "EntityDefinition");
+        this.condition = condition;
+        this.readerAfterPlan = selectQueryReader();
+    }
+
+    private void checkNotNull(Object o, String message) {
+        if (o == null) {
+            throw new IllegalArgumentException(message + " should not be null");
+        }
+    }
+
+    public EntityDefinition getEntityDefinition() {
+        return entityDef;
+    }
+
+    public SearchCondition getSearchCondition() {
+        return condition;
+    }
+
+    @Override
+    public void readAsStream() throws Exception {
+        readerAfterPlan.listeners.addAll(this.listeners);
+        readerAfterPlan.readAsStream();
+    }
+
+    private StreamReader selectQueryReader() throws InstantiationException, IllegalAccessException {
+        final ORExpression query = condition.getQueryExpression();
+        IndexDefinition[] indexDefs = entityDef.getIndexes();
 
         // Index just works with query condition
-		if (indexDefs != null && condition.getQueryExpression()!=null) {
-			List<byte[]> rowkeys = new ArrayList<>();
-			for (IndexDefinition index : indexDefs) {
-				// Check unique index first
-				if (index.isUnique()) {
-					final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys);
-					if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) {
-						LOG.info("Selectd query unique index " + index.getIndexName() + " for query: " + condition.getQueryExpression());
-						return new UniqueIndexStreamReader(index, condition, rowkeys);
-					}
-				}
-			}
-			for (IndexDefinition index : indexDefs) {
-				// Check non-clustered index
-				if (!index.isUnique()) {
-					final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys);
-					if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) {
-						LOG.info("Selectd query non clustered index " + index.getIndexName() + " for query: " + condition.getQueryExpression().toString());
-						return new NonClusteredIndexStreamReader(index, condition, rowkeys);
-					}
-				}
-			}
-		}
-		return new GenericEntityScanStreamReader(entityDef, condition, this.prefix);
-	}
+        if (indexDefs != null && condition.getQueryExpression() != null) {
+            List<byte[]> rowkeys = new ArrayList<>();
+            for (IndexDefinition index : indexDefs) {
+                // Check unique index first
+                if (index.isUnique()) {
+                    final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys);
+                    if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) {
+                        LOG.info("Selected query unique index " + index.getIndexName() + " for query: "
+                                 + condition.getQueryExpression());
+                        return new UniqueIndexStreamReader(index, condition, rowkeys);
+                    }
+                }
+            }
+            for (IndexDefinition index : indexDefs) {
+                // Check non-clustered index
+                if (!index.isUnique()) {
+                    final IndexDefinition.IndexType type = index.canGoThroughIndex(query, rowkeys);
+                    if (!IndexDefinition.IndexType.NON_INDEX.equals(type)) {
+                        LOG.info("Selected query non clustered index " + index.getIndexName() + " for query: "
+                                 + condition.getQueryExpression().toString());
+                        return new NonClusteredIndexStreamReader(index, condition, rowkeys);
+                    }
+                }
+            }
+        }
+        return new GenericEntityScanStreamReader(entityDef, condition, this.prefix);
+    }
 
-	@Override
-	public long getLastTimestamp() {
-		return readerAfterPlan.getLastTimestamp();
-	}
+    @Override
+    public long getLastTimestamp() {
+        return readerAfterPlan.getLastTimestamp();
+    }
 
-	@Override
-	public long getFirstTimestamp() {
-		return readerAfterPlan.getFirstTimestamp();
-	}
+    @Override
+    public long getFirstTimestamp() {
+        return readerAfterPlan.getFirstTimestamp();
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java
index bf72a36..15bdd20 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityStreamReaderMT.java
@@ -31,121 +31,124 @@
 
 /**
  * multi-threading stream readers which only applies to time-series entity where we split the query into
- * different time range
- * 
- * When this class is used together with list query or aggregate query, be aware that the query's behavior could
- * be changed for example pageSize does not work well, output sequence is not determined
+ * different time range. When this class is used together with list query or aggregate query, be aware that
+ * the query's behavior could change; for example, pageSize does not work well and the output sequence is
+ * not deterministic
  */
-public class GenericEntityStreamReaderMT extends StreamReader{
-	private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReaderMT.class);
-	private List<GenericEntityStreamReader> readers = new ArrayList<GenericEntityStreamReader>(); 
-	
-	public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition, int numThreads) throws Exception{
-		checkIsTimeSeries(serviceName);
-		checkNumThreads(numThreads);
-		long queryStartTime = condition.getStartTime();
-		long queryEndTime = condition.getEndTime();
-		long subStartTime = queryStartTime;
-		long subEndTime = 0;
-		long interval = (queryEndTime-queryStartTime) / numThreads;
-		for(int i=0; i<numThreads; i++){
-			// split search condition by time range
-			subStartTime = queryStartTime + i*interval;
-			if(i == numThreads-1){
-				subEndTime = queryEndTime;
-			}else{
-				subEndTime = subStartTime + interval;
-			}
-			//String strStartTime = DateTimeUtil.millisecondsToHumanDateWithSeconds(subStartTime);
-			//String strEndTime = DateTimeUtil.millisecondsToHumanDateWithSeconds(subEndTime);
-			SearchCondition sc = new SearchCondition(condition);
-			sc.setStartTime(subStartTime);
-			sc.setEndTime(subEndTime);
-			GenericEntityStreamReader reader = new GenericEntityStreamReader(serviceName, sc);
-			readers.add(reader);
-		}
-	}
-	
-	private void checkIsTimeSeries(String serviceName) throws Exception{
-		EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
-		if(!ed.isTimeSeries()){
-			throw new IllegalArgumentException("Multi-threading stream reader must be applied to time series table");
-		}
-	}
-	
-	private void checkNumThreads(int numThreads){
-		if(numThreads <= 0){
-			throw new IllegalArgumentException("Multi-threading stream reader must have numThreads >= 1");
-		}
-	}
-	
-	/**
-	 * default to 2 threads
-	 * @param serviceName
-	 * @param condition
-	 */
-	public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition) throws Exception{
-		this(serviceName, condition, 2);
-	}
-	
-	@Override
-	public void readAsStream() throws Exception{
-		// populate listeners to all readers
-		for(EntityCreationListener l : _listeners){
-			for(GenericEntityStreamReader r : readers){
-				r.register(l);
-			}
-		}
+public class GenericEntityStreamReaderMT extends StreamReader {
+    private static final Logger LOG = LoggerFactory.getLogger(GenericEntityStreamReaderMT.class);
+    private List<GenericEntityStreamReader> readers = new ArrayList<GenericEntityStreamReader>();
 
-		List<Future<Void>> futures = new ArrayList<Future<Void>>();
-		for(GenericEntityStreamReader r : readers){
-			SingleReader reader = new SingleReader(r);
-			Future<Void> readFuture = EagleConfigFactory.load().getExecutor().submit(reader);
-			futures.add(readFuture);
-		}
-		
-		// join threads and check exceptions
-		for(Future<Void> future : futures){
-			try{
-				future.get();
-			}catch(Exception ex){
-				LOG.error("Error in read", ex);
-				throw ex;
-			}
-		}
-	}
-	
-	private static class SingleReader implements Callable<Void>{
-		private GenericEntityStreamReader reader;
-		public SingleReader(GenericEntityStreamReader reader){
-			this.reader = reader;
-		}
-		@Override
-		public Void call() throws Exception{
-			reader.readAsStream();
-			return null;
-		}
-	}
+    public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition, int numThreads)
+        throws Exception {
+        checkIsTimeSeries(serviceName);
+        checkNumThreads(numThreads);
+        long queryStartTime = condition.getStartTime();
+        long queryEndTime = condition.getEndTime();
+        long subStartTime = queryStartTime;
+        long subEndTime = 0;
+        long interval = (queryEndTime - queryStartTime) / numThreads;
+        for (int i = 0; i < numThreads; i++) {
+            // split search condition by time range
+            subStartTime = queryStartTime + i * interval;
+            if (i == numThreads - 1) {
+                subEndTime = queryEndTime;
+            } else {
+                subEndTime = subStartTime + interval;
+            }
+            // String strStartTime = DateTimeUtil.millisecondsToHumanDateWithSeconds(subStartTime);
+            // String strEndTime = DateTimeUtil.millisecondsToHumanDateWithSeconds(subEndTime);
+            SearchCondition sc = new SearchCondition(condition);
+            sc.setStartTime(subStartTime);
+            sc.setEndTime(subEndTime);
+            GenericEntityStreamReader reader = new GenericEntityStreamReader(serviceName, sc);
+            readers.add(reader);
+        }
+    }
 
-	@Override
-	public long getLastTimestamp() {
-		long lastTimestamp = 0;
-		for (GenericEntityStreamReader reader : readers) {
-			if (lastTimestamp < reader.getLastTimestamp()) {
-				lastTimestamp = reader.getLastTimestamp();
-			}
-		}
-		return lastTimestamp;
-	}
+    private void checkIsTimeSeries(String serviceName) throws Exception {
+        EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
+        if (!ed.isTimeSeries()) {
+            throw new IllegalArgumentException("Multi-threading stream reader must be applied to time series table");
+        }
+    }
 
-	@Override
-	public long getFirstTimestamp() {
-		long firstTimestamp = 0;
-		for (GenericEntityStreamReader reader : readers) {
-			if (firstTimestamp > reader.getLastTimestamp() || firstTimestamp == 0) {
-				firstTimestamp = reader.getLastTimestamp();
-			}
-		}
-		return firstTimestamp;
-	}
+    private void checkNumThreads(int numThreads) {
+        if (numThreads <= 0) {
+            throw new IllegalArgumentException("Multi-threading stream reader must have numThreads >= 1");
+        }
+    }
+
+    /**
+     * default to 2 threads
+     *
+     * @param serviceName
+     * @param condition
+     */
+    public GenericEntityStreamReaderMT(String serviceName, SearchCondition condition) throws Exception {
+        this(serviceName, condition, 2);
+    }
+
+    @Override
+    public void readAsStream() throws Exception {
+        // populate listeners to all readers
+        for (EntityCreationListener l : listeners) {
+            for (GenericEntityStreamReader r : readers) {
+                r.register(l);
+            }
+        }
+
+        List<Future<Void>> futures = new ArrayList<Future<Void>>();
+        for (GenericEntityStreamReader r : readers) {
+            SingleReader reader = new SingleReader(r);
+            Future<Void> readFuture = EagleConfigFactory.load().getExecutor().submit(reader);
+            futures.add(readFuture);
+        }
+
+        // join threads and check exceptions
+        for (Future<Void> future : futures) {
+            try {
+                future.get();
+            } catch (Exception ex) {
+                LOG.error("Error in read", ex);
+                throw ex;
+            }
+        }
+    }
+
+    private static class SingleReader implements Callable<Void> {
+        private GenericEntityStreamReader reader;
+
+        public SingleReader(GenericEntityStreamReader reader) {
+            this.reader = reader;
+        }
+
+        @Override
+        public Void call() throws Exception {
+            reader.readAsStream();
+            return null;
+        }
+    }
+
+    @Override
+    public long getLastTimestamp() {
+        long lastTimestamp = 0;
+        for (GenericEntityStreamReader reader : readers) {
+            if (lastTimestamp < reader.getLastTimestamp()) {
+                lastTimestamp = reader.getLastTimestamp();
+            }
+        }
+        return lastTimestamp;
+    }
+
+    @Override
+    public long getFirstTimestamp() {
+        long firstTimestamp = 0;
+        for (GenericEntityStreamReader reader : readers) {
+            if (firstTimestamp > reader.getLastTimestamp() || firstTimestamp == 0) {
+                firstTimestamp = reader.getLastTimestamp();
+            }
+        }
+        return firstTimestamp;
+    }
 }
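
The constructor above splits the query window across `numThreads` readers; the arithmetic is easy to misread in diff form, so here is a standalone sketch (not part of this patch) of the same split, with the last slice absorbing the remainder so the whole range stays covered.

```java
public class TimeRangeSplitExample {
    public static long[][] split(long queryStartTime, long queryEndTime, int numThreads) {
        long interval = (queryEndTime - queryStartTime) / numThreads;
        long[][] slices = new long[numThreads][2];
        for (int i = 0; i < numThreads; i++) {
            long subStartTime = queryStartTime + i * interval;
            long subEndTime = (i == numThreads - 1) ? queryEndTime : subStartTime + interval;
            slices[i][0] = subStartTime;
            slices[i][1] = subEndTime;
        }
        return slices;
    }

    public static void main(String[] args) {
        // A 10-second window split across 3 readers: [0, 3333), [3333, 6666), [6666, 10000]
        for (long[] slice : split(0L, 10_000L, 3)) {
            System.out.println(slice[0] + " -> " + slice[1]);
        }
    }
}
```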
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java
index 5c8b12d..926fcba 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericEntityWriter.java
@@ -27,52 +27,53 @@
 import java.util.List;
 
 public class GenericEntityWriter {
-	private static final Logger LOG = LoggerFactory.getLogger(GenericEntityWriter.class);
-	private EntityDefinition entityDef;
+    private static final Logger LOG = LoggerFactory.getLogger(GenericEntityWriter.class);
+    private EntityDefinition entityDef;
 
-	public GenericEntityWriter(String serviceName) throws InstantiationException, IllegalAccessException{
-		this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName);
-		checkNotNull(entityDef, "serviceName");
-	}
+    public GenericEntityWriter(String serviceName) throws InstantiationException, IllegalAccessException {
+        this.entityDef = EntityDefinitionManager.getEntityByServiceName(serviceName);
+        checkNotNull(entityDef, "serviceName");
+    }
 
-	public GenericEntityWriter(EntityDefinition entityDef) throws InstantiationException, IllegalAccessException{
-		this.entityDef = entityDef;
-		checkNotNull(entityDef, "serviceName");
-	}
-	
-	private void checkNotNull(Object o, String message) {
-		if(o == null){
-			throw new IllegalArgumentException(message + " should not be null");
-		}
-	}
+    public GenericEntityWriter(EntityDefinition entityDef)
+        throws InstantiationException, IllegalAccessException {
+        this.entityDef = entityDef;
+        checkNotNull(entityDef, "serviceName");
+    }
 
-	/**
-	 * @param entities
-	 * @return row keys
-	 * @throws Exception
-	 */
-	public List<String> write(List<? extends TaggedLogAPIEntity> entities) throws Exception{
-		HBaseLogWriter writer = new HBaseLogWriter(entityDef.getTable(), entityDef.getColumnFamily());
-		List<String> rowkeys = new ArrayList<String>(entities.size());
-		List<InternalLog> logs = new ArrayList<InternalLog>(entities.size());
-		
-		try{
-			writer.open();
-			for(TaggedLogAPIEntity entity : entities){
-				final InternalLog entityLog = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef);
-				logs.add(entityLog);
-			}
-			List<byte[]> bRowkeys  = writer.write(logs);
-			for (byte[] rowkey : bRowkeys) {
-				rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
-			}
+    private void checkNotNull(Object o, String message) {
+        if (o == null) {
+            throw new IllegalArgumentException(message + " should not be null");
+        }
+    }
 
-		}catch(Exception ex){
-			LOG.error("fail writing tagged log", ex);
-			throw ex;
-		}finally{
-			writer.close();
-	 	}
-		return rowkeys;
-	}
+    /**
+     * @param entities
+     * @return row keys
+     * @throws Exception
+     */
+    public List<String> write(List<? extends TaggedLogAPIEntity> entities) throws Exception {
+        HBaseLogWriter writer = new HBaseLogWriter(entityDef.getTable(), entityDef.getColumnFamily());
+        List<String> rowkeys = new ArrayList<String>(entities.size());
+        List<InternalLog> logs = new ArrayList<InternalLog>(entities.size());
+
+        try {
+            writer.open();
+            for (TaggedLogAPIEntity entity : entities) {
+                final InternalLog entityLog = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef);
+                logs.add(entityLog);
+            }
+            List<byte[]> bRowkeys = writer.write(logs);
+            for (byte[] rowkey : bRowkeys) {
+                rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
+            }
+
+        } catch (Exception ex) {
+            LOG.error("fail writing tagged log", ex);
+            throw ex;
+        } finally {
+            writer.close();
+        }
+        return rowkeys;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java
index 9f6937b..56cd453 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntity.java
@@ -21,33 +21,35 @@
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
 /**
- * GenericMetricEntity should use prefix field which is extended from TaggedLogAPIEntity as metric name
- * metric name is used to partition the metric tables
+ * GenericMetricEntity should use the prefix field inherited from TaggedLogAPIEntity as the metric name; the
+ * metric name is used to partition the metric tables
  */
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @Table("eagle_metric")
 @ColumnFamily("f")
 @Prefix(GenericMetricEntity.GENERIC_METRIC_PREFIX_PLACE_HOLDER)
 @Service(GenericMetricEntity.GENERIC_METRIC_SERVICE)
 @TimeSeries(true)
-@Metric(interval=60000)
+@Metric(interval = 60000)
 @ServicePath(path = "/metric")
 // TODO:
-@Tags({"site","application","policyId","alertExecutorId", "streamName","source","partitionSeq"})
+@Tags({
+       "site", "application", "policyId", "alertExecutorId", "streamName", "source", "partitionSeq"
+    })
 public class GenericMetricEntity extends TaggedLogAPIEntity {
-	public static final String GENERIC_METRIC_SERVICE = "GenericMetricService";
-	public static final String GENERIC_METRIC_PREFIX_PLACE_HOLDER = "GENERIC_METRIC_PREFIX_PLACEHODLER";
-	public static final String VALUE_FIELD ="value";
+    public static final String GENERIC_METRIC_SERVICE = "GenericMetricService";
+    public static final String GENERIC_METRIC_PREFIX_PLACE_HOLDER = "GENERIC_METRIC_PREFIX_PLACEHODLER";
+    public static final String VALUE_FIELD = "value";
 
-	@Column("a")
-	private double[] value;
+    @Column("a")
+    private double[] value;
 
-	public double[] getValue() {
-		return value;
-	}
+    public double[] getValue() {
+        return value;
+    }
 
-	public void setValue(double[] value) {
-		this.value = value;
-		pcs.firePropertyChange("value", null, null);
-	}
-}
\ No newline at end of file
+    public void setValue(double[] value) {
+        this.value = value;
+        pcs.firePropertyChange("value", null, null);
+    }
+}
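
A hedged write-path sketch tying `GenericMetricEntity` and `GenericEntityWriter` together (not part of this patch): the metric name and tag-free entity are illustrative, `setPrefix(...)`/`setTimestamp(...)` are assumed to be inherited from `TaggedLogAPIEntity` as the class comment above suggests, and a reachable `eagle_metric` HBase table is assumed.

```java
import java.util.Arrays;
import java.util.List;

import org.apache.eagle.log.entity.GenericEntityWriter;
import org.apache.eagle.log.entity.GenericMetricEntity;

public class MetricWriteExample {
    public static void main(String[] args) throws Exception {
        GenericMetricEntity metric = new GenericMetricEntity();
        metric.setPrefix("namenode.capacity.used"); // the metric name, used to partition the metric table
        metric.setTimestamp(System.currentTimeMillis());
        metric.setValue(new double[] {42.0});

        GenericEntityWriter writer = new GenericEntityWriter(GenericMetricEntity.GENERIC_METRIC_SERVICE);
        List<String> rowkeys = writer.write(Arrays.asList(metric)); // URL-safe base64 row keys
        System.out.println("wrote " + rowkeys.size() + " metric point(s): " + rowkeys);
    }
}
```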
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java
index 84b02ae..bc99a81 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityBatchReader.java
@@ -23,32 +23,37 @@
 import java.util.ArrayList;
 import java.util.List;
 
-public class GenericMetricEntityBatchReader  implements EntityCreationListener{
-	private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class);
-	
-	private List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
-	private GenericEntityStreamReader reader;
-	
-	public GenericMetricEntityBatchReader(String metricName, SearchCondition condition) throws Exception{
-		reader = new GenericEntityStreamReader(GenericMetricEntity.GENERIC_METRIC_SERVICE, condition, metricName);
-	}
-	
-	public long getLastTimestamp() {
-		return reader.getLastTimestamp();
-	}
-	public long getFirstTimestamp() {
-		return reader.getFirstTimestamp();
-	}
-	@Override
-	public void entityCreated(TaggedLogAPIEntity entity){
-		entities.add(entity);
-	}
-	
-	@SuppressWarnings("unchecked")
-	public <T> List<T> read() throws Exception{
-		if(LOG.isDebugEnabled()) LOG.debug("Start reading as batch mode");
-		reader.register(this);
-		reader.readAsStream();
-		return (List<T>)entities;
-	}
+public class GenericMetricEntityBatchReader implements EntityCreationListener {
+    private static final Logger LOG = LoggerFactory.getLogger(GenericEntityBatchReader.class);
+
+    private List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
+    private GenericEntityStreamReader reader;
+
+    public GenericMetricEntityBatchReader(String metricName, SearchCondition condition) throws Exception {
+        reader = new GenericEntityStreamReader(GenericMetricEntity.GENERIC_METRIC_SERVICE, condition,
+                                               metricName);
+    }
+
+    public long getLastTimestamp() {
+        return reader.getLastTimestamp();
+    }
+
+    public long getFirstTimestamp() {
+        return reader.getFirstTimestamp();
+    }
+
+    @Override
+    public void entityCreated(TaggedLogAPIEntity entity) {
+        entities.add(entity);
+    }
+
+    @SuppressWarnings("unchecked")
+    public <T> List<T> read() throws Exception {
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Start reading as batch mode");
+        }
+        reader.register(this);
+        reader.readAsStream();
+        return (List<T>)entities;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java
index 1cf3905..216022f 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricEntityDecompactionStreamReader.java
@@ -25,74 +25,79 @@
 
 import java.text.ParseException;
 
-public class GenericMetricEntityDecompactionStreamReader extends StreamReader implements EntityCreationListener{
-	@SuppressWarnings("unused")
-	private static final Logger LOG = LoggerFactory.getLogger(GenericMetricEntityDecompactionStreamReader.class);
-	private GenericEntityStreamReader reader;
-	private EntityDefinition ed;
-	private String serviceName = GenericMetricEntity.GENERIC_METRIC_SERVICE;
-	private long start;
-	private long end;
-	private GenericMetricShadowEntity single = new GenericMetricShadowEntity();
-	
-	/**
-	 * it makes sense that serviceName should not be provided while metric name should be provided as prefix
-	 * @param metricName
-	 * @param condition
-	 * @throws InstantiationException
-	 * @throws IllegalAccessException
-	 * @throws ParseException
-	 */
-	public GenericMetricEntityDecompactionStreamReader(String metricName, SearchCondition condition) throws InstantiationException, IllegalAccessException, ParseException{
-		ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
-		checkIsMetric(ed);
-		reader = new GenericEntityStreamReader(serviceName, condition, metricName);
-		start = condition.getStartTime();
-		end = condition.getEndTime();
-	}
-	
-	private void checkIsMetric(EntityDefinition ed){
-		if(ed.getMetricDefinition() == null)
-			throw new IllegalArgumentException("Only metric entity comes here");
-	}
-	
-	@Override
-	public void entityCreated(TaggedLogAPIEntity entity) throws Exception{
-		GenericMetricEntity e = (GenericMetricEntity)entity;
-		double[] value = e.getValue();
-		if(value != null) {
-			int count =value.length;
-			@SuppressWarnings("unused")
-			Class<?> cls = ed.getMetricDefinition().getSingleTimestampEntityClass();
-			for (int i = 0; i < count; i++) {
-				long ts = entity.getTimestamp() + i * ed.getMetricDefinition().getInterval();
-				// exclude those entity which is not within the time range in search condition. [start, end)
-				if (ts < start || ts >= end) {
-					continue;
-				}
-				single.setTimestamp(ts);
-				single.setTags(entity.getTags());
-				single.setValue(e.getValue()[i]);
-				for (EntityCreationListener l : _listeners) {
-					l.entityCreated(single);
-				}
-			}
-		}
-	}
-	
-	@Override
-	public void readAsStream() throws Exception{
-		reader.register(this);
-		reader.readAsStream();
-	}
+public class GenericMetricEntityDecompactionStreamReader extends StreamReader
+    implements EntityCreationListener {
+    @SuppressWarnings("unused")
+    private static final Logger LOG = LoggerFactory
+        .getLogger(GenericMetricEntityDecompactionStreamReader.class);
+    private GenericEntityStreamReader reader;
+    private EntityDefinition ed;
+    private String serviceName = GenericMetricEntity.GENERIC_METRIC_SERVICE;
+    private long start;
+    private long end;
+    private GenericMetricShadowEntity single = new GenericMetricShadowEntity();
 
-	@Override
-	public long getLastTimestamp() {
-		return reader.getLastTimestamp();
-	}
+    /**
+     * it makes sense that serviceName should not be provided while metric name should be provided as prefix
+     *
+     * @param metricName
+     * @param condition
+     * @throws InstantiationException
+     * @throws IllegalAccessException
+     * @throws ParseException
+     */
+    public GenericMetricEntityDecompactionStreamReader(String metricName, SearchCondition condition)
+        throws InstantiationException, IllegalAccessException, ParseException {
+        ed = EntityDefinitionManager.getEntityByServiceName(serviceName);
+        checkIsMetric(ed);
+        reader = new GenericEntityStreamReader(serviceName, condition, metricName);
+        start = condition.getStartTime();
+        end = condition.getEndTime();
+    }
 
-	@Override
-	public long getFirstTimestamp() {
-		return reader.getFirstTimestamp();
-	}
-}
\ No newline at end of file
+    private void checkIsMetric(EntityDefinition ed) {
+        if (ed.getMetricDefinition() == null) {
+            throw new IllegalArgumentException("Only metric entity comes here");
+        }
+    }
+
+    @Override
+    public void entityCreated(TaggedLogAPIEntity entity) throws Exception {
+        GenericMetricEntity e = (GenericMetricEntity)entity;
+        double[] value = e.getValue();
+        if (value != null) {
+            int count = value.length;
+            @SuppressWarnings("unused")
+            Class<?> cls = ed.getMetricDefinition().getSingleTimestampEntityClass();
+            for (int i = 0; i < count; i++) {
+                long ts = entity.getTimestamp() + i * ed.getMetricDefinition().getInterval();
+                // exclude those entity which is not within the time range in search condition. [start, end)
+                if (ts < start || ts >= end) {
+                    continue;
+                }
+                single.setTimestamp(ts);
+                single.setTags(entity.getTags());
+                single.setValue(e.getValue()[i]);
+                for (EntityCreationListener l : listeners) {
+                    l.entityCreated(single);
+                }
+            }
+        }
+    }
+
+    @Override
+    public void readAsStream() throws Exception {
+        reader.register(this);
+        reader.readAsStream();
+    }
+
+    @Override
+    public long getLastTimestamp() {
+        return reader.getLastTimestamp();
+    }
+
+    @Override
+    public long getFirstTimestamp() {
+        return reader.getFirstTimestamp();
+    }
+}
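
The `entityCreated` callback above fans a compacted metric row back out into per-interval points. A standalone sketch of that timestamp expansion (not part of this patch); the interval matches the `@Metric(interval = 60000)` annotation and the window values are illustrative.

```java
public class DecompactionExample {
    public static void main(String[] args) {
        long rowTimestamp = 0L;        // timestamp of the compacted row
        long interval = 60_000L;       // per @Metric(interval = 60000) on GenericMetricEntity
        double[] values = {1.0, 2.0, 3.0, 4.0, 5.0};
        long start = 60_000L;          // search condition window [start, end)
        long end = 240_000L;

        for (int i = 0; i < values.length; i++) {
            long ts = rowTimestamp + i * interval;
            if (ts < start || ts >= end) {
                continue; // outside the query window, skipped
            }
            // Prints 60000 -> 2.0, 120000 -> 3.0, 180000 -> 4.0
            System.out.println(ts + " -> " + values[i]);
        }
    }
}
```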
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java
index acd1290..8ead7cd 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericMetricShadowEntity.java
@@ -22,13 +22,13 @@
  * just a shadow class to avoid dynamically create the class and instantiate using reflection
  */
 public class GenericMetricShadowEntity extends TaggedLogAPIEntity {
-	private double value;
+    private double value;
 
-	public double getValue() {
-		return value;
-	}
+    public double getValue() {
+        return value;
+    }
 
-	public void setValue(double value) {
-		this.value = value;
-	}
+    public void setValue(double value) {
+        this.value = value;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java
index 6869c7c..97f538c 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntity.java
@@ -35,24 +35,27 @@
  */
 @XmlRootElement
 @XmlAccessorType(XmlAccessType.FIELD)
-@XmlType(propOrder = {"success","exception","meta","type","obj"})
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@XmlType(propOrder = {
+                      "success", "exception", "meta", "type", "obj"
+    })
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @JsonDeserialize(using = GenericServiceAPIResponseEntityDeserializer.class)
-@JsonIgnoreProperties(ignoreUnknown=true)
-public class GenericServiceAPIResponseEntity<T>{
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class GenericServiceAPIResponseEntity<T> {
     /**
      * Please use primitive type of value in meta as possible
      */
-    private Map<String,Object> meta;
-	private boolean success;
-	private String exception;
+    private Map<String, Object> meta;
+    private boolean success;
+    private String exception;
     private List<T> obj;
     private Class<T> type;
 
-    public GenericServiceAPIResponseEntity(){
+    public GenericServiceAPIResponseEntity() {
         // default constructor
     }
-    public GenericServiceAPIResponseEntity(Class<T> type){
+
+    public GenericServiceAPIResponseEntity(Class<T> type) {
         this.setType(type);
     }
 
@@ -72,7 +75,7 @@
         this.obj = obj;
     }
 
-    public void setObj(List<T> obj,Class<T> type) {
+    public void setObj(List<T> obj, Class<T> type) {
         this.setObj(obj);
         this.setType(type);
     }
@@ -85,10 +88,10 @@
      * Set the first object's class as type
      */
     @SuppressWarnings("unused")
-    public void setTypeByObj(){
-        for(T t:this.obj){
-            if(this.type == null && t!=null){
-                this.type = (Class<T>) t.getClass();
+    public void setTypeByObj() {
+        for (T t : this.obj) {
+            if (this.type == null && t != null) {
+                this.type = (Class<T>)t.getClass();
             }
         }
     }
@@ -102,17 +105,19 @@
         this.type = type;
     }
 
-	public boolean isSuccess() {
-		return success;
-	}
-	public void setSuccess(boolean success) {
-		this.success = success;
-	}
-	public String getException() {
-		return exception;
-	}
+    public boolean isSuccess() {
+        return success;
+    }
 
-    public void setException(Exception exceptionObj){
+    public void setSuccess(boolean success) {
+        this.success = success;
+    }
+
+    public String getException() {
+        return exception;
+    }
+
+    public void setException(Exception exceptionObj) {
         this.exception = EagleExceptionWrapper.wrap(exceptionObj);
     }
 }
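
A hedged sketch of building a typed response the way a service endpoint might (not part of this patch); only setters visible in the class above are used, and the payload and meta values are illustrative.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;

public class ResponseBuildExample {
    public static GenericServiceAPIResponseEntity<String> okResponse() {
        GenericServiceAPIResponseEntity<String> response =
            new GenericServiceAPIResponseEntity<>(String.class);
        response.setSuccess(true);
        response.setObj(Arrays.asList("row1", "row2"), String.class); // payload plus its element type

        Map<String, Object> meta = new HashMap<>();
        meta.put("elapsedms", 12L); // keep meta values primitive, per the field comment
        response.setMeta(meta);
        return response;
    }
}
```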
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java
index 836295b..8ccb43a 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/GenericServiceAPIResponseEntityDeserializer.java
@@ -30,57 +30,61 @@
 import java.util.*;
 
 /**
- * @since 3/18/15
+ * @since 3/18/15.
  */
-public class GenericServiceAPIResponseEntityDeserializer extends JsonDeserializer<GenericServiceAPIResponseEntity> {
-    private final static String META_FIELD="meta";
-    private final static String SUCCESS_FIELD="success";
-    private final static String EXCEPTION_FIELD="exception";
-    private final static String OBJ_FIELD="obj";
-    private final static String TYPE_FIELD="type";
+public class GenericServiceAPIResponseEntityDeserializer
+    extends JsonDeserializer<GenericServiceAPIResponseEntity> {
+    private static final String META_FIELD = "meta";
+    private static final String SUCCESS_FIELD = "success";
+    private static final String EXCEPTION_FIELD = "exception";
+    private static final String OBJ_FIELD = "obj";
+    private static final String TYPE_FIELD = "type";
 
     @Override
-    public GenericServiceAPIResponseEntity deserialize(JsonParser jp, DeserializationContext ctxt) throws IOException, JsonProcessingException {
+    public GenericServiceAPIResponseEntity deserialize(JsonParser jp, DeserializationContext ctxt)
+        throws IOException, JsonProcessingException {
         GenericServiceAPIResponseEntity entity = new GenericServiceAPIResponseEntity();
         ObjectCodec objectCodec = jp.getCodec();
 
         JsonNode rootNode = jp.getCodec().readTree(jp);
-        if(rootNode.isObject()){
-            Iterator<Map.Entry<String,JsonNode>> fields = rootNode.fields();
+        if (rootNode.isObject()) {
+            Iterator<Map.Entry<String, JsonNode>> fields = rootNode.fields();
             JsonNode objNode = null;
-            while(fields.hasNext()){
-                Map.Entry<String,JsonNode> field = fields.next();
-                if (META_FIELD.equals(field.getKey()) && field.getValue() != null)
+            while (fields.hasNext()) {
+                Map.Entry<String, JsonNode> field = fields.next();
+                if (META_FIELD.equals(field.getKey()) && field.getValue() != null) {
                     entity.setMeta(objectCodec.readValue(field.getValue().traverse(), Map.class));
-                else if(SUCCESS_FIELD.equals(field.getKey()) && field.getValue() != null){
+                } else if (SUCCESS_FIELD.equals(field.getKey()) && field.getValue() != null) {
                     entity.setSuccess(field.getValue().booleanValue());
-                }else if(EXCEPTION_FIELD.equals(field.getKey()) && field.getValue() != null){
+                } else if (EXCEPTION_FIELD.equals(field.getKey()) && field.getValue() != null) {
                     entity.setException(new Exception(field.getValue().textValue()));
-                }else if(TYPE_FIELD.endsWith(field.getKey())  && field.getValue() != null){
-                    Preconditions.checkNotNull(field.getValue().textValue(),"Response type class is null");
+                } else if (TYPE_FIELD.endsWith(field.getKey()) && field.getValue() != null) {
+                    Preconditions.checkNotNull(field.getValue().textValue(), "Response type class is null");
                     try {
                         entity.setType(Class.forName(field.getValue().textValue()));
                     } catch (ClassNotFoundException e) {
                         throw new IOException(e);
                     }
-                }else if(OBJ_FIELD.equals(field.getKey()) && field.getValue() != null){
+                } else if (OBJ_FIELD.equals(field.getKey()) && field.getValue() != null) {
                     objNode = field.getValue();
                 }
             }
 
-            if(objNode!=null) {
-                JavaType collectionType=null;
+            if (objNode != null) {
+                JavaType collectionType = null;
                 if (entity.getType() != null) {
-                    collectionType = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, entity.getType());
-                }else{
-                    collectionType = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class, Map.class);
+                    collectionType = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class,
+                                                                                           entity.getType());
+                } else {
+                    collectionType = TypeFactory.defaultInstance().constructCollectionType(LinkedList.class,
+                                                                                           Map.class);
                 }
                 List obj = objectCodec.readValue(objNode.traverse(), collectionType);
                 entity.setObj(obj);
             }
-        }else{
+        } else {
             throw new IOException("root node is not object");
         }
         return entity;
     }
-}
\ No newline at end of file
+}
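
Because `GenericServiceAPIResponseEntity` is annotated with `@JsonDeserialize(using = GenericServiceAPIResponseEntityDeserializer.class)`, a plain Jackson `ObjectMapper` routes through the deserializer above. A hedged round-trip sketch (not part of this patch); the JSON literal is illustrative.

```java
import com.fasterxml.jackson.databind.ObjectMapper;

import org.apache.eagle.log.entity.GenericServiceAPIResponseEntity;

public class ResponseDeserializeExample {
    public static void main(String[] args) throws Exception {
        String json = "{\"success\":true,"
            + "\"type\":\"java.lang.String\","
            + "\"obj\":[\"row1\",\"row2\"]}";

        ObjectMapper mapper = new ObjectMapper();
        GenericServiceAPIResponseEntity<?> response =
            mapper.readValue(json, GenericServiceAPIResponseEntity.class);

        System.out.println(response.isSuccess()); // true
        System.out.println(response.getType());   // class java.lang.String
    }
}
```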
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java
index 7a38033..32f382b 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseInternalLogHelper.java
@@ -30,216 +30,232 @@
 import java.util.*;
 
 public class HBaseInternalLogHelper {
-	private final static Logger LOG  = LoggerFactory.getLogger(HBaseInternalLogHelper.class);
+    private static final Logger LOG = LoggerFactory.getLogger(HBaseInternalLogHelper.class);
 
-	private static final EntitySerDeserializer ENTITY_SERDESER = new EntitySerDeserializer();
+    private static final EntitySerDeserializer ENTITY_SERDESER = new EntitySerDeserializer();
 
-	/**
-	 *
-	 * @param ed
-	 * @param r
-	 * @param qualifiers if null, return all qualifiers defined in ed
-	 * @return
-	 */
-	public static InternalLog parse(EntityDefinition ed, Result r, byte[][] qualifiers) {
-		final byte[] row = r.getRow();
-		// skip the first 4 bytes : prefix
-		final int offset = (ed.getPartitions() == null) ? (4) : (4 + ed.getPartitions().length * 4);
-		long timestamp = ByteUtil.bytesToLong(row, offset);
-		// reverse timestamp
-		timestamp = Long.MAX_VALUE - timestamp;
-		final byte[] family = ed.getColumnFamily().getBytes();
-		final Map<String, byte[]> allQualifierValues = new HashMap<String, byte[]>();
+    /**
+     * @param ed
+     * @param r
+     * @param qualifiers if null, return all qualifiers defined in ed
+     * @return
+     */
+    public static InternalLog parse(EntityDefinition ed, Result r, byte[][] qualifiers) {
+        final byte[] row = r.getRow();
+        // skip the first 4 bytes : prefix
+        final int offset = (ed.getPartitions() == null) ? (4) : (4 + ed.getPartitions().length * 4);
+        long timestamp = ByteUtil.bytesToLong(row, offset);
+        // reverse timestamp
+        timestamp = Long.MAX_VALUE - timestamp;
+        final byte[] family = ed.getColumnFamily().getBytes();
+        final Map<String, byte[]> allQualifierValues = new HashMap<String, byte[]>();
 
-		if (qualifiers != null) {
-			int count = qualifiers.length;
-			final byte[][] values = new byte[count][];
-			for (int i = 0; i < count; i++) {
-				// TODO if returned value is null, it means no this column for this row, so why set null to the object?
-				values[i] = r.getValue(family, qualifiers[i]);
-				allQualifierValues.put(new String(qualifiers[i]), values[i]);
-			}
-		}else{
-			// return all qualifiers
-			for(KeyValue kv:r.list()){
-				byte[] qualifier = kv.getQualifier();
-				byte[] value = kv.getValue();
-				allQualifierValues.put(new String(qualifier),value);
-			}
-		}
-		final InternalLog log = buildObject(ed, row, timestamp, allQualifierValues);
-		return log;
-	}
+        if (qualifiers != null) {
+            int count = qualifiers.length;
+            final byte[][] values = new byte[count][];
+            for (int i = 0; i < count; i++) {
+                // TODO if returned value is null, it means no this column for this row, so why set null to
+                // the object?
+                values[i] = r.getValue(family, qualifiers[i]);
+                allQualifierValues.put(new String(qualifiers[i]), values[i]);
+            }
+        } else {
+            // return all qualifiers
+            for (KeyValue kv : r.list()) {
+                byte[] qualifier = kv.getQualifier();
+                byte[] value = kv.getValue();
+                allQualifierValues.put(new String(qualifier), value);
+            }
+        }
+        final InternalLog log = buildObject(ed, row, timestamp, allQualifierValues);
+        return log;
+    }
 
-	/**
-	 *
-	 * @param ed
-	 * @param row
-	 * @param timestamp
-	 * @param allQualifierValues <code>Map &lt; Qualifier name (not display name),Value in bytes array &gt;</code>
-	 * @return
-	 */
-	public static InternalLog buildObject(EntityDefinition ed, byte[] row, long timestamp, Map<String, byte[]> allQualifierValues) {
-		InternalLog log = new InternalLog();
-		String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row);
-		log.setEncodedRowkey(myRow);
-		log.setPrefix(ed.getPrefix());
-		log.setTimestamp(timestamp);
+    /**
+     * @param ed
+     * @param row
+     * @param timestamp
+     * @param allQualifierValues
+     *            <code>Map &lt; Qualifier name (not display name),Value in bytes array &gt;</code>
+     * @return
+     */
+    public static InternalLog buildObject(EntityDefinition ed, byte[] row, long timestamp,
+                                          Map<String, byte[]> allQualifierValues) {
+        InternalLog log = new InternalLog();
+        String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row);
+        log.setEncodedRowkey(myRow);
+        log.setPrefix(ed.getPrefix());
+        log.setTimestamp(timestamp);
 
-		Map<String, byte[]> logQualifierValues = new HashMap<String, byte[]>();
-		Map<String, String> logTags = new HashMap<String, String>();
-		Map<String, Object> extra = null;
+        Map<String, byte[]> logQualifierValues = new HashMap<String, byte[]>();
+        Map<String, String> logTags = new HashMap<String, String>();
+        Map<String, Object> extra = null;
 
-		Map<String,Double> doubleMap = null;
-		// handle with metric
-		boolean isMetricEntity = GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(ed.getService());
-		double[] metricValueArray = null;
+        Map<String, Double> doubleMap = null;
+        // handle with metric
+        boolean isMetricEntity = GenericMetricEntity.GENERIC_METRIC_SERVICE.equals(ed.getService());
+        double[] metricValueArray = null;
 
-		for (Map.Entry<String, byte[]> entry : allQualifierValues.entrySet()) {
-			if (ed.isTag(entry.getKey())) {
-				if (entry.getValue() != null) {
-					logTags.put(entry.getKey(), new String(entry.getValue()));
-				}else if (TokenConstant.isExpression(entry.getKey())){
-					if(doubleMap == null) doubleMap = EntityQualifierUtils.bytesMapToDoubleMap(allQualifierValues, ed);
-					// Caculate expression based fields
-					String expression = TokenConstant.parseExpressionContent(entry.getKey());
-					if (extra == null) extra = new HashMap<String, Object>();
+        for (Map.Entry<String, byte[]> entry : allQualifierValues.entrySet()) {
+            if (ed.isTag(entry.getKey())) {
+                if (entry.getValue() != null) {
+                    logTags.put(entry.getKey(), new String(entry.getValue()));
+                } else if (TokenConstant.isExpression(entry.getKey())) {
+                    if (doubleMap == null) {
+                        doubleMap = EntityQualifierUtils.bytesMapToDoubleMap(allQualifierValues, ed);
+                    }
+                    // Caculate expression based fields
+                    String expression = TokenConstant.parseExpressionContent(entry.getKey());
+                    if (extra == null) {
+                        extra = new HashMap<String, Object>();
+                    }
 
-					// Evaluation expression as output based on entity
-					// -----------------------------------------------
-					// 1) Firstly, check whether is metric entity and expression requires value and also value is not number (i.e. double[])
-					// 2) Treat all required fields as double, if not number, then set result as NaN
+                    // Evaluation expression as output based on entity
+                    // -----------------------------------------------
+                    // 1) Firstly, check whether is metric entity and expression requires value and also value
+                    // is not number (i.e. double[])
+                    // 2) Treat all required fields as double, if not number, then set result as NaN
 
-					try {
-						ExpressionParser parser = ExpressionParser.parse(expression);
-						boolean isRequiringValue = parser.getDependentFields().contains(GenericMetricEntity.VALUE_FIELD);
+                    try {
+                        ExpressionParser parser = ExpressionParser.parse(expression);
+                        boolean isRequiringValue = parser.getDependentFields()
+                            .contains(GenericMetricEntity.VALUE_FIELD);
 
-						if(isMetricEntity && isRequiringValue && doubleMap.get(GenericMetricEntity.VALUE_FIELD)!=null
-								&& Double.isNaN(doubleMap.get(GenericMetricEntity.VALUE_FIELD))) // EntityQualifierUtils will convert non-number field into Double.NaN
-						{
-							// if dependent fields require "value"
-							// and value exists but value's type is double[] instead of double
+                        if (isMetricEntity && isRequiringValue
+                            && doubleMap.get(GenericMetricEntity.VALUE_FIELD) != null
+                            && Double.isNaN(doubleMap.get(GenericMetricEntity.VALUE_FIELD))) {
+                            // EntityQualifierUtils will convert non-number field into Double.NaN
+                            // if dependent fields require "value"
+                            // and value exists but value's type is double[] instead of double
 
-							// handle with metric value array based expression
-							// lazily extract metric value as double array if required
-							if(metricValueArray == null){
-								// if(allQualifierValues.containsKey(GenericMetricEntity.VALUE_FIELD)){
-								Qualifier qualifier = ed.getDisplayNameMap().get(GenericMetricEntity.VALUE_FIELD);
-								EntitySerDeser serDeser = qualifier.getSerDeser();
-								if(serDeser instanceof DoubleArraySerDeser){
-									byte[] value = allQualifierValues.get(qualifier.getQualifierName());
-									if(value !=null ) metricValueArray = (double[]) serDeser.deserialize(value);
-								}
-								// }
-							}
+                            // handle with metric value array based expression
+                            // lazily extract metric value as double array if required
+                            if (metricValueArray == null) {
+                                // if(allQualifierValues.containsKey(GenericMetricEntity.VALUE_FIELD)){
+                                Qualifier qualifier = ed.getDisplayNameMap()
+                                    .get(GenericMetricEntity.VALUE_FIELD);
+                                EntitySerDeser serDeser = qualifier.getSerDeser();
+                                if (serDeser instanceof DoubleArraySerDeser) {
+                                    byte[] value = allQualifierValues.get(qualifier.getQualifierName());
+                                    if (value != null) {
+                                        metricValueArray = (double[])serDeser.deserialize(value);
+                                    }
+                                }
+                                // }
+                            }
 
-							if(metricValueArray!=null){
-								double[] resultBucket = new double[metricValueArray.length];
-								Map<String, Double> _doubleMap = new HashMap<String,Double>(doubleMap);
-								_doubleMap.remove(entry.getKey());
-								for(int i=0;i< resultBucket.length;i++) {
-									_doubleMap.put(GenericMetricEntity.VALUE_FIELD, metricValueArray[i]);
-									resultBucket[i]=  parser.eval(_doubleMap);
-								}
-								extra.put(expression,resultBucket);
-							}else{
-								LOG.warn("Failed convert metric value into double[] type which is required by expression: "+expression);
-								// if require value in double[] is NaN
-								double value = parser.eval(doubleMap);
-								extra.put(expression, value);
-							}
-						}else {
-							double value = parser.eval(doubleMap);
-							extra.put(expression, value);
-							// LOG.info("DEBUG: "+entry.getKey()+" = "+ value);
-						}
-					} catch (Exception e) {
-						LOG.error("Failed to eval expression "+expression+", exception: "+e.getMessage(),e);
-					}
-				}
-			} else {
-				logQualifierValues.put(entry.getKey(),entry.getValue());
-			}
-		}
-		log.setQualifierValues(logQualifierValues);
-		log.setTags(logTags);
-		log.setExtraValues(extra);
-		return log;
-	}
-	
-	public static TaggedLogAPIEntity buildEntity(InternalLog log, EntityDefinition entityDef) throws Exception {
-		Map<String, byte[]> qualifierValues = log.getQualifierValues();
-		TaggedLogAPIEntity entity = ENTITY_SERDESER.readValue(qualifierValues, entityDef);
-		if (entity.getTags() == null && log.getTags() != null) {
-			entity.setTags(log.getTags());
-		}
-		entity.setExp(log.getExtraValues());
-		entity.setTimestamp(log.getTimestamp());
-		entity.setEncodedRowkey(log.getEncodedRowkey());
-		entity.setPrefix(log.getPrefix());
-		return entity;
-	}
-	
-	public static List<TaggedLogAPIEntity> buildEntities(List<InternalLog> logs, EntityDefinition entityDef) throws Exception {
-		final List<TaggedLogAPIEntity> result = new ArrayList<TaggedLogAPIEntity>(logs.size());
-		for (InternalLog log : logs) {
-			result.add(buildEntity(log, entityDef));
-		}
-		return result;
-	}
-	
-	public static byte[][] getOutputQualifiers(EntityDefinition entityDef, List<String> outputFields) {
-		final byte[][] result = new byte[outputFields.size()][];
-		int index = 0;
-		for(String field : outputFields){
-			// convert displayName to qualifierName
-			Qualifier q = entityDef.getDisplayNameMap().get(field);
-			if(q == null){ // for tag case
-				result[index++] = field.getBytes();
-			}else{ // for qualifier case
-				result[index++] = q.getQualifierName().getBytes();
-			}
-		}
-		return result;
-	}
+                            if (metricValueArray != null) {
+                                double[] resultBucket = new double[metricValueArray.length];
+                                Map<String, Double> _doubleMap = new HashMap<String, Double>(doubleMap);
+                                _doubleMap.remove(entry.getKey());
+                                for (int i = 0; i < resultBucket.length; i++) {
+                                    _doubleMap.put(GenericMetricEntity.VALUE_FIELD, metricValueArray[i]);
+                                    resultBucket[i] = parser.eval(_doubleMap);
+                                }
+                                extra.put(expression, resultBucket);
+                            } else {
+                                LOG.warn("Failed convert metric value into double[] type which is required by expression: "
+                                         + expression);
+                                // if require value in double[] is NaN
+                                double value = parser.eval(doubleMap);
+                                extra.put(expression, value);
+                            }
+                        } else {
+                            double value = parser.eval(doubleMap);
+                            extra.put(expression, value);
+                            // LOG.info("DEBUG: "+entry.getKey()+" = "+ value);
+                        }
+                    } catch (Exception e) {
+                        LOG.error("Failed to eval expression " + expression + ", exception: "
+                                  + e.getMessage(), e);
+                    }
+                }
+            } else {
+                logQualifierValues.put(entry.getKey(), entry.getValue());
+            }
+        }
+        log.setQualifierValues(logQualifierValues);
+        log.setTags(logTags);
+        log.setExtraValues(extra);
+        return log;
+    }
 
-	public static InternalLog convertToInternalLog(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception {
-		final InternalLog log = new InternalLog();
-		final Map<String, String> inputTags = entity.getTags();
-		final Map<String, String> tags = new TreeMap<String, String>();
-		if(inputTags!=null) {
-			for (Map.Entry<String, String> entry : inputTags.entrySet()) {
-				tags.put(entry.getKey(), entry.getValue());
-			}
-		}
-		log.setTags(tags);
-		if(entityDef.isTimeSeries()){
-			log.setTimestamp(entity.getTimestamp());
-		}else{
-			log.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0
-		}
-		
-		// For Metric entity, prefix is populated along with entity instead of EntityDefinition
-		if(entity.getPrefix() != null && !entity.getPrefix().isEmpty()){
-			log.setPrefix(entity.getPrefix());
-		}else{
-			log.setPrefix(entityDef.getPrefix());
-		}
-		
-		log.setPartitions(entityDef.getPartitions());
-		EntitySerDeserializer des = new EntitySerDeserializer();
-		log.setQualifierValues(des.writeValue(entity, entityDef));
-		
-		final IndexDefinition[] indexDefs = entityDef.getIndexes();
-		if (indexDefs != null) {
-			final List<byte[]> indexRowkeys = new ArrayList<byte[]>();
-			for (int i = 0; i < indexDefs.length; ++i) {
-				final IndexDefinition indexDef = indexDefs[i];
-				final byte[] indexRowkey = indexDef.generateIndexRowkey(entity);
-				indexRowkeys.add(indexRowkey);
-			}
-			log.setIndexRowkeys(indexRowkeys);
-		}
-		return log;
-	}
+    public static TaggedLogAPIEntity buildEntity(InternalLog log, EntityDefinition entityDef)
+        throws Exception {
+        Map<String, byte[]> qualifierValues = log.getQualifierValues();
+        TaggedLogAPIEntity entity = ENTITY_SERDESER.readValue(qualifierValues, entityDef);
+        if (entity.getTags() == null && log.getTags() != null) {
+            entity.setTags(log.getTags());
+        }
+        entity.setExp(log.getExtraValues());
+        entity.setTimestamp(log.getTimestamp());
+        entity.setEncodedRowkey(log.getEncodedRowkey());
+        entity.setPrefix(log.getPrefix());
+        return entity;
+    }
+
+    public static List<TaggedLogAPIEntity> buildEntities(List<InternalLog> logs, EntityDefinition entityDef)
+        throws Exception {
+        final List<TaggedLogAPIEntity> result = new ArrayList<TaggedLogAPIEntity>(logs.size());
+        for (InternalLog log : logs) {
+            result.add(buildEntity(log, entityDef));
+        }
+        return result;
+    }
+
+    public static byte[][] getOutputQualifiers(EntityDefinition entityDef, List<String> outputFields) {
+        final byte[][] result = new byte[outputFields.size()][];
+        int index = 0;
+        for (String field : outputFields) {
+            // convert displayName to qualifierName
+            Qualifier q = entityDef.getDisplayNameMap().get(field);
+            if (q == null) { // for tag case
+                result[index++] = field.getBytes();
+            } else { // for qualifier case
+                result[index++] = q.getQualifierName().getBytes();
+            }
+        }
+        return result;
+    }
+
+    public static InternalLog convertToInternalLog(TaggedLogAPIEntity entity, EntityDefinition entityDef)
+        throws Exception {
+        final InternalLog log = new InternalLog();
+        final Map<String, String> inputTags = entity.getTags();
+        final Map<String, String> tags = new TreeMap<String, String>();
+        if (inputTags != null) {
+            for (Map.Entry<String, String> entry : inputTags.entrySet()) {
+                tags.put(entry.getKey(), entry.getValue());
+            }
+        }
+        log.setTags(tags);
+        if (entityDef.isTimeSeries()) {
+            log.setTimestamp(entity.getTimestamp());
+        } else {
+            log.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0
+        }
+
+        // For Metric entity, prefix is populated along with entity instead of EntityDefinition
+        if (entity.getPrefix() != null && !entity.getPrefix().isEmpty()) {
+            log.setPrefix(entity.getPrefix());
+        } else {
+            log.setPrefix(entityDef.getPrefix());
+        }
+
+        log.setPartitions(entityDef.getPartitions());
+        EntitySerDeserializer des = new EntitySerDeserializer();
+        log.setQualifierValues(des.writeValue(entity, entityDef));
+
+        final IndexDefinition[] indexDefs = entityDef.getIndexes();
+        if (indexDefs != null) {
+            final List<byte[]> indexRowkeys = new ArrayList<byte[]>();
+            for (int i = 0; i < indexDefs.length; ++i) {
+                final IndexDefinition indexDef = indexDefs[i];
+                final byte[] indexRowkey = indexDef.generateIndexRowkey(entity);
+                indexRowkeys.add(indexRowkey);
+            }
+            log.setIndexRowkeys(indexRowkeys);
+        }
+        return log;
+    }
 }
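
*Editor's note, not part of the patch:* `parse()` above recovers the write time by skipping the 4-byte prefix hash (plus 4 bytes per partition) and un-reversing the stored timestamp. A self-contained sketch of that arithmetic, using `java.nio.ByteBuffer` in place of Eagle's `ByteUtil` and a hypothetical partition count:

```java
import java.nio.ByteBuffer;

public class ReversedTimestampSketch {
    public static void main(String[] args) {
        long originalTs = 1_700_000_000_000L;   // example write time
        int partitionCount = 2;                 // hypothetical; the real value comes from EntityDefinition

        // Layout assumed from the code above: prefixHash(4) + partitionHash(4)*N + reversedTimestamp(8) + ...
        ByteBuffer row = ByteBuffer.allocate(4 + partitionCount * 4 + 8);
        row.putInt(42);                         // prefix hash placeholder
        for (int i = 0; i < partitionCount; i++) {
            row.putInt(7 * (i + 1));            // partition hash placeholders
        }
        row.putLong(Long.MAX_VALUE - originalTs); // timestamps are stored reversed for descending scans

        // Decoding mirrors HBaseInternalLogHelper.parse(): same offset, then un-reverse.
        int offset = 4 + partitionCount * 4;
        long decoded = Long.MAX_VALUE - ByteBuffer.wrap(row.array(), offset, 8).getLong();
        System.out.println(decoded == originalTs); // true
    }
}
```
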
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java
index c8b9a33..d5c8e2c 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogReader2.java
@@ -28,59 +28,62 @@
 import java.util.List;
 
 public class HBaseLogReader2 extends AbstractHBaseLogReader<InternalLog> {
-	protected ResultScanner rs;
+    protected ResultScanner rs;
 
-	public HBaseLogReader2(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime, Filter filter, String lastScanKey, byte[][] outputQualifiers) {
-		super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers);
-	}
+    public HBaseLogReader2(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime,
+                           Filter filter, String lastScanKey, byte[][] outputQualifiers) {
+        super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers);
+    }
 
-	/**
-	 * This constructor supports partition.
-	 *
-	 * @param ed               entity definition
-	 * @param partitions       partition values, which is sorted in partition definition order. TODO: in future we need to support
-	 *                         multiple values for one partition field
-	 * @param startTime        start time of the query
-	 * @param endTime          end time of the query
-	 * @param filter           filter for the hbase scan
-	 * @param lastScanKey      the key of last scan
-	 * @param outputQualifiers the bytes of output qualifier names
-	 * @param prefix           can be populated from outside world specifically for generic metric reader
-	 */
-	public HBaseLogReader2(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime, Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix) {
-		super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, prefix);
-	}
+    /**
+     * This constructor supports partition.
+     *
+     * @param ed entity definition
+     * @param partitions partition values, which is sorted in partition definition order. TODO: in future we
+     *            need to support multiple values for one partition field
+     * @param startTime start time of the query
+     * @param endTime end time of the query
+     * @param filter filter for the hbase scan
+     * @param lastScanKey the key of last scan
+     * @param outputQualifiers the bytes of output qualifier names
+     * @param prefix can be populated from outside world specifically for generic metric reader
+     */
+    public HBaseLogReader2(EntityDefinition ed, List<String> partitions, Date startTime, Date endTime,
+                           Filter filter, String lastScanKey, byte[][] outputQualifiers, String prefix) {
+        super(ed, partitions, startTime, endTime, filter, lastScanKey, outputQualifiers, prefix);
+    }
 
-	@Override
-	protected void onOpen(HTableInterface tbl, Scan scan) throws IOException {
-		rs = tbl.getScanner(scan);
-	}
+    @Override
+    protected void onOpen(HTableInterface tbl, Scan scan) throws IOException {
+        rs = tbl.getScanner(scan);
+    }
 
-	/**
-	 * <h2>Close:</h2>
-	 * 1. Call super.close(): release current table connection <br></br>
-	 * 2. Close Scanner<br></br>
-	 *
-	 * @throws IOException
-	 */
-	@Override
-	public void close() throws IOException {
-		super.close();
-		if(rs != null){
-			rs.close();
-		}
-	}
+    /**
+     * <h2>Close:</h2> 1. Call super.close(): release current table connection <br>
+     * <br>
+     * 2. Close Scanner<br>
+     * <br>
+     *
+     * @throws IOException
+     */
+    @Override
+    public void close() throws IOException {
+        super.close();
+        if (rs != null) {
+            rs.close();
+        }
+    }
 
-	@Override
-	public InternalLog read() throws IOException {
-		if (rs == null)
-			throw new IllegalArgumentException(
-					"ResultScanner must be initialized before reading");
-		InternalLog t = null;
-		Result r = rs.next();
-		if (r != null) {
-			t = HBaseInternalLogHelper.parse(_ed, r, qualifiers);
-		}
-		return t;
-	}
+    @Override
+    public InternalLog read() throws IOException {
+        if (rs == null) {
+            throw new IllegalArgumentException("ResultScanner must be initialized before reading");
+        }
+        InternalLog t = null;
+        Result r = rs.next();
+        if (r != null) {
+            t = HBaseInternalLogHelper.parse(ed, r, qualifiers);
+        }
+        return t;
+    }
 }
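
*Editor's note, not part of the patch:* `read()` above returns `null` once the scanner is exhausted, so callers loop until `null` and then close the reader. A hedged usage sketch against the `LogReader` interface (the `drain` helper is hypothetical, not an Eagle API):

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.eagle.log.entity.InternalLog;
import org.apache.eagle.log.entity.LogReader;

public class LogReaderUsageSketch {
    // Drains a reader into a list; read() returning null marks the end of the scan.
    public static List<InternalLog> drain(LogReader<InternalLog> reader) throws IOException {
        List<InternalLog> logs = new ArrayList<>();
        try {
            reader.open();
            InternalLog log;
            while ((log = reader.read()) != null) {
                logs.add(log);
            }
        } finally {
            reader.close(); // HBaseLogReader2.close() also releases the underlying ResultScanner
        }
        return logs;
    }
}
```
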
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java
index 059ee7f..1cf23b6 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/HBaseLogWriter.java
@@ -29,124 +29,125 @@
 import org.slf4j.LoggerFactory;
 
 public class HBaseLogWriter implements LogWriter {
-	private static Logger LOG = LoggerFactory.getLogger(HBaseLogWriter.class);
-	private static byte[] EMPTY_INDEX_QUALIFER_VALUE = "".getBytes();
-	
-	private HTableInterface tbl;
-	private String table;
-	private String columnFamily;
-	
-	public HBaseLogWriter(String table, String columnFamily) {
-		// TODO assert for non-null of table and columnFamily
-		this.table = table;
-		this.columnFamily = columnFamily;
-	}
-	
-	@Override
-	public void open() throws IOException {
-		try{
-			tbl = EagleConfigFactory.load().getHTable(this.table);
-//			LOGGER.info("HBase table " + table + " audo reflush is " + (tbl.isAutoFlush() ? "enabled" : "disabled"));
-		}catch(Exception ex){
-			LOG.error("Cannot create htable", ex);
-			throw new IOException(ex);
-		}
-	}
+    private static Logger LOG = LoggerFactory.getLogger(HBaseLogWriter.class);
+    private static byte[] EMPTY_INDEX_QUALIFER_VALUE = "".getBytes();
 
-	@Override
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-	}
+    private HTableInterface tbl;
+    private String table;
+    private String columnFamily;
 
-	@Override
-	public void flush() throws IOException {
-		tbl.flushCommits();
-	}
-	
-	protected void populateColumnValues(Put p, InternalLog log){
-		Map<String, byte[]> qualifierValues = log.getQualifierValues();
-		// iterate all qualifierValues
-		for(Map.Entry<String, byte[]> entry : qualifierValues.entrySet()){
-			p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue());
-		}
-		
-		Map<String, String> tags = log.getTags();
-		// iterate all tags, each tag will be stored as a column qualifier
-		if(tags != null){
-			for(Map.Entry<String, String> entry : tags.entrySet()){
-				// TODO need a consistent handling of null values
-				if(entry.getValue() != null)
-					p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue().getBytes());
-			}
-		}
-	}
+    public HBaseLogWriter(String table, String columnFamily) {
+        // TODO assert for non-null of table and columnFamily
+        this.table = table;
+        this.columnFamily = columnFamily;
+    }
 
-	/**
-	 * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
-	 */
-	@Override
-	public byte[] write(InternalLog log) throws IOException{
-		final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
-		final Put p = new Put(rowkey);
-		populateColumnValues(p, log);
-		tbl.put(p);
-		final List<byte[]> indexRowkeys = log.getIndexRowkeys();
-		if (indexRowkeys != null) {
-			writeIndexes(rowkey, indexRowkeys);
-		}
-		return rowkey;
-	}
+    @Override
+    public void open() throws IOException {
+        try {
+            tbl = EagleConfigFactory.load().getHTable(this.table);
+            // LOGGER.info("HBase table " + table + " audo reflush is " + (tbl.isAutoFlush() ? "enabled" :
+            // "disabled"));
+        } catch (Exception ex) {
+            LOG.error("Cannot create htable", ex);
+            throw new IOException(ex);
+        }
+    }
 
-	/**
-	 * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
-	 */
-	public List<byte[]> write(List<InternalLog> logs) throws IOException{
-		final List<Put> puts = new ArrayList<Put>(logs.size());
-		final List<byte[]> result = new ArrayList<byte[]>(logs.size());
-		for (InternalLog log : logs) {
-			final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
-			final Put p = new Put(rowkey);
-			populateColumnValues(p, log);
-			puts.add(p);
-			final List<byte[]> indexRowkeys = log.getIndexRowkeys();
-			if (indexRowkeys != null) {
-				writeIndexes(rowkey, indexRowkeys, puts);
-			}
-			result.add(rowkey);
-		}
-		tbl.put(puts);
-		return result;
-	}
-	
-	@Override
-	public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException{
-		Put p = new Put(rowkey);
-		populateColumnValues(p, log);
-		tbl.put(p);
-		final List<byte[]> indexRowkeys = log.getIndexRowkeys();
-		if (indexRowkeys != null) {
-			writeIndexes(rowkey, indexRowkeys);
-		}
-	}
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+    }
 
-	private void writeIndexes(byte[] rowkey, List<byte[]> indexRowkeys) throws IOException {
-		for (byte[] indexRowkey : indexRowkeys) {
-			Put p = new Put(indexRowkey);
-			p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE);
-			tbl.put(p);
-		}
-	}
+    @Override
+    public void flush() throws IOException {
+        tbl.flushCommits();
+    }
 
-	private void writeIndexes(byte[] rowkey, List<byte[]> indexRowkeys, List<Put> puts) throws IOException {
-		for (byte[] indexRowkey : indexRowkeys) {
-			Put p = new Put(indexRowkey);
-			p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE);
-			puts.add(p);
-//			tbl.put(p);
-		}
-	}
+    protected void populateColumnValues(Put p, InternalLog log) {
+        Map<String, byte[]> qualifierValues = log.getQualifierValues();
+        // iterate all qualifierValues
+        for (Map.Entry<String, byte[]> entry : qualifierValues.entrySet()) {
+            p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue());
+        }
 
-	
+        Map<String, String> tags = log.getTags();
+        // iterate all tags, each tag will be stored as a column qualifier
+        if (tags != null) {
+            for (Map.Entry<String, String> entry : tags.entrySet()) {
+                // TODO need a consistent handling of null values
+                if (entry.getValue() != null) {
+                    p.add(columnFamily.getBytes(), entry.getKey().getBytes(), entry.getValue().getBytes());
+                }
+            }
+        }
+    }
+
+    /**
+     * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
+     */
+    @Override
+    public byte[] write(InternalLog log) throws IOException {
+        final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
+        final Put p = new Put(rowkey);
+        populateColumnValues(p, log);
+        tbl.put(p);
+        final List<byte[]> indexRowkeys = log.getIndexRowkeys();
+        if (indexRowkeys != null) {
+            writeIndexes(rowkey, indexRowkeys);
+        }
+        return rowkey;
+    }
+
+    /**
+     * TODO need think about if multi-PUT is necessary, by checking if autoFlush works
+     */
+    public List<byte[]> write(List<InternalLog> logs) throws IOException {
+        final List<Put> puts = new ArrayList<Put>(logs.size());
+        final List<byte[]> result = new ArrayList<byte[]>(logs.size());
+        for (InternalLog log : logs) {
+            final byte[] rowkey = RowkeyBuilder.buildRowkey(log);
+            final Put p = new Put(rowkey);
+            populateColumnValues(p, log);
+            puts.add(p);
+            final List<byte[]> indexRowkeys = log.getIndexRowkeys();
+            if (indexRowkeys != null) {
+                writeIndexes(rowkey, indexRowkeys, puts);
+            }
+            result.add(rowkey);
+        }
+        tbl.put(puts);
+        return result;
+    }
+
+    @Override
+    public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException {
+        Put p = new Put(rowkey);
+        populateColumnValues(p, log);
+        tbl.put(p);
+        final List<byte[]> indexRowkeys = log.getIndexRowkeys();
+        if (indexRowkeys != null) {
+            writeIndexes(rowkey, indexRowkeys);
+        }
+    }
+
+    private void writeIndexes(byte[] rowkey, List<byte[]> indexRowkeys) throws IOException {
+        for (byte[] indexRowkey : indexRowkeys) {
+            Put p = new Put(indexRowkey);
+            p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE);
+            tbl.put(p);
+        }
+    }
+
+    private void writeIndexes(byte[] rowkey, List<byte[]> indexRowkeys, List<Put> puts) throws IOException {
+        for (byte[] indexRowkey : indexRowkeys) {
+            Put p = new Put(indexRowkey);
+            p.add(columnFamily.getBytes(), rowkey, EMPTY_INDEX_QUALIFER_VALUE);
+            puts.add(p);
+            // tbl.put(p);
+        }
+    }
+
 }
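
*Editor's note, not part of the patch:* `writeIndexes()` above stores each secondary-index entry as a `Put` on the index rowkey, using the data rowkey itself as the column qualifier with an empty value. A minimal sketch of that shape with the same HBase client call used in the diff (the column-family argument is a placeholder):

```java
import org.apache.hadoop.hbase.client.Put;

public class IndexPutSketch {
    private static final byte[] EMPTY_VALUE = "".getBytes();

    // Builds the same shape of Put as HBaseLogWriter.writeIndexes(): the data rowkey
    // becomes the qualifier under the index row, with an empty value.
    public static Put indexPut(byte[] indexRowkey, byte[] dataRowkey, String columnFamily) {
        Put p = new Put(indexRowkey);
        p.add(columnFamily.getBytes(), dataRowkey, EMPTY_VALUE);
        return p;
    }
}
```
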
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java
index 8276640..066401f 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/InternalLog.java
@@ -25,115 +25,134 @@
  * TODO we should decouple BaseLog during write time and BaseLog during read time
  */
 public class InternalLog {
-	private String encodedRowkey;
-	private String prefix;
-	private String[] partitions;
-	private long timestamp;
-	private Map<String, byte[]> qualifierValues;
+    private String encodedRowkey;
+    private String prefix;
+    private String[] partitions;
+    private long timestamp;
+    private Map<String, byte[]> qualifierValues;
 
-	private Map<String,Object> extraValues;
-	private Map<String, String> tags;
-	private Map<String, List<String>> searchTags;
-	private List<byte[]> indexRowkeys;
+    private Map<String, Object> extraValues;
+    private Map<String, String> tags;
+    private Map<String, List<String>> searchTags;
+    private List<byte[]> indexRowkeys;
 
-	public String getEncodedRowkey() {
-		return encodedRowkey;
-	}
+    public String getEncodedRowkey() {
+        return encodedRowkey;
+    }
 
-	public void setEncodedRowkey(String encodedRowkey) {
-		this.encodedRowkey = encodedRowkey;
-	}
-	
-	public Map<String, byte[]> getQualifierValues() {
-		return qualifierValues;
-	}
-	public void setQualifierValues(Map<String, byte[]> qualifierValues) {
-		this.qualifierValues = qualifierValues;
-	}
+    public void setEncodedRowkey(String encodedRowkey) {
+        this.encodedRowkey = encodedRowkey;
+    }
 
-	public Map<String, List<String>> getSearchTags() {
-		return searchTags;
-	}
-	public void setSearchTags(Map<String, List<String>> searchTags) {
-		this.searchTags = searchTags;
-	}
-	public String getPrefix() {
-		return prefix;
-	}
-	public void setPrefix(String prefix) {
-		this.prefix = prefix;
-	}
-	public String[] getPartitions() {
-		return partitions;
-	}
-	public void setPartitions(String[] partitions) {
-		this.partitions = partitions;
-	}
-	public long getTimestamp() {
-		return timestamp;
-	}
-	public void setTimestamp(long timestamp) {
-		this.timestamp = timestamp;
-	}
-	public Map<String, String> getTags() {
-		return tags;
-	}
-	public void setTags(Map<String, String> tags) {
-		this.tags = tags;
-	}
-	public List<byte[]> getIndexRowkeys() {
-		return indexRowkeys;
-	}
-	public void setIndexRowkeys(List<byte[]> indexRowkeys) {
-		this.indexRowkeys = indexRowkeys;
-	}
-	public Map<String, Object> getExtraValues() { return extraValues; }
-	public void setExtraValues(Map<String, Object> extraValues) { this.extraValues = extraValues; }
+    public Map<String, byte[]> getQualifierValues() {
+        return qualifierValues;
+    }
 
-	public String toString(){
-		StringBuffer sb = new StringBuffer();
-		sb.append(prefix);
-		sb.append("|");
-		sb.append(DateTimeUtil.millisecondsToHumanDateWithMilliseconds(timestamp));
-		sb.append("(");
-		sb.append(timestamp);
-		sb.append(")");
-		sb.append("|searchTags:");
-		if(searchTags != null){
-			for(String tagkey : searchTags.keySet()){
-				sb.append(tagkey);
-				sb.append('=');
-				List<String> tagValues = searchTags.get(tagkey);
-				sb.append("(");
-				for(String tagValue : tagValues){
-					sb.append(tagValue);
-					sb.append(",");
-				}
-				sb.append(")");
-				sb.append(",");
-			}
-		}
-		sb.append("|tags:");
-		if(tags != null){
-			for(Map.Entry<String, String> entry : tags.entrySet()){
-				sb.append(entry.getKey());
-				sb.append("=");
-				sb.append(entry.getValue());
-				sb.append(",");
-			}
-		}
-		sb.append("|columns:");
-		if(qualifierValues != null){
-			for(String qualifier : qualifierValues.keySet()){
-				byte[] value = qualifierValues.get(qualifier);
-				sb.append(qualifier);
-				sb.append("=");
-				if(value != null){
-					sb.append(new String(value));
-				}
-				sb.append(",");
-			}
-		}
-		return sb.toString();
-	}
+    public void setQualifierValues(Map<String, byte[]> qualifierValues) {
+        this.qualifierValues = qualifierValues;
+    }
+
+    public Map<String, List<String>> getSearchTags() {
+        return searchTags;
+    }
+
+    public void setSearchTags(Map<String, List<String>> searchTags) {
+        this.searchTags = searchTags;
+    }
+
+    public String getPrefix() {
+        return prefix;
+    }
+
+    public void setPrefix(String prefix) {
+        this.prefix = prefix;
+    }
+
+    public String[] getPartitions() {
+        return partitions;
+    }
+
+    public void setPartitions(String[] partitions) {
+        this.partitions = partitions;
+    }
+
+    public long getTimestamp() {
+        return timestamp;
+    }
+
+    public void setTimestamp(long timestamp) {
+        this.timestamp = timestamp;
+    }
+
+    public Map<String, String> getTags() {
+        return tags;
+    }
+
+    public void setTags(Map<String, String> tags) {
+        this.tags = tags;
+    }
+
+    public List<byte[]> getIndexRowkeys() {
+        return indexRowkeys;
+    }
+
+    public void setIndexRowkeys(List<byte[]> indexRowkeys) {
+        this.indexRowkeys = indexRowkeys;
+    }
+
+    public Map<String, Object> getExtraValues() {
+        return extraValues;
+    }
+
+    public void setExtraValues(Map<String, Object> extraValues) {
+        this.extraValues = extraValues;
+    }
+
+    @Override
+    public String toString() {
+        StringBuffer sb = new StringBuffer();
+        sb.append(prefix);
+        sb.append("|");
+        sb.append(DateTimeUtil.millisecondsToHumanDateWithMilliseconds(timestamp));
+        sb.append("(");
+        sb.append(timestamp);
+        sb.append(")");
+        sb.append("|searchTags:");
+        if (searchTags != null) {
+            for (String tagkey : searchTags.keySet()) {
+                sb.append(tagkey);
+                sb.append('=');
+                List<String> tagValues = searchTags.get(tagkey);
+                sb.append("(");
+                for (String tagValue : tagValues) {
+                    sb.append(tagValue);
+                    sb.append(",");
+                }
+                sb.append(")");
+                sb.append(",");
+            }
+        }
+        sb.append("|tags:");
+        if (tags != null) {
+            for (Map.Entry<String, String> entry : tags.entrySet()) {
+                sb.append(entry.getKey());
+                sb.append("=");
+                sb.append(entry.getValue());
+                sb.append(",");
+            }
+        }
+        sb.append("|columns:");
+        if (qualifierValues != null) {
+            for (String qualifier : qualifierValues.keySet()) {
+                byte[] value = qualifierValues.get(qualifier);
+                sb.append(qualifier);
+                sb.append("=");
+                if (value != null) {
+                    sb.append(new String(value));
+                }
+                sb.append(",");
+            }
+        }
+        return sb.toString();
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java
index 3f748d6..93e714b 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/ListQueryAPIResponseEntity.java
@@ -20,59 +20,77 @@
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
 /**
- * TODO: (hchen9) currently we disable firstTimestamp in response avoid breaking older client implementation, but we may need to remove "firstTimestamp" from @JsonIgnoreProperties(ignoreUnknown = true,value={"firstTimestamp"}) to enable the feature later
+ * TODO: (hchen9) currently we disable firstTimestamp in response avoid breaking older client implementation,
+ * but we may need to remove "firstTimestamp" from @JsonIgnoreProperties(ignoreUnknown =
+ * true,value={"firstTimestamp"}) to enable the feature later
  */
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
-@JsonIgnoreProperties(ignoreUnknown = true,value={"firstTimestamp"})
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
+@JsonIgnoreProperties(ignoreUnknown = true, value = {
+    "firstTimestamp"
+    })
 public class ListQueryAPIResponseEntity {
-	private boolean success;
-	private String exception;
-	private int totalResults;
-	private long elapsedms;
-	private long lastTimestamp;
-	private long firstTimestamp;
-	public long getFirstTimestamp() {
-		return firstTimestamp;
-	}
-	public void setFirstTimestamp(long firstTimestamp) {
-		this.firstTimestamp = firstTimestamp;
-	}
-	private Object obj;
+    private boolean success;
+    private String exception;
+    private int totalResults;
+    private long elapsedms;
+    private long lastTimestamp;
+    private long firstTimestamp;
 
-	public long getElapsedms() {
-		return elapsedms;
-	}
-	public void setElapsedms(long elapsedms) {
-		this.elapsedms = elapsedms;
-	}
-	public boolean isSuccess() {
-		return success;
-	}
-	public void setSuccess(boolean success) {
-		this.success = success;
-	}
-	public String getException() {
-		return exception;
-	}
-	public void setException(String exception) {
-		this.exception = exception;
-	}
-	public int getTotalResults() {
-		return totalResults;
-	}
-	public void setTotalResults(int totalResults) {
-		this.totalResults = totalResults;
-	}
-	public long getLastTimestamp() {
-		return lastTimestamp;
-	}
-	public void setLastTimestamp(long lastTimestamp) {
-		this.lastTimestamp = lastTimestamp;
-	}
-	public Object getObj() {
-		return obj;
-	}
-	public void setObj(Object obj) {
-		this.obj = obj;
-	}
-}
\ No newline at end of file
+    public long getFirstTimestamp() {
+        return firstTimestamp;
+    }
+
+    public void setFirstTimestamp(long firstTimestamp) {
+        this.firstTimestamp = firstTimestamp;
+    }
+
+    private Object obj;
+
+    public long getElapsedms() {
+        return elapsedms;
+    }
+
+    public void setElapsedms(long elapsedms) {
+        this.elapsedms = elapsedms;
+    }
+
+    public boolean isSuccess() {
+        return success;
+    }
+
+    public void setSuccess(boolean success) {
+        this.success = success;
+    }
+
+    public String getException() {
+        return exception;
+    }
+
+    public void setException(String exception) {
+        this.exception = exception;
+    }
+
+    public int getTotalResults() {
+        return totalResults;
+    }
+
+    public void setTotalResults(int totalResults) {
+        this.totalResults = totalResults;
+    }
+
+    public long getLastTimestamp() {
+        return lastTimestamp;
+    }
+
+    public void setLastTimestamp(long lastTimestamp) {
+        this.lastTimestamp = lastTimestamp;
+    }
+
+    public Object getObj() {
+        return obj;
+    }
+
+    public void setObj(Object obj) {
+        this.obj = obj;
+    }
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java
index da1e1ab..a0dd29a 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogReader.java
@@ -19,10 +19,11 @@
 import java.io.Closeable;
 import java.io.IOException;
 
-public interface LogReader<T> extends Closeable{
-	public void open() throws IOException;
+public interface LogReader<T> extends Closeable {
+    public void open() throws IOException;
 
-	public void close() throws IOException;
-	
-	public T read() throws IOException;
+    @Override
+    public void close() throws IOException;
+
+    public T read() throws IOException;
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java
index 6ef4ee3..9c10cd4 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/LogWriter.java
@@ -19,14 +19,15 @@
 import java.io.Closeable;
 import java.io.IOException;
 
-public interface LogWriter extends Closeable{
-	public void flush() throws IOException;
+public interface LogWriter extends Closeable {
+    public void flush() throws IOException;
 
-	public void open() throws IOException;
+    public void open() throws IOException;
 
-	public void close() throws IOException;
+    @Override
+    public void close() throws IOException;
 
-	public byte[] write(InternalLog log) throws IOException;
-	
-	public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException;
-}
\ No newline at end of file
+    public byte[] write(InternalLog log) throws IOException;
+
+    public void updateByRowkey(byte[] rowkey, InternalLog log) throws IOException;
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java
index a430393..4bf82e6 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/MetricMetadataEntity.java
@@ -28,90 +28,106 @@
 import org.apache.eagle.log.entity.meta.Table;
 import org.apache.eagle.log.entity.meta.TimeSeries;
 
-
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @Table("eagle_metric")
 @ColumnFamily("f")
 @Prefix("dmeta")
 @Service("MetricMetadataService")
 @TimeSeries(false)
 @Indexes({
-	@Index(name="Index_1_name", columns = { "name" }, unique = true)
-	})
+          @Index(name = "Index_1_name", columns = {
+                                                   "name"
+          }, unique = true)
+    })
 public class MetricMetadataEntity extends TaggedLogAPIEntity {
-	
-	@Column("a")
-	private String storeType;
-	@Column("b")
-	private String displayName;
-	@Column("c")
-	private String defaultDownSamplingFunction;
-	@Column("d")
-	private String defaultAggregateFunction;
-	@Column("e")
-	private String aggFunctions;
-	@Column("f")
-	private String downSamplingFunctions;
-	@Column("g")
-	private String resolutions;
-	@Column("h")
-	private String drillDownPaths;
-	
-	public String getStoreType() {
-		return storeType;
-	}
-	public void setStoreType(String storeType) {
-		this.storeType = storeType;
-		pcs.firePropertyChange("storeType", null, null);
-	}
-	public String getDisplayName() {
-		return displayName;
-	}
-	public void setDisplayName(String displayName) {
-		this.displayName = displayName;
-		pcs.firePropertyChange("displayName", null, null);
-	}
-	public String getDefaultDownSamplingFunction() {
-		return defaultDownSamplingFunction;
-	}
-	public void setDefaultDownSamplingFunction(String defaultDownSamplingFunction) {
-		this.defaultDownSamplingFunction = defaultDownSamplingFunction;
-		pcs.firePropertyChange("defaultDownSamplingFunction", null, null);
-	}
-	public String getDefaultAggregateFunction() {
-		return defaultAggregateFunction;
-	}
-	public void setDefaultAggregateFunction(String defaultAggregateFunction) {
-		this.defaultAggregateFunction = defaultAggregateFunction;
-		pcs.firePropertyChange("defaultAggregateFunction", null, null);
-	}
-	public String getAggFunctions() {
-		return aggFunctions;
-	}
-	public void setAggFunctions(String aggFunctions) {
-		this.aggFunctions = aggFunctions;
-		pcs.firePropertyChange("aggFunctions", null, null);
-	}
-	public String getDownSamplingFunctions() {
-		return downSamplingFunctions;
-	}
-	public void setDownSamplingFunctions(String downSamplingFunctions) {
-		this.downSamplingFunctions = downSamplingFunctions;
-		pcs.firePropertyChange("downSamplingFunctions", null, null);
-	}
-	public String getResolutions() {
-		return resolutions;
-	}
-	public void setResolutions(String resolutions) {
-		this.resolutions = resolutions;
-		pcs.firePropertyChange("resolutions", null, null);
-	}
-	public String getDrillDownPaths() {
-		return drillDownPaths;
-	}
-	public void setDrillDownPaths(String drillDownPaths) {
-		this.drillDownPaths = drillDownPaths;
-		pcs.firePropertyChange("drillDownPaths", null, null);
-	}
-	
+
+    @Column("a")
+    private String storeType;
+    @Column("b")
+    private String displayName;
+    @Column("c")
+    private String defaultDownSamplingFunction;
+    @Column("d")
+    private String defaultAggregateFunction;
+    @Column("e")
+    private String aggFunctions;
+    @Column("f")
+    private String downSamplingFunctions;
+    @Column("g")
+    private String resolutions;
+    @Column("h")
+    private String drillDownPaths;
+
+    public String getStoreType() {
+        return storeType;
+    }
+
+    public void setStoreType(String storeType) {
+        this.storeType = storeType;
+        pcs.firePropertyChange("storeType", null, null);
+    }
+
+    public String getDisplayName() {
+        return displayName;
+    }
+
+    public void setDisplayName(String displayName) {
+        this.displayName = displayName;
+        pcs.firePropertyChange("displayName", null, null);
+    }
+
+    public String getDefaultDownSamplingFunction() {
+        return defaultDownSamplingFunction;
+    }
+
+    public void setDefaultDownSamplingFunction(String defaultDownSamplingFunction) {
+        this.defaultDownSamplingFunction = defaultDownSamplingFunction;
+        pcs.firePropertyChange("defaultDownSamplingFunction", null, null);
+    }
+
+    public String getDefaultAggregateFunction() {
+        return defaultAggregateFunction;
+    }
+
+    public void setDefaultAggregateFunction(String defaultAggregateFunction) {
+        this.defaultAggregateFunction = defaultAggregateFunction;
+        pcs.firePropertyChange("defaultAggregateFunction", null, null);
+    }
+
+    public String getAggFunctions() {
+        return aggFunctions;
+    }
+
+    public void setAggFunctions(String aggFunctions) {
+        this.aggFunctions = aggFunctions;
+        pcs.firePropertyChange("aggFunctions", null, null);
+    }
+
+    public String getDownSamplingFunctions() {
+        return downSamplingFunctions;
+    }
+
+    public void setDownSamplingFunctions(String downSamplingFunctions) {
+        this.downSamplingFunctions = downSamplingFunctions;
+        pcs.firePropertyChange("downSamplingFunctions", null, null);
+    }
+
+    public String getResolutions() {
+        return resolutions;
+    }
+
+    public void setResolutions(String resolutions) {
+        this.resolutions = resolutions;
+        pcs.firePropertyChange("resolutions", null, null);
+    }
+
+    public String getDrillDownPaths() {
+        return drillDownPaths;
+    }
+
+    public void setDrillDownPaths(String drillDownPaths) {
+        this.drillDownPaths = drillDownPaths;
+        pcs.firePropertyChange("drillDownPaths", null, null);
+    }
+
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java
index b0eeaed..890540d 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierCreationListener.java
@@ -19,10 +19,10 @@
 import java.util.Map;
 
 public interface QualifierCreationListener {
-	/**
-	 * Qualifier <b>display name</b> mapped to qualifier value in bytes[]
-	 *
-	 * @param qualifiers
-	 */
-	public void qualifierCreated(Map<String, byte[]> qualifiers);
+    /**
+     * Qualifier <b>display name</b> mapped to qualifier value in bytes[]
+     *
+     * @param qualifiers
+     */
+    public void qualifierCreated(Map<String, byte[]> qualifiers);
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java
index 88135bb..1225ba7 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/QualifierNotDefinedException.java
@@ -16,13 +16,13 @@
  */
 package org.apache.eagle.log.entity;
 
-public class QualifierNotDefinedException extends Exception{
-	/**
-	 * 
-	 */
-	private static final long serialVersionUID = 1L;
+public class QualifierNotDefinedException extends Exception {
+    /**
+     *
+     */
+    private static final long serialVersionUID = 1L;
 
-	public QualifierNotDefinedException(String message){
-		super(message);
-	}
+    public QualifierNotDefinedException(String message) {
+        super(message);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java
index 5154cc4..2ef0680 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyBuilder.java
@@ -27,114 +27,122 @@
 import org.apache.eagle.common.ByteUtil;
 
 public class RowkeyBuilder {
-	
-	public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0;
-	
-	/**
-	 * Generate the internal sorted hashmap for tags. Please note the partition tags should not be included in the result map.
-	 * @param partitions array of partition tags in order
-	 * @param tags tags of the entity
-	 * @return the sorted hash map of the tags
-	 */
-	public static SortedMap<Integer, Integer> generateSortedTagMap(String[] partitions, Map<String, String> tags) {
-		final SortedMap<Integer, Integer> tagHashMap = new TreeMap<Integer, Integer>();
-		for (Map.Entry<String, String> entry: tags.entrySet()) {
-			final String tagName = entry.getKey();
-			final String tagValue = entry.getValue();
-			// If it's a partition tag, we need to remove it from tag hash list. It need to 
-			// put to the fix partition hash slot in rowkey.
-			if (tagValue == null || isPartitionTag(partitions, tagName))
-				continue;
-			tagHashMap.put(tagName.hashCode(), tagValue.hashCode());
-		}
-		return tagHashMap;
-	}
-	
-	/**
-	 * build rowkey from InternalLog object
-	 * @param log internal log entity to write
-	 * @return the rowkey of the entity
-	 */
-	public static byte[] buildRowkey(InternalLog log) {
-		final String[] partitions = log.getPartitions();
-		final Map<String, String> tags = log.getTags();
-		final SortedMap<Integer, Integer> tagHashMap = generateSortedTagMap(partitions, tags);
-		
-		// reverse timestamp
-		long ts = Long.MAX_VALUE - log.getTimestamp();
-		
-		List<Integer> partitionHashValues = new ArrayList<Integer>();
-		if (partitions != null) {
-			for (String partition : partitions) {
-				final String tagValue = tags.get(partition);
-				if (tagValue != null) {
-					partitionHashValues.add(tagValue.hashCode());
-				} else {
-					partitionHashValues.add(EMPTY_PARTITION_DEFAULT_HASH_CODE);
-				}
-			}
-		}
-		return buildRowkey(log.getPrefix().hashCode(), partitionHashValues, ts, tagHashMap);
-	}
-	
-	public static long getTimestamp(byte[] rowkey, EntityDefinition ed) {
-		if (!ed.isTimeSeries()) {
-			return EntityConstants.FIXED_WRITE_TIMESTAMP;
-		}
-		final int offset = (ed.getPartitions() == null) ? 4 : (4 + ed.getPartitions().length * 4);
-		return Long.MAX_VALUE - ByteUtil.bytesToLong(rowkey, offset);
-	}
-	
-	/**
-	 * Check if the tagName is one of the partition tags
-	 * @param partitions paritition tags of the entity
-	 * @param tagName the tag name that needs to check
-	 * @return
-	 */
-	private static boolean isPartitionTag(String[] partitions, String tagName) {
-		if (partitions != null) {
-			for (String partition : partitions) {
-				if (partition.equals(tagName)) {
-					return true;
-				}
-			}
-		}
-		return false;
-	}
 
-	/**
-	 * rowkey is: prefixHash:4 + (partitionValueHash:4)* + timestamp:8 + (tagnameHash:4 + tagvalueHash:4)*
-	 * partition fields are sorted by partition definition order, while tag fields are sorted by tag name's 
-	 * hash code values. 
-	 */
-	private static byte[] buildRowkey(int prefixHash, List<Integer> partitionHashValues, long timestamp, SortedMap<Integer, Integer> tags){
-		// allocate byte array for rowkey
-		final int len = 4 + 8 + tags.size() * (4 + 4) + (partitionHashValues.size() * 4);
-		final byte[] rowkey = new byte[len];
-		int offset = 0;
+    public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0;
 
-		// 1. set prefix
-		ByteUtil.intToBytes(prefixHash, rowkey, offset);
-		offset += 4;
-		
-		// 2. set partition
-		for (Integer partHash : partitionHashValues) {
-			ByteUtil.intToBytes(partHash, rowkey, offset);
-			offset += 4;
-		}
-		
-		// 3. set timestamp
-		ByteUtil.longToBytes(timestamp, rowkey, offset);
-		offset += 8;
+    /**
+     * Generate the internal sorted hashmap for tags. Please note the partition tags should not be included in
+     * the result map.
+     *
+     * @param partitions array of partition tags in order
+     * @param tags tags of the entity
+     * @return the sorted hash map of the tags
+     */
+    public static SortedMap<Integer, Integer> generateSortedTagMap(String[] partitions,
+                                                                   Map<String, String> tags) {
+        final SortedMap<Integer, Integer> tagHashMap = new TreeMap<Integer, Integer>();
+        for (Map.Entry<String, String> entry : tags.entrySet()) {
+            final String tagName = entry.getKey();
+            final String tagValue = entry.getValue();
+            // If it's a partition tag, we need to remove it from the tag hash list. It needs to
+            // go into the fixed partition hash slot in the rowkey instead.
+            if (tagValue == null || isPartitionTag(partitions, tagName)) {
+                continue;
+            }
+            tagHashMap.put(tagName.hashCode(), tagValue.hashCode());
+        }
+        return tagHashMap;
+    }
 
-		// 4. set tag key/value hashes
-		for (Map.Entry<Integer, Integer> entry : tags.entrySet()) {
-			ByteUtil.intToBytes(entry.getKey(), rowkey, offset);
-			offset += 4;
-			ByteUtil.intToBytes(entry.getValue(), rowkey, offset);
-			offset += 4;
-		}
-		
-		return rowkey;
-	}
+    /**
+     * build rowkey from InternalLog object
+     *
+     * @param log internal log entity to write
+     * @return the rowkey of the entity
+     */
+    public static byte[] buildRowkey(InternalLog log) {
+        final String[] partitions = log.getPartitions();
+        final Map<String, String> tags = log.getTags();
+        final SortedMap<Integer, Integer> tagHashMap = generateSortedTagMap(partitions, tags);
+
+        // reverse timestamp
+        long ts = Long.MAX_VALUE - log.getTimestamp();
+
+        List<Integer> partitionHashValues = new ArrayList<Integer>();
+        if (partitions != null) {
+            for (String partition : partitions) {
+                final String tagValue = tags.get(partition);
+                if (tagValue != null) {
+                    partitionHashValues.add(tagValue.hashCode());
+                } else {
+                    partitionHashValues.add(EMPTY_PARTITION_DEFAULT_HASH_CODE);
+                }
+            }
+        }
+        return buildRowkey(log.getPrefix().hashCode(), partitionHashValues, ts, tagHashMap);
+    }
+
+    /**
+     * rowkey is: prefixHash:4 + (partitionValueHash:4)* + timestamp:8 + (tagnameHash:4 + tagvalueHash:4)*
+     * partition fields are sorted by partition definition order, while tag fields are sorted by tag name's
+     * hash code values.
+     */
+    private static byte[] buildRowkey(int prefixHash, List<Integer> partitionHashValues, long timestamp,
+                                      SortedMap<Integer, Integer> tags) {
+        // allocate byte array for rowkey
+        final int len = 4 + 8 + tags.size() * (4 + 4) + (partitionHashValues.size() * 4);
+        final byte[] rowkey = new byte[len];
+        int offset = 0;
+
+        // 1. set prefix
+        ByteUtil.intToBytes(prefixHash, rowkey, offset);
+        offset += 4;
+
+        // 2. set partition
+        for (Integer partHash : partitionHashValues) {
+            ByteUtil.intToBytes(partHash, rowkey, offset);
+            offset += 4;
+        }
+
+        // 3. set timestamp
+        ByteUtil.longToBytes(timestamp, rowkey, offset);
+        offset += 8;
+
+        // 4. set tag key/value hashes
+        for (Map.Entry<Integer, Integer> entry : tags.entrySet()) {
+            ByteUtil.intToBytes(entry.getKey(), rowkey, offset);
+            offset += 4;
+            ByteUtil.intToBytes(entry.getValue(), rowkey, offset);
+            offset += 4;
+        }
+
+        return rowkey;
+    }
+
+    public static long getTimestamp(byte[] rowkey, EntityDefinition ed) {
+        if (!ed.isTimeSeries()) {
+            return EntityConstants.FIXED_WRITE_TIMESTAMP;
+        }
+        final int offset = (ed.getPartitions() == null) ? 4 : (4 + ed.getPartitions().length * 4);
+        return Long.MAX_VALUE - ByteUtil.bytesToLong(rowkey, offset);
+    }
+
+    /**
+     * Check if the tagName is one of the partition tags.
+     *
+     * @param partitions partition tags of the entity
+     * @param tagName the tag name to check
+     * @return true if tagName is one of the partition tags, false otherwise
+     */
+    private static boolean isPartitionTag(String[] partitions, String tagName) {
+        if (partitions != null) {
+            for (String partition : partitions) {
+                if (partition.equals(tagName)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
 }
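Note (illustrative, not part of the patch): the rowkey layout documented in `buildRowkey` above is `prefixHash:4 + (partitionValueHash:4)* + reversedTimestamp:8 + (tagnameHash:4 + tagvalueHash:4)*`. A minimal standalone sketch of the same arithmetic, using `java.nio.ByteBuffer` instead of Eagle's `ByteUtil`, with made-up prefix, partition, and tag values:

```java
import java.nio.ByteBuffer;
import java.util.SortedMap;
import java.util.TreeMap;

public class RowkeyLayoutSketch {
    public static void main(String[] args) {
        int prefixHash = "exampleService".hashCode();          // hypothetical prefix
        int[] partitionHashes = {"cluster1".hashCode()};       // one partition tag value
        long reversedTs = Long.MAX_VALUE - 1467767182000L;     // reversed timestamp, as in buildRowkey

        SortedMap<Integer, Integer> tagHashes = new TreeMap<>();
        tagHashes.put("host".hashCode(), "node-1".hashCode()); // sorted by tag-name hash

        // len = prefix(4) + partitions(4 each) + timestamp(8) + tags(4 + 4 each)
        int len = 4 + partitionHashes.length * 4 + 8 + tagHashes.size() * 8;
        ByteBuffer rowkey = ByteBuffer.allocate(len);
        rowkey.putInt(prefixHash);
        for (int p : partitionHashes) {
            rowkey.putInt(p);
        }
        rowkey.putLong(reversedTs);
        tagHashes.forEach((k, v) -> {
            rowkey.putInt(k);
            rowkey.putInt(v);
        });

        // Reading the timestamp back follows the same offset rule as getTimestamp():
        // skip prefix(4) + partitions(4 * n), then undo the Long.MAX_VALUE reversal.
        int tsOffset = 4 + partitionHashes.length * 4;
        long ts = Long.MAX_VALUE - rowkey.getLong(tsOffset);
        System.out.println("rowkey length = " + len + ", decoded timestamp = " + ts);
    }
}
```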
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java
index 953d12b..6189983 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/RowkeyQueryAPIResponseEntity.java
@@ -18,28 +18,33 @@
 
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 public class RowkeyQueryAPIResponseEntity {
-	private boolean success;
-	private String exception;
-	private Object obj;
+    private boolean success;
+    private String exception;
+    private Object obj;
 
-	public boolean isSuccess() {
-		return success;
-	}
-	public void setSuccess(boolean success) {
-		this.success = success;
-	}
-	public String getException() {
-		return exception;
-	}
-	public void setException(String exception) {
-		this.exception = exception;
-	}
-	public Object getObj() {
-		return obj;
-	}
-	public void setObj(Object obj) {
-		this.obj = obj;
-	}
-}
\ No newline at end of file
+    public boolean isSuccess() {
+        return success;
+    }
+
+    public void setSuccess(boolean success) {
+        this.success = success;
+    }
+
+    public String getException() {
+        return exception;
+    }
+
+    public void setException(String exception) {
+        this.exception = exception;
+    }
+
+    public Object getObj() {
+        return obj;
+    }
+
+    public void setObj(Object obj) {
+        this.obj = obj;
+    }
+}
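Note (illustrative, not part of the patch): because of `JsonSerialize.Inclusion.NON_NULL`, null fields of the response entity are omitted when it is serialized. A minimal sketch, assuming Jackson databind and this class are on the classpath; the printed JSON shape is indicative only:

```java
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.eagle.log.entity.RowkeyQueryAPIResponseEntity;

public class ResponseSerializationSketch {
    public static void main(String[] args) throws Exception {
        RowkeyQueryAPIResponseEntity response = new RowkeyQueryAPIResponseEntity();
        response.setSuccess(true);
        response.setObj("a1b2c3");   // e.g. an encoded rowkey (made-up value)
        // 'exception' stays null, so NON_NULL inclusion drops it from the output

        String json = new ObjectMapper().writeValueAsString(response);
        System.out.println(json);    // e.g. {"success":true,"obj":"a1b2c3"}
    }
}
```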
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java
index 68db2c4..e9c0dca 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/SearchCondition.java
@@ -23,126 +23,138 @@
 import java.util.Map;
 
 /**
- * search condition includes the following:
- * 1. prefix - part of rowkey
- * 2. startTime,endTime - timestamp, part of rowkey
- * 3. hbase filter converted from query 
- * 4. aggregate parameters
- * 4. sort options
- * 5. output fields and tags
- * 6. entityName
- * 7. pagination: pageSize and startRowkey
+ * A search condition includes the following: 1. prefix - part of rowkey 2. startTime,endTime - timestamp,
+ * part of rowkey 3. hbase filter converted from query 4. aggregate parameters 5. sort options 6. output
+ * fields and tags 7. entityName 8. pagination: pageSize and startRowkey
  */
-public class SearchCondition{
-	private long startTime;
-	private long endTime;
-	private Filter filter;
-	private List<String> outputFields;
-	private boolean outputAll;
-	private long pageSize;
-	private String startRowkey;
-	private String entityName;
-	private List<String> partitionValues;
-	private ORExpression queryExpression;
+public class SearchCondition {
+    private long startTime;
+    private long endTime;
+    private Filter filter;
+    private List<String> outputFields;
+    private boolean outputAll;
+    private long pageSize;
+    private String startRowkey;
+    private String entityName;
+    private List<String> partitionValues;
+    private ORExpression queryExpression;
 
-	public boolean isOutputVerbose() {
-		return outputVerbose;
-	}
+    public boolean isOutputVerbose() {
+        return outputVerbose;
+    }
 
-	public void setOutputVerbose(boolean outputVerbose) {
-		this.outputVerbose = outputVerbose;
-	}
+    public void setOutputVerbose(boolean outputVerbose) {
+        this.outputVerbose = outputVerbose;
+    }
 
-	public Map<String, String> getOutputAlias() {
-		return outputAlias;
-	}
+    public Map<String, String> getOutputAlias() {
+        return outputAlias;
+    }
 
-	public void setOutputAlias(Map<String, String> outputAlias) {
-		this.outputAlias = outputAlias;
-	}
+    public void setOutputAlias(Map<String, String> outputAlias) {
+        this.outputAlias = outputAlias;
+    }
 
-	private boolean outputVerbose;
-	private Map<String,String> outputAlias;
+    private boolean outputVerbose;
+    private Map<String, String> outputAlias;
 
-	/**
-	 * copy constructor
-	 * @param sc
-	 */
-	public SearchCondition(SearchCondition sc){
-		this.startTime = sc.startTime;
-		this.endTime = sc.endTime;
-		this.filter = sc.filter;
-		this.outputFields = sc.outputFields;
-		this.pageSize = sc.pageSize;
-		this.startRowkey = sc.startRowkey;
-		this.entityName = sc.entityName;
-		this.partitionValues = sc.partitionValues;
-		this.queryExpression = sc.queryExpression;
-	}
-	
-	public SearchCondition(){
-	}
-	
-	public Filter getFilter() {
-		return filter;
-	}
-	public void setFilter(Filter filter) {
-		this.filter = filter;
-	}
-	public long getPageSize() {
-		return pageSize;
-	}
-	public void setPageSize(long pageSize) {
-		this.pageSize = pageSize;
-	}
-	public String getStartRowkey() {
-		return startRowkey;
-	}
-	public void setStartRowkey(String startRowkey) {
-		this.startRowkey = startRowkey;
-	}
-	public String getEntityName() {
-		return entityName;
-	}
-	public void setEntityName(String entityName) {
-		this.entityName = entityName;
-	}
-	public List<String> getOutputFields() {
-		return outputFields;
-	}
-	public void setOutputFields(List<String> outputFields) {
-		this.outputFields = outputFields;
-	}
-	public long getStartTime() {
-		return startTime;
-	}
-	public void setStartTime(long startTime) {
-		this.startTime = startTime;
-	}
-	public long getEndTime() {
-		return endTime;
-	}
-	public void setEndTime(long endTime) {
-		this.endTime = endTime;
-	}
-	public List<String> getPartitionValues() {
-		return partitionValues;
-	}
-	public void setPartitionValues(List<String> partitionValues) {
-		this.partitionValues = partitionValues;
-	}
-	public ORExpression getQueryExpression() {
-		return queryExpression;
-	}
-	public void setQueryExpression(ORExpression queryExpression) {
-		this.queryExpression = queryExpression;
-	}
+    /**
+     * Copy constructor.
+     *
+     * @param sc the search condition to copy
+     */
+    public SearchCondition(SearchCondition sc) {
+        this.startTime = sc.startTime;
+        this.endTime = sc.endTime;
+        this.filter = sc.filter;
+        this.outputFields = sc.outputFields;
+        this.pageSize = sc.pageSize;
+        this.startRowkey = sc.startRowkey;
+        this.entityName = sc.entityName;
+        this.partitionValues = sc.partitionValues;
+        this.queryExpression = sc.queryExpression;
+    }
 
-	public boolean isOutputAll() {
-		return outputAll;
-	}
+    public SearchCondition() {
+    }
 
-	public void setOutputAll(boolean outputAll) {
-		this.outputAll = outputAll;
-	}
+    public Filter getFilter() {
+        return filter;
+    }
+
+    public void setFilter(Filter filter) {
+        this.filter = filter;
+    }
+
+    public long getPageSize() {
+        return pageSize;
+    }
+
+    public void setPageSize(long pageSize) {
+        this.pageSize = pageSize;
+    }
+
+    public String getStartRowkey() {
+        return startRowkey;
+    }
+
+    public void setStartRowkey(String startRowkey) {
+        this.startRowkey = startRowkey;
+    }
+
+    public String getEntityName() {
+        return entityName;
+    }
+
+    public void setEntityName(String entityName) {
+        this.entityName = entityName;
+    }
+
+    public List<String> getOutputFields() {
+        return outputFields;
+    }
+
+    public void setOutputFields(List<String> outputFields) {
+        this.outputFields = outputFields;
+    }
+
+    public long getStartTime() {
+        return startTime;
+    }
+
+    public void setStartTime(long startTime) {
+        this.startTime = startTime;
+    }
+
+    public long getEndTime() {
+        return endTime;
+    }
+
+    public void setEndTime(long endTime) {
+        this.endTime = endTime;
+    }
+
+    public List<String> getPartitionValues() {
+        return partitionValues;
+    }
+
+    public void setPartitionValues(List<String> partitionValues) {
+        this.partitionValues = partitionValues;
+    }
+
+    public ORExpression getQueryExpression() {
+        return queryExpression;
+    }
+
+    public void setQueryExpression(ORExpression queryExpression) {
+        this.queryExpression = queryExpression;
+    }
+
+    public boolean isOutputAll() {
+        return outputAll;
+    }
+
+    public void setOutputAll(boolean outputAll) {
+        this.outputAll = outputAll;
+    }
 }
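Note (illustrative, not part of the patch): `SearchCondition` is a plain holder for query parameters. A minimal sketch of how a caller might populate it for a time-bounded query; the entity and field names below are made up:

```java
import org.apache.eagle.log.entity.SearchCondition;
import java.util.Arrays;

public class SearchConditionSketch {
    public static void main(String[] args) {
        SearchCondition condition = new SearchCondition();
        condition.setEntityName("TestTimeSeriesAPIEntity");   // hypothetical entity name
        condition.setStartTime(System.currentTimeMillis() - 3_600_000L);
        condition.setEndTime(System.currentTimeMillis());
        condition.setPageSize(100);
        condition.setOutputFields(Arrays.asList("field1", "field2"));
        condition.setOutputAll(false);

        // The copy constructor shown above duplicates the query fields but not
        // outputAll/outputVerbose/outputAlias, which callers set separately.
        SearchCondition copy = new SearchCondition(condition);
        System.out.println(copy.getEntityName() + " " + copy.getPageSize());
    }
}
```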
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java
index 005a2e2..36aa0b6 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/StreamReader.java
@@ -20,36 +20,39 @@
 import java.util.List;
 
 public abstract class StreamReader {
-	protected List<EntityCreationListener> _listeners = new ArrayList<EntityCreationListener>();
+    protected List<EntityCreationListener> listeners = new ArrayList<EntityCreationListener>();
 
-	/**
-	 * Listener can be only notified after it is added to listener list
-	 * @param listener
-	 */
-	public synchronized void register(EntityCreationListener listener){
-		_listeners.add(listener);
-	}
-	
-	/**
-	 * Listener can not get notification once after it is removed from listener list
-	 * @param listener
-	 */
-	public synchronized void unregister(EntityCreationListener listener){
-		_listeners.remove(listener);
-	}
-	
-	public abstract void readAsStream() throws Exception;
-	
-	/**
-	 * Get scanned last entity timestamp
-	 * 
-	 * @return
-	 */
-	public abstract long getLastTimestamp();
-	
-	/**
-	 * Get scanned first entity timestamp
-	 * @return
-	 */
-	public abstract long getFirstTimestamp();
-}
\ No newline at end of file
+    /**
+     * A listener can only be notified after it has been added to the listener list.
+     *
+     * @param listener the listener to register
+     */
+    public synchronized void register(EntityCreationListener listener) {
+        listeners.add(listener);
+    }
+
+    /**
+     * A listener no longer receives notifications once it has been removed from the listener list.
+     *
+     * @param listener the listener to unregister
+     */
+    public synchronized void unregister(EntityCreationListener listener) {
+        listeners.remove(listener);
+    }
+
+    public abstract void readAsStream() throws Exception;
+
+    /**
+     * Get the timestamp of the last scanned entity.
+     *
+     * @return the last scanned entity timestamp
+     */
+    public abstract long getLastTimestamp();
+
+    /**
+     * Get the timestamp of the first scanned entity.
+     *
+     * @return the first scanned entity timestamp
+     */
+    public abstract long getFirstTimestamp();
+}
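Note (illustrative, not part of the patch): a minimal sketch of a `StreamReader` subclass wired against the abstract contract above. A real reader would scan HBase and notify the listeners registered via `register(...)`; this stub only records a fake time window:

```java
import org.apache.eagle.log.entity.StreamReader;

// Illustrative only: records a fake one-minute window instead of scanning anything.
public class NoopStreamReader extends StreamReader {
    private long firstTimestamp;
    private long lastTimestamp;

    @Override
    public void readAsStream() throws Exception {
        // A real reader would iterate scan results here and notify the
        // registered EntityCreationListeners for each created entity.
        lastTimestamp = System.currentTimeMillis();
        firstTimestamp = lastTimestamp - 60_000L;
    }

    @Override
    public long getLastTimestamp() {
        return lastTimestamp;
    }

    @Override
    public long getFirstTimestamp() {
        return firstTimestamp;
    }
}
```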
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java
index 0d71e10..6ecf93c 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/BooleanExpressionComparator.java
@@ -37,15 +37,11 @@
 import java.util.*;
 
 /**
- * BooleanExpressionComparator
- *
- * Currently support double expression only.
- *
- * TODO: 1) thread-safe? 2) Rewrite filter expression to evaluate once
- *
+ * BooleanExpressionComparator Currently support double expression only. TODO: 1) thread-safe? 2) Rewrite
+ * filter expression to evaluate once
  */
 public class BooleanExpressionComparator implements WritableComparable<List<KeyValue>> {
-    private final static Logger LOG = LoggerFactory.getLogger(BooleanExpressionComparator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(BooleanExpressionComparator.class);
 
     // Should be Writable
     private QualifierFilterEntity filterEntity;
@@ -62,22 +58,23 @@
 
     private Set<String> requiredFields = new HashSet<String>();
 
-    public BooleanExpressionComparator(){}
+    public BooleanExpressionComparator() {
+    }
 
-    public BooleanExpressionComparator(QualifierFilterEntity entity,EntityDefinition ed){
-        this.filterEntity  = entity;
+    public BooleanExpressionComparator(QualifierFilterEntity entity, EntityDefinition ed) {
+        this.filterEntity = entity;
         this.ed = ed;
         try {
             this.init();
         } catch (Exception ex) {
             // Client side expression validation to fast fail if having error
-            LOG.error("Got exception: "+ex.getMessage(),ex);
-            throw new ExpressionEvaluationException(ex.getMessage(),ex);
+            LOG.error("Got exception: " + ex.getMessage(), ex);
+            throw new ExpressionEvaluationException(ex.getMessage(), ex);
         }
     }
 
     private void init() throws ParsiiInvalidException, ParseException {
-        LOG.info("Filter expression: "+filterEntity.toString());
+        LOG.info("Filter expression: " + filterEntity.toString());
         if (filterEntity.getKey() != null) {
             if (filterEntity.getKeyType() == TokenType.NUMBER) {
                 leftValue = Double.parseDouble(filterEntity.getKey());
@@ -100,11 +97,14 @@
             throw new IllegalStateException("QualifierFilterEntity value is null");
         }
 
-        if (this.filterEntity.getOp() == null)
+        if (this.filterEntity.getOp() == null) {
             throw new IllegalStateException("QualifierFilterEntity op is null");
+        }
         this.func = _opExprFuncMap.get(this.filterEntity.getOp());
-        if (this.func == null)
-            throw new IllegalStateException("No boolean evaluation function found for operation: " + this.filterEntity.getOp());
+        if (this.func == null) {
+            throw new IllegalStateException("No boolean evaluation function found for operation: "
+                                            + this.filterEntity.getOp());
+        }
     }
 
     /**
@@ -114,24 +114,26 @@
      * @return evaluation result as true (1) or false (0)
      * @throws Exception
      */
-    private boolean eval(Map<String,Double> context) throws Exception {
-        if(filterEntity.getKeyType() != TokenType.NUMBER){
-            leftValue = eval(filterEntity.getKey(),context);
+    private boolean eval(Map<String, Double> context) throws Exception {
+        if (filterEntity.getKeyType() != TokenType.NUMBER) {
+            leftValue = eval(filterEntity.getKey(), context);
         }
-        if(filterEntity.getValueType() != TokenType.NUMBER){
-            rightValue = eval(filterEntity.getValue(),context);
+        if (filterEntity.getValueType() != TokenType.NUMBER) {
+            rightValue = eval(filterEntity.getValue(), context);
         }
-        if(Double.isInfinite(leftValue) || Double.isInfinite(rightValue)){
-//            if(LOG.isDebugEnabled()) {
+        if (Double.isInfinite(leftValue) || Double.isInfinite(rightValue)) {
+            // if(LOG.isDebugEnabled()) {
             if (Double.isInfinite(leftValue)) {
-                LOG.warn("Evaluation result of key: " + this.filterEntity.getKey() + " is " + leftValue + " (Infinite), ignore");
+                LOG.warn("Evaluation result of key: " + this.filterEntity.getKey() + " is " + leftValue
+                         + " (Infinite), ignore");
             } else {
-                LOG.warn("Evaluation result of value: "+this.filterEntity.getValue()+" is "+rightValue+" (Infinite), ignore");
+                LOG.warn("Evaluation result of value: " + this.filterEntity.getValue() + " is " + rightValue
+                         + " (Infinite), ignore");
             }
-//            }
+            // }
             return false;
         }
-        return func.eval(leftValue,rightValue);
+        return func.eval(leftValue, rightValue);
     }
 
     /**
@@ -142,38 +144,40 @@
      * @return
      * @throws Exception
      */
-    private double eval(String expr,Map<String,Double> context) throws Exception {
+    private double eval(String expr, Map<String, Double> context) throws Exception {
         return ExpressionParser.parse(expr).eval(context);
     }
 
     /**
-     *
      * @param row List[KeyValue] All key values in a row
-     *
      * @return 0 to filter out row [false], otherwise to include row into scanner [true]
      */
     @Override
     public int compareTo(List<KeyValue> row) {
-        Map<String,Double> context = new HashMap<String, Double>();
-        for(KeyValue kv:row){
+        Map<String, Double> context = new HashMap<String, Double>();
+        for (KeyValue kv : row) {
             String qualifierName = new String(kv.getQualifier());
 
             // Because assume just handle about double value
             // so ignore tag whose value is String
-            if(!this.ed.isTag(qualifierName)){
+            if (!this.ed.isTag(qualifierName)) {
                 Qualifier qualifier = this.ed.getQualifierNameMap().get(qualifierName);
                 String displayName = qualifier.getDisplayName();
-                if(displayName == null) displayName = qualifierName;
+                if (displayName == null) {
+                    displayName = qualifierName;
+                }
                 try {
-                    if(this.requiredFields.contains(displayName)) {
+                    if (this.requiredFields.contains(displayName)) {
                         EntitySerDeser serDeser = qualifier.getSerDeser();
-                        double value = EntityQualifierUtils.convertObjToDouble(serDeser.deserialize(kv.getValue()));
+                        double value = EntityQualifierUtils
+                            .convertObjToDouble(serDeser.deserialize(kv.getValue()));
                         if (Double.isNaN(value)) {
                             context.put(displayName, value);
                         }
                     }
-                }catch (Exception ex){
-                    LOG.warn("Failed to parse value of field "+displayName+" as double, ignore: "+ex.getMessage(),ex);
+                } catch (Exception ex) {
+                    LOG.warn("Failed to parse value of field " + displayName + " as double, ignore: "
+                             + ex.getMessage(), ex);
                 }
             }
         }
@@ -182,22 +186,22 @@
 
     /**
      * @param context Map[String,Double]
-     *
      * @return context.keySet().containsAll(this.requiredFields) && eval(context) ? 1:0;
     */
-    int compareTo(Map<String,Double> context){
+    int compareTo(Map<String, Double> context) {
         try {
-            if(context.keySet().containsAll(this.requiredFields)){
-                return eval(context)? 1:0;
-            }else{
-                if(LOG.isDebugEnabled()) {
-                    LOG.debug("Require variables: [" + StringUtils.join(this.requiredFields, ",") + "], but just given: [" + StringUtils.join(context.keySet(), ",") + "]");
+            if (context.keySet().containsAll(this.requiredFields)) {
+                return eval(context) ? 1 : 0;
+            } else {
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Require variables: [" + StringUtils.join(this.requiredFields, ",")
+                              + "], but just given: [" + StringUtils.join(context.keySet(), ",") + "]");
                 }
                 return 0;
             }
         } catch (Exception e) {
-            LOG.error(e.getMessage(),e);
-            throw new ExpressionEvaluationException(e.getMessage(),e);
+            LOG.error(e.getMessage(), e);
+            throw new ExpressionEvaluationException(e.getMessage(), e);
         }
     }
 
@@ -216,31 +220,31 @@
 
         try {
             this.init();
-        } catch (Exception ex){
-            LOG.error("Got exception: "+ex.getMessage(),ex);
-            throw new IOException(ex.getMessage(),ex);
+        } catch (Exception ex) {
+            LOG.error("Got exception: " + ex.getMessage(), ex);
+            throw new IOException(ex.getMessage(), ex);
        }
     }
 
-    private static Map<ComparisonOperator,BooleanExprFunc> _opExprFuncMap = new HashMap<ComparisonOperator, BooleanExprFunc>();
+    private static Map<ComparisonOperator, BooleanExprFunc> _opExprFuncMap = new HashMap<ComparisonOperator, BooleanExprFunc>();
 
     static {
-        _opExprFuncMap.put(ComparisonOperator.EQUAL,new EqualExprFunc());
-        _opExprFuncMap.put(ComparisonOperator.IS,new EqualExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.EQUAL, new EqualExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.IS, new EqualExprFunc());
 
-        _opExprFuncMap.put(ComparisonOperator.NOT_EQUAL,new NotEqualExprFunc());
-        _opExprFuncMap.put(ComparisonOperator.IS_NOT,new NotEqualExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.NOT_EQUAL, new NotEqualExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.IS_NOT, new NotEqualExprFunc());
 
-        _opExprFuncMap.put(ComparisonOperator.LESS,new LessExprFunc());
-        _opExprFuncMap.put(ComparisonOperator.LESS_OR_EQUAL,new LessOrEqualExprFunc());
-        _opExprFuncMap.put(ComparisonOperator.GREATER,new GreaterExprFunc());
-        _opExprFuncMap.put(ComparisonOperator.GREATER_OR_EQUAL,new GreaterOrEqualExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.LESS, new LessExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.LESS_OR_EQUAL, new LessOrEqualExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.GREATER, new GreaterExprFunc());
+        _opExprFuncMap.put(ComparisonOperator.GREATER_OR_EQUAL, new GreaterOrEqualExprFunc());
 
         // "Life should be much better with functional programming language" - Hao Chen Nov 18th, 2014
     }
 
     private static interface BooleanExprFunc {
-        boolean eval(double val1,double val2);
+        boolean eval(double val1, double val2);
     }
 
     private static class EqualExprFunc implements BooleanExprFunc {
@@ -249,6 +253,7 @@
             return val1 == val2;
         }
     }
+
     private static class NotEqualExprFunc implements BooleanExprFunc {
         @Override
         public boolean eval(double val1, double val2) {
@@ -262,18 +267,21 @@
             return val1 < val2;
         }
     }
+
     private static class LessOrEqualExprFunc implements BooleanExprFunc {
         @Override
         public boolean eval(double val1, double val2) {
             return val1 <= val2;
         }
     }
+
     private static class GreaterExprFunc implements BooleanExprFunc {
         @Override
         public boolean eval(double val1, double val2) {
             return val1 > val2;
         }
     }
+
     private static class GreaterOrEqualExprFunc implements BooleanExprFunc {
         @Override
         public boolean eval(double val1, double val2) {
@@ -281,13 +289,15 @@
         }
     }
 
-    public static class ExpressionEvaluationException extends RuntimeException{
+    public static class ExpressionEvaluationException extends RuntimeException {
         public ExpressionEvaluationException(String message, Throwable cause) {
             super(message, cause);
         }
+
         public ExpressionEvaluationException(String message) {
             super(message);
         }
+
         public ExpressionEvaluationException(Throwable cause) {
             super(cause);
         }
@@ -295,6 +305,6 @@
 
     @Override
     public String toString() {
-        return this.getClass().getSimpleName()+" ("+this.filterEntity.toString()+")";
+        return this.getClass().getSimpleName() + " (" + this.filterEntity.toString() + ")";
     }
-}
\ No newline at end of file
+}
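Note (illustrative, not part of the patch): the static `_opExprFuncMap` block above maps each `ComparisonOperator` to a small boolean function over two doubles. The same dispatch idea, sketched in isolation with JDK functional types and made-up operator tokens:

```java
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiPredicate;

public class OperatorDispatchSketch {
    // Same dispatch idea as _opExprFuncMap, expressed with JDK functional types.
    private static final Map<String, BiPredicate<Double, Double>> OPS = new HashMap<>();

    static {
        OPS.put("=", (a, b) -> a.doubleValue() == b.doubleValue());
        OPS.put("!=", (a, b) -> a.doubleValue() != b.doubleValue());
        OPS.put("<", (a, b) -> a < b);
        OPS.put("<=", (a, b) -> a <= b);
        OPS.put(">", (a, b) -> a > b);
        OPS.put(">=", (a, b) -> a >= b);
    }

    public static void main(String[] args) {
        // compareTo(...) in the comparator returns 1 for "keep the row" and 0 for "filter it out".
        boolean keep = OPS.get(">=").test(95.0, 90.0);
        System.out.println(keep ? 1 : 0);   // prints 1
    }
}
```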

diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java
index 8209445..9e736ae 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/HBaseFilterBuilder.java
@@ -34,552 +34,542 @@
 import java.util.regex.Pattern;
 
 /**
- * the steps of building hbase filters
- * 1. receive ORExpression from eagle-antlr
- * 2. iterate all ANDExpression in ORExpression
- *    2.1 put each ANDExpression to a new filter list with MUST_PASS_ONE option
- *    2.2 iterate all AtomicExpression in ANDExpression
- *       2.2.1 group AtomicExpression into 2 groups by looking up metadata, one is for tag filters, the other is for column filters
- *       2.2.2 put the above 2 filters to a filter list with MUST_PASS_ALL option
+ * the steps of building hbase filters 1. receive ORExpression from eagle-antlr 2. iterate all ANDExpression
+ * in ORExpression 2.1 put each ANDExpression to a new filter list with MUST_PASS_ONE option 2.2 iterate all
+ * AtomicExpression in ANDExpression 2.2.1 group AtomicExpression into 2 groups by looking up metadata, one is
+ * for tag filters, the other is for column filters 2.2.2 put the above 2 filters to a filter list with
+ * MUST_PASS_ALL option
  */
 public class HBaseFilterBuilder {
-	private static final Logger LOG = LoggerFactory.getLogger(HBaseFilterBuilder.class);
-	
-	/**
-	 * syntax is @<fieldname>
-	 */
-//	private static final String fnRegex = "^@(.*)$";
-	private static final Pattern _fnPattern = TokenConstant.ID_PATTERN;// Pattern.compile(fnRegex);
-	private static final Charset _defaultCharset = Charset.forName("ISO-8859-1");
+    private static final Logger LOG = LoggerFactory.getLogger(HBaseFilterBuilder.class);
 
-	private ORExpression _orExpr;
-	private EntityDefinition _ed;
-	private boolean _filterIfMissing;
-	private Charset _charset = _defaultCharset;
+    /*
+     * syntax is @<fieldname>
+     */
+    // private static final String fnRegex = "^@(.*)$";
+    private static final Pattern _fnPattern = TokenConstant.ID_PATTERN;// Pattern.compile(fnRegex);
+    private static final Charset _defaultCharset = Charset.forName("ISO-8859-1");
 
-	/**
-	 * TODO: Verify performance impact
-	 *
-	 * @return
-	 */
-	public Set<String> getFilterFields() {
-		return _filterFields;
-	}
+    private ORExpression orExpr;
+    private EntityDefinition ed;
+    private boolean filterIfMissing;
+    private Charset charset = _defaultCharset;
 
-	/**
-	 * Just add filter fields for expression filter
-	 */
-	private Set<String> _filterFields;
+    /**
+     * TODO: Verify performance impact
+     *
+     * @return
+     */
+    public Set<String> getFilterFields() {
+        return filterFields;
+    }
 
-	public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr) {
-		this(ed, orExpr, false);
-	}
-	
-	public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr, boolean filterIfMissing) {
-		this._ed = ed;
-		this._orExpr = orExpr;
-		this._filterIfMissing = filterIfMissing;
-	}
-	
-	public void setCharset(String charsetName){
-		_charset = Charset.forName(charsetName);
-	}
-	
-	public Charset getCharset(){
-		return _charset;
-	}
-	
-	/**
-	 * Because we don't have metadata for tag, we regard non-qualifer field as tag. So one field possibly is not a real tag when this function return true. This happens
-	 * when a user input an wrong field name which is neither tag or qualifier
-	 *   
-	 * @param field
-	 */
-	private boolean isTag(String field){
-		return _ed.isTag(field);
-	}
-	
-	/**
-	 * check whether this field is one entity attribute or not 
-	 * @param fieldName
-	 * @return
-	 */
-	private String parseEntityAttribute(String fieldName){
-		Matcher m = _fnPattern.matcher(fieldName);
-		if(m.find()){
-			return m.group(1);
-		}
-		return null;
-	}
+    /**
+     * Just add filter fields for expression filter
+     */
+    private Set<String> filterFields;
 
-	/**
-	 * Return the partition values for each or expression. The size of the returned list should be equal to
-	 * the size of FilterList that {@link #buildFilters()} returns.
-	 * 
-	 * TODO: For now we don't support one query to query multiple partitions. In future if partition is defined, 
-	 * for the entity, internally We need to spawn multiple queries and send one query for each partition.
-	 * 
-	 * @return Return the partition values for each or expression. Return null if the entity doesn't support
-	 * partition
-	 */
-	public List<String[]> getPartitionValues() {
-		final String[] partitions = _ed.getPartitions();
-		if (partitions == null || partitions.length == 0) {
-			return null;
-		}
-		final List<String[]> result = new ArrayList<String[]>();
-		final Map<String, String> partitionKeyValueMap = new HashMap<String, String>();
-		for(ANDExpression andExpr : _orExpr.getANDExprList()) {
-			partitionKeyValueMap.clear();
-			for(AtomicExpression ae : andExpr.getAtomicExprList()) {
-				// TODO temporarily ignore those fields which are not for attributes
-				if(ae.getKeyType() == TokenType.ID) {
-					final String fieldName = parseEntityAttribute(ae.getKey());
-					if (fieldName == null) {
-						LOG.warn(fieldName + " field does not have format @<FieldName>, ignored");
-						continue;
-					}
-					if (_ed.isPartitionTag(fieldName) && ComparisonOperator.EQUAL.equals(ae.getOp())) {
-						final String value = ae.getValue();
-						partitionKeyValueMap.put(fieldName, value);
-					}
-				}
-			}
-			final String[] values = new String[partitions.length];
-			result.add(values);
-			for (int i = 0; i < partitions.length; ++i) {
-				final String partition = partitions[i];
-				final String value = partitionKeyValueMap.get(partition);
-				values[i] = value;
-			}
-		}
-		return result;
-	}
+    public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr) {
+        this(ed, orExpr, false);
+    }
 
-	/**
-	 * @see org.apache.eagle.query.parser.TokenType
-	 *
-	 * @return
-	 */
-	public FilterList buildFilters(){
-		// TODO: Optimize to select between row filter or column filter for better performance
-		// Use row key filter priority by default
-		boolean rowFilterPriority = true;
+    public HBaseFilterBuilder(EntityDefinition ed, ORExpression orExpr, boolean filterIfMissing) {
+        this.ed = ed;
+        this.orExpr = orExpr;
+        this.filterIfMissing = filterIfMissing;
+    }
 
-		FilterList fltList = new FilterList(Operator.MUST_PASS_ONE);
-		for(ANDExpression andExpr : _orExpr.getANDExprList()){
-			
-			FilterList list = new FilterList(Operator.MUST_PASS_ALL);
-			Map<String, List<String>> tagFilters = new HashMap<String, List<String>>();
-			List<QualifierFilterEntity> qualifierFilters = new ArrayList<QualifierFilterEntity>();
-//			List<QualifierFilterEntry> tagLikeQualifierFilters = new ArrayList<QualifierFilterEntry>();
+    public void setCharset(String charsetName) {
+        charset = Charset.forName(charsetName);
+    }
 
-			// TODO refactor not to use too much if/else
-			for(AtomicExpression ae : andExpr.getAtomicExprList()){
-				// TODO temporarily ignore those fields which are not for attributes
+    public Charset getCharset() {
+        return charset;
+    }
 
-				String fieldName = ae.getKey();
-				if(ae.getKeyType() == TokenType.ID){
-					fieldName = parseEntityAttribute(fieldName);
-					if(fieldName == null){
-						LOG.warn(fieldName + " field does not have format @<FieldName>, ignored");
-						continue;
-					}
-				}
+    /**
+     * Because we don't have metadata for tags, we regard any non-qualifier field as a tag. So a field may
+     * not be a real tag even when this function returns true. This happens when a user inputs a wrong field
+     * name that is neither a tag nor a qualifier.
+     *
+     * @param field
+     */
+    private boolean isTag(String field) {
+        return ed.isTag(field);
+    }
 
-				String value = ae.getValue();
-				ComparisonOperator op = ae.getOp();
-				TokenType keyType = ae.getKeyType();
-				TokenType valueType = ae.getValueType();
-				QualifierFilterEntity entry = new QualifierFilterEntity(fieldName,value,op,keyType,valueType);
+    /**
+     * check whether this field is one entity attribute or not
+     *
+     * @param fieldName
+     * @return
+     */
+    private String parseEntityAttribute(String fieldName) {
+        Matcher m = _fnPattern.matcher(fieldName);
+        if (m.find()) {
+            return m.group(1);
+        }
+        return null;
+    }
 
-				// TODO Exact match, need to add escape for those special characters here, including:
-				// "-", "[", "]", "/", "{", "}", "(", ")", "*", "+", "?", ".", "\\", "^", "$", "|"
+    /**
+     * Return the partition values for each or expression. The size of the returned list should be equal to
+     * the size of FilterList that {@link #buildFilters()} returns. TODO: For now we don't support one query
+     * to query multiple partitions. In the future, if a partition is defined for the entity, we internally
+     * need to spawn multiple queries and send one query for each partition.
+     *
+     * @return Return the partition values for each or expression. Return null if the entity doesn't support
+     *         partition
+     */
+    public List<String[]> getPartitionValues() {
+        final String[] partitions = ed.getPartitions();
+        if (partitions == null || partitions.length == 0) {
+            return null;
+        }
+        final List<String[]> result = new ArrayList<String[]>();
+        final Map<String, String> partitionKeyValueMap = new HashMap<String, String>();
+        for (ANDExpression andExpr : orExpr.getANDExprList()) {
+            partitionKeyValueMap.clear();
+            for (AtomicExpression ae : andExpr.getAtomicExprList()) {
+                // TODO temporarily ignore those fields which are not for attributes
+                if (ae.getKeyType() == TokenType.ID) {
+                    final String fieldName = parseEntityAttribute(ae.getKey());
+                    if (fieldName == null) {
+                        LOG.warn(fieldName + " field does not have format @<FieldName>, ignored");
+                        continue;
+                    }
+                    if (ed.isPartitionTag(fieldName) && ComparisonOperator.EQUAL.equals(ae.getOp())) {
+                        final String value = ae.getValue();
+                        partitionKeyValueMap.put(fieldName, value);
+                    }
+                }
+            }
+            final String[] values = new String[partitions.length];
+            result.add(values);
+            for (int i = 0; i < partitions.length; ++i) {
+                final String partition = partitions[i];
+                final String value = partitionKeyValueMap.get(partition);
+                values[i] = value;
+            }
+        }
+        return result;
+    }
 
-				if(keyType == TokenType.ID && isTag(fieldName)){
-					if ((ComparisonOperator.EQUAL.equals(op) || ComparisonOperator.IS.equals(op))
-							&& !TokenType.NULL.equals(valueType))
-					{
-						// Use RowFilter for equal TAG
-						if(tagFilters.get(fieldName) == null) tagFilters.put(fieldName, new ArrayList<String>());
-						tagFilters.get(fieldName).add(value);
-					} else if (rowFilterPriority && ComparisonOperator.IN.equals(op))
-					{
-						// Use RowFilter here by default
-						if(tagFilters.get(fieldName) == null) tagFilters.put(fieldName, new ArrayList<String>());
-						tagFilters.get(fieldName).addAll(EntityQualifierUtils.parseList(value));
-					} else if (ComparisonOperator.LIKE.equals(op)
-						|| ComparisonOperator.NOT_LIKE.equals(op)
-						|| ComparisonOperator.CONTAINS.equals(op)
-						|| ComparisonOperator.NOT_CONTAINS.equals(op)
-						|| ComparisonOperator.IN.equals(op)
-						|| ComparisonOperator.IS.equals(op)
-						|| ComparisonOperator.IS_NOT.equals(op)
-						|| ComparisonOperator.NOT_EQUAL.equals(op)
-						|| ComparisonOperator.EQUAL.equals(op)
-						|| ComparisonOperator.NOT_IN.equals(op))
-					{
-						qualifierFilters.add(entry);
-					} else
-					{
-						LOG.warn("Don't support operation: \"" + op + "\" on tag field: " + fieldName + " yet, going to ignore");
-						throw new IllegalArgumentException("Don't support operation: "+op+" on tag field: "+fieldName+", avaliable options: =, =!, =~, !=~, in, not in, contains, not contains");
-					}
-				}else{
-					qualifierFilters.add(entry);
-				}
-			}
+    /**
+     * @see org.apache.eagle.query.parser.TokenType
+     * @return
+     */
+    public FilterList buildFilters() {
+        // TODO: Optimize to select between row filter or column filter for better performance
+        // Use row key filter priority by default
+        boolean rowFilterPriority = true;
 
-			// Build RowFilter for equal tags
-			list.addFilter(buildTagFilter(tagFilters));
+        FilterList fltList = new FilterList(Operator.MUST_PASS_ONE);
+        for (ANDExpression andExpr : orExpr.getANDExprList()) {
 
-			// Build SingleColumnValueFilter
-			FilterList qualifierFilterList = buildQualifierFilter(qualifierFilters);
-			if(qualifierFilterList != null && qualifierFilterList.getFilters().size()>0){
-				list.addFilter(qualifierFilterList);
-			}else {
-				if(LOG.isDebugEnabled()) LOG.debug("Ignore empty qualifier filter from "+qualifierFilters.toString());
-			}
-			fltList.addFilter(list);
-		}
-		LOG.info("Query: " + _orExpr.toString() + " => Filter: " + fltList.toString());
-		return fltList;
-	}
-	
-	/**
-	 * _charset is used to decode the byte array, in hbase server, RegexStringComparator uses the same
-	 * charset to decode the byte array stored in qualifier
-	 * for tag filter regex, it's always ISO-8859-1 as it only comes from String's hashcode (Integer)
-	 * Note: regex comparasion is to compare String
-	 */
-	protected Filter buildTagFilter(Map<String, List<String>> tagFilters){
-		RegexStringComparator regexStringComparator = new RegexStringComparator(buildTagFilterRegex(tagFilters));
-		regexStringComparator.setCharset(_charset);
-		RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator);
-		return filter;
-	}
-	
-	/**
-	 * all qualifiers' condition must be satisfied.
-	 *
-	 * <H1>Use RegexStringComparator for:</H1>
-	 *      IN
-	 *      LIKE
-	 *      NOT_LIKE
-	 *
-	 * <H1>Use SubstringComparator for:</H1>
-	 *      CONTAINS
-	 *
-	 * <H1>Use EntityQualifierHelper for:</H1>
-	 *      EQUALS
-	 *      NOT_EUQALS
-	 *      LESS
-	 *      LESS_OR_EQUAL
-	 *      GREATER
-	 *      GREATER_OR_EQUAL
-	 *
-	 * <H2>
-	 *     TODO: Compare performance of RegexStringComparator ,SubstringComparator ,EntityQualifierHelper
-	 * </H2>
-	 *
-	 * @param qualifierFilters
-	 * @return
-	 */
-	protected FilterList buildQualifierFilter(List<QualifierFilterEntity> qualifierFilters){
-		FilterList list = new FilterList(Operator.MUST_PASS_ALL);
-		// iterate all the qualifiers
-		for(QualifierFilterEntity entry : qualifierFilters){
-			// if contains expression based filter
-			if(entry.getKeyType() == TokenType.EXP
-					|| entry.getValueType() == TokenType.EXP
-					|| entry.getKeyType() != TokenType.ID){
-				if(!EagleConfigFactory.load().isCoprocessorEnabled()) {
-					LOG.warn("Expression in filter may not support, because custom filter and coprocessor is disabled: " + entry.toString());
-				}
-				list.addFilter(buildExpressionBasedFilter(entry));
-				continue;
-			}
+            FilterList list = new FilterList(Operator.MUST_PASS_ALL);
+            Map<String, List<String>> tagFilters = new HashMap<String, List<String>>();
+            List<QualifierFilterEntity> qualifierFilters = new ArrayList<QualifierFilterEntity>();
+            // List<QualifierFilterEntry> tagLikeQualifierFilters = new ArrayList<QualifierFilterEntry>();
 
-			// else using SingleColumnValueFilter
-			String qualifierName = entry.getKey();
-			if(!isTag(entry.getKey())){
-				Qualifier qualifier = _ed.getDisplayNameMap().get(entry.getKey());
-				qualifierName = qualifier.getQualifierName();
-			}
+            // TODO refactor not to use too much if/else
+            for (AtomicExpression ae : andExpr.getAtomicExprList()) {
+                // TODO temporarily ignore those fields which are not for attributes
 
-			// Comparator to be used for building HBase Filter
-			// WritableByteArrayComparable comparator;
+                String fieldName = ae.getKey();
+                if (ae.getKeyType() == TokenType.ID) {
+                    fieldName = parseEntityAttribute(fieldName);
+                    if (fieldName == null) {
+                        LOG.warn(fieldName + " field does not have format @<FieldName>, ignored");
+                        continue;
+                    }
+                }
+
+                String value = ae.getValue();
+                ComparisonOperator op = ae.getOp();
+                TokenType keyType = ae.getKeyType();
+                TokenType valueType = ae.getValueType();
+                QualifierFilterEntity entry = new QualifierFilterEntity(fieldName, value, op, keyType,
+                                                                        valueType);
+
+                // TODO Exact match, need to add escape for those special characters here, including:
+                // "-", "[", "]", "/", "{", "}", "(", ")", "*", "+", "?", ".", "\\", "^", "$", "|"
+
+                if (keyType == TokenType.ID && isTag(fieldName)) {
+                    if ((ComparisonOperator.EQUAL.equals(op) || ComparisonOperator.IS.equals(op))
+                        && !TokenType.NULL.equals(valueType)) {
+                        // Use RowFilter for equal TAG
+                        if (tagFilters.get(fieldName) == null) {
+                            tagFilters.put(fieldName, new ArrayList<String>());
+                        }
+                        tagFilters.get(fieldName).add(value);
+                    } else if (rowFilterPriority && ComparisonOperator.IN.equals(op)) {
+                        // Use RowFilter here by default
+                        if (tagFilters.get(fieldName) == null) {
+                            tagFilters.put(fieldName, new ArrayList<String>());
+                        }
+                        tagFilters.get(fieldName).addAll(EntityQualifierUtils.parseList(value));
+                    } else if (ComparisonOperator.LIKE.equals(op) || ComparisonOperator.NOT_LIKE.equals(op)
+                               || ComparisonOperator.CONTAINS.equals(op)
+                               || ComparisonOperator.NOT_CONTAINS.equals(op)
+                               || ComparisonOperator.IN.equals(op) || ComparisonOperator.IS.equals(op)
+                               || ComparisonOperator.IS_NOT.equals(op)
+                               || ComparisonOperator.NOT_EQUAL.equals(op)
+                               || ComparisonOperator.EQUAL.equals(op)
+                               || ComparisonOperator.NOT_IN.equals(op)) {
+                        qualifierFilters.add(entry);
+                    } else {
+                        LOG.warn("Don't support operation: \"" + op + "\" on tag field: " + fieldName
+                                 + " yet, going to ignore");
+                        throw new IllegalArgumentException("Don't support operation: " + op
+                                                           + " on tag field: " + fieldName
+                                                           + ", avaliable options: =, =!, =~, !=~, in, not in, contains, not contains");
+                    }
+                } else {
+                    qualifierFilters.add(entry);
+                }
+            }
+
+            // Build RowFilter for equal tags
+            list.addFilter(buildTagFilter(tagFilters));
+
+            // Build SingleColumnValueFilter
+            FilterList qualifierFilterList = buildQualifierFilter(qualifierFilters);
+            if (qualifierFilterList != null && qualifierFilterList.getFilters().size() > 0) {
+                list.addFilter(qualifierFilterList);
+            } else {
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Ignore empty qualifier filter from " + qualifierFilters.toString());
+                }
+            }
+            fltList.addFilter(list);
+        }
+        LOG.info("Query: " + orExpr.toString() + " => Filter: " + fltList.toString());
+        return fltList;
+    }
+
+    /**
+     * charset is used to decode the byte array; on the hbase server, RegexStringComparator uses the same
+     * charset to decode the byte array stored in the qualifier. For the tag filter regex it is always
+     * ISO-8859-1 as it only comes from String's hashcode (Integer). Note: regex comparison compares Strings.
+     */
+    protected Filter buildTagFilter(Map<String, List<String>> tagFilters) {
+        RegexStringComparator regexStringComparator = new RegexStringComparator(buildTagFilterRegex(tagFilters));
+        regexStringComparator.setCharset(charset);
+        RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator);
+        return filter;
+    }
+
+    /**
+     * all qualifiers' condition must be satisfied.
+     * <H1>Use RegexStringComparator for:</H1> IN LIKE NOT_LIKE
+     * <H1>Use SubstringComparator for:</H1> CONTAINS
+     * <H1>Use EntityQualifierHelper for:</H1> EQUALS NOT_EQUALS LESS LESS_OR_EQUAL GREATER GREATER_OR_EQUAL
+     * <H2>TODO: Compare performance of RegexStringComparator ,SubstringComparator ,EntityQualifierHelper</H2>
+     *
+     * @param qualifierFilters
+     * @return
+     */
+    protected FilterList buildQualifierFilter(List<QualifierFilterEntity> qualifierFilters) {
+        FilterList list = new FilterList(Operator.MUST_PASS_ALL);
+        // iterate all the qualifiers
+        for (QualifierFilterEntity entry : qualifierFilters) {
+            // if contains expression based filter
+            if (entry.getKeyType() == TokenType.EXP || entry.getValueType() == TokenType.EXP
+                || entry.getKeyType() != TokenType.ID) {
+                if (!EagleConfigFactory.load().isCoprocessorEnabled()) {
+                    LOG.warn("Expression in filter may not support, because custom filter and coprocessor is disabled: "
+                             + entry.toString());
+                }
+                list.addFilter(buildExpressionBasedFilter(entry));
+                continue;
+            }
+
+            // else using SingleColumnValueFilter
+            String qualifierName = entry.getKey();
+            if (!isTag(entry.getKey())) {
+                Qualifier qualifier = ed.getDisplayNameMap().get(entry.getKey());
+                qualifierName = qualifier.getQualifierName();
+            }
+
+            // Comparator to be used for building HBase Filter
+            // WritableByteArrayComparable comparator;
             ByteArrayComparable comparable;
-			if(ComparisonOperator.IN.equals(entry.getOp())
-				|| ComparisonOperator.NOT_IN.equals(entry.getOp())){
-				Filter setFilter = buildListQualifierFilter(entry);
-				if(setFilter!=null){
-					list.addFilter(setFilter);
-				}
-			}else{
-				// If [=,!=,is,is not] NULL, use NullComparator else throw exception
-				if(TokenType.NULL.equals(entry.getValueType())){
-					if(ComparisonOperator.EQUAL.equals(entry.getOp())
-						||ComparisonOperator.NOT_EQUAL.equals(entry.getOp())
-						||ComparisonOperator.IS.equals(entry.getOp())
-						||ComparisonOperator.IS_NOT.equals(entry.getOp()))
+            if (ComparisonOperator.IN.equals(entry.getOp())
+                || ComparisonOperator.NOT_IN.equals(entry.getOp())) {
+                Filter setFilter = buildListQualifierFilter(entry);
+                if (setFilter != null) {
+                    list.addFilter(setFilter);
+                }
+            } else {
+                // If [=,!=,is,is not] NULL, use NullComparator else throw exception
+                if (TokenType.NULL.equals(entry.getValueType())) {
+                    if (ComparisonOperator.EQUAL.equals(entry.getOp())
+                        || ComparisonOperator.NOT_EQUAL.equals(entry.getOp())
+                        || ComparisonOperator.IS.equals(entry.getOp())
+                        || ComparisonOperator.IS_NOT.equals(entry.getOp())) {
                         comparable = new NullComparator();
-					else
-						throw new IllegalArgumentException("Operation: "+entry.getOp()+" with NULL is not supported yet: "+entry.toString()+", avaliable options: [=, !=, is, is not] null|NULL");
-				}
-				// If [contains, not contains],use SubstringComparator
-				else if (ComparisonOperator.CONTAINS.equals(entry.getOp())
-					|| ComparisonOperator.NOT_CONTAINS.equals(entry.getOp())) {
+                    } else {
+                        throw new IllegalArgumentException("Operation: " + entry.getOp()
+                                                           + " with NULL is not supported yet: "
+                                                           + entry.toString()
+                                                           + ", avaliable options: [=, !=, is, is not] null|NULL");
+                    }
+                } else if (ComparisonOperator.CONTAINS.equals(entry.getOp())
+                         || ComparisonOperator.NOT_CONTAINS.equals(entry.getOp())) {
+                    // If [contains, not contains],use SubstringComparator
                     comparable = new SubstringComparator(entry.getValue());
-				}
-				// If [like, not like], use RegexStringComparator
-				else if (ComparisonOperator.LIKE.equals(entry.getOp())
-						|| ComparisonOperator.NOT_LIKE.equals(entry.getOp())){
-					// Use RegexStringComparator for LIKE / NOT_LIKE
-					RegexStringComparator _comparator = new RegexStringComparator(buildQualifierRegex(entry.getValue()));
-					_comparator.setCharset(_charset);
+                } else if (ComparisonOperator.LIKE.equals(entry.getOp())
+                         || ComparisonOperator.NOT_LIKE.equals(entry.getOp())) {
+                    // If [like, not like], use RegexStringComparator
+                    // Use RegexStringComparator for LIKE / NOT_LIKE
+                    RegexStringComparator _comparator = new RegexStringComparator(buildQualifierRegex(entry
+                        .getValue()));
+                    _comparator.setCharset(charset);
                     comparable = _comparator;
-				} else{
-					Class type = EntityQualifierUtils.getType(_ed, entry.getKey());
-					// if type is null (is Tag or not found) or not defined for TypedByteArrayComparator
-					if(!EagleConfigFactory.load().isCoprocessorEnabled() || type == null || TypedByteArrayComparator.get(type) == null){
-                        comparable = new BinaryComparator(EntityQualifierUtils.toBytes(_ed, entry.getKey(), entry.getValue()));
-					}else {
-                        comparable = new TypedByteArrayComparator(EntityQualifierUtils.toBytes(_ed, entry.getKey(), entry.getValue()),type);
-					}
-				}
+                } else {
+                    Class type = EntityQualifierUtils.getType(ed, entry.getKey());
+                    // if type is null (is Tag or not found) or not defined for TypedByteArrayComparator
+                    if (!EagleConfigFactory.load().isCoprocessorEnabled() || type == null
+                        || TypedByteArrayComparator.get(type) == null) {
+                        comparable = new BinaryComparator(EntityQualifierUtils.toBytes(ed, entry.getKey(),
+                                                                                       entry.getValue()));
+                    } else {
+                        comparable = new TypedByteArrayComparator(EntityQualifierUtils
+                            .toBytes(ed, entry.getKey(), entry.getValue()), type);
+                    }
+                }
 
-				SingleColumnValueFilter filter =
-						new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparable);
-				filter.setFilterIfMissing(_filterIfMissing);
-				list.addFilter(filter);
-			}
-		}
+                SingleColumnValueFilter filter = new SingleColumnValueFilter(ed.getColumnFamily()
+                    .getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()),
+                                                                             comparable);
+                filter.setFilterIfMissing(filterIfMissing);
+                list.addFilter(filter);
+            }
+        }
 
-		return list;
-	}
+        return list;
+    }
 
-	private Filter buildExpressionBasedFilter(QualifierFilterEntity entry) {
-		BooleanExpressionComparator expressionComparator  = new BooleanExpressionComparator(entry,_ed);
-		_filterFields = expressionComparator.getRequiredFields();
-		RowValueFilter filter = new RowValueFilter(expressionComparator);
-		return filter;
-	}
+    private Filter buildExpressionBasedFilter(QualifierFilterEntity entry) {
+        BooleanExpressionComparator expressionComparator = new BooleanExpressionComparator(entry, ed);
+        filterFields = expressionComparator.getRequiredFields();
+        RowValueFilter filter = new RowValueFilter(expressionComparator);
+        return filter;
+    }
 
-	/**
-	 * Currently use BinaryComparator only
-	 * <h2>TODO: </h2>
-	 * Possibility to tune performance by using: OR[BinaryComparator,...] instead of RegexStringComparator?
-	 *
-	 *<br/> <br/>
-	 *
-	 * ! Check op must be IN or NOTIN in caller
-	 *
-	 * @param entry
-	 * @return
-	 */
-	private Filter buildListQualifierFilter(QualifierFilterEntity entry){
-		List<String> valueSet = EntityQualifierUtils.parseList(entry.getValue());
-		Iterator<String> it = valueSet.iterator();
-		String fieldName = entry.getKey();
-		String qualifierName = fieldName;
-		if(!_ed.isTag(entry.getKey())){
-			qualifierName = _ed.getDisplayNameMap().get(entry.getKey()).getQualifierName();
-		}
+    /**
+     * Currently use BinaryComparator only
+     * <h2>TODO:</h2> Possibility to tune performance by using: OR[BinaryComparator,...] instead of
+     * RegexStringComparator? <br/>
+     * <br/>
+     * ! Check op must be IN or NOTIN in caller
+     *
+     * @param entry
+     * @return
+     */
+    private Filter buildListQualifierFilter(QualifierFilterEntity entry) {
+        List<String> valueSet = EntityQualifierUtils.parseList(entry.getValue());
+        Iterator<String> it = valueSet.iterator();
+        String fieldName = entry.getKey();
+        String qualifierName = fieldName;
+        if (!ed.isTag(entry.getKey())) {
+            qualifierName = ed.getDisplayNameMap().get(entry.getKey()).getQualifierName();
+        }
 
-// TODO: Try to use RegExp just work if possible
-// Because single SingleColumnValueFilter is much faster than multi SingleColumnValueFilters in OR list.
-//		Class qualifierType = EntityQualifierHelper.getType(_ed,fieldName);
-//		if( qualifierType == null || qualifierType == String.class){
-//			boolean first = true;
-//			StringBuilder filterRegex = new StringBuilder();
-//			filterRegex.append("^(");
-//			while(it.hasNext()) {
-//				String value = it.next();
-//				if(value == null) {
-//					logger.warn("ignore empty value in set qualifier filter: "+entry.toString());
-//					continue;
-//				}
-//				if(!first) filterRegex.append("|");
-//				filterRegex.append(value);
-//				first = false;
-//			}
-//			filterRegex.append(")$");
-//			RegexStringComparator regexStringComparator = new RegexStringComparator(filterRegex.toString());
-//			return new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(),
-//					convertToHBaseCompareOp(entry.getOp()), regexStringComparator);
-//		}else{
-		FilterList setFilterList;
-		if(ComparisonOperator.IN.equals(entry.getOp())){
-			setFilterList = new FilterList(Operator.MUST_PASS_ONE);
-		}else if(ComparisonOperator.NOT_IN.equals(entry.getOp())) {
-			setFilterList = new FilterList(Operator.MUST_PASS_ALL);
-		}else{
-			throw new IllegalArgumentException(String.format("Don't support operation: %s on LIST type of value yet: %s, valid options: IN/NOT IN [LIST]",entry.getOp(),entry.toString()));
-		}
+        // TODO: Try to use RegExp just work if possible
+        // Because single SingleColumnValueFilter is much faster than multi SingleColumnValueFilters in OR
+        // list.
+        // Class qualifierType = EntityQualifierHelper.getType(ed,fieldName);
+        // if( qualifierType == null || qualifierType == String.class){
+        // boolean first = true;
+        // StringBuilder filterRegex = new StringBuilder();
+        // filterRegex.append("^(");
+        // while(it.hasNext()) {
+        // String value = it.next();
+        // if(value == null) {
+        // logger.warn("ignore empty value in set qualifier filter: "+entry.toString());
+        // continue;
+        // }
+        // if(!first) filterRegex.append("|");
+        // filterRegex.append(value);
+        // first = false;
+        // }
+        // filterRegex.append(")$");
+        // RegexStringComparator regexStringComparator = new RegexStringComparator(filterRegex.toString());
+        // return new SingleColumnValueFilter(ed.getColumnFamily().getBytes(), qualifierName.getBytes(),
+        // convertToHBaseCompareOp(entry.getOp()), regexStringComparator);
+        // }else{
+        FilterList setFilterList;
+        if (ComparisonOperator.IN.equals(entry.getOp())) {
+            setFilterList = new FilterList(Operator.MUST_PASS_ONE);
+        } else if (ComparisonOperator.NOT_IN.equals(entry.getOp())) {
+            setFilterList = new FilterList(Operator.MUST_PASS_ALL);
+        } else {
+            throw new IllegalArgumentException(String
+                .format("Don't support operation: %s on LIST type of value yet: %s, valid options: IN/NOT IN [LIST]",
+                        entry.getOp(), entry.toString()));
+        }
 
-		while(it.hasNext()) {
-			String value = it.next();
-			BinaryComparator comparator = new BinaryComparator(EntityQualifierUtils.toBytes(_ed, fieldName, value));
-			SingleColumnValueFilter filter =
-					new SingleColumnValueFilter(_ed.getColumnFamily().getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparator);
-			filter.setFilterIfMissing(_filterIfMissing);
-			setFilterList.addFilter(filter);
-		}
+        while (it.hasNext()) {
+            String value = it.next();
+            BinaryComparator comparator = new BinaryComparator(EntityQualifierUtils.toBytes(ed, fieldName,
+                                                                                            value));
+            SingleColumnValueFilter filter = new SingleColumnValueFilter(ed.getColumnFamily()
+                .getBytes(), qualifierName.getBytes(), convertToHBaseCompareOp(entry.getOp()), comparator);
+            filter.setFilterIfMissing(filterIfMissing);
+            setFilterList.addFilter(filter);
+        }
 
-		return setFilterList;
-//		}
-	}
+        return setFilterList;
+        // }
+    }
 
-	/**
-	 * Just used for LIKE and NOT_LIKE
-	 *
-	 * @param qualifierValue
-	 * @return
-	 */
-	protected String buildQualifierRegex(String qualifierValue){
-		StringBuilder sb = new StringBuilder();
-//		sb.append("(?s)");
-		sb.append("^");
-		sb.append(qualifierValue);
-		sb.append("$");
-		return sb.toString();
-	}
-	
-	  /**
-	   * Appends the given ID to the given buffer, followed by "\\E".
-	   * [steal it from opentsdb, thanks opentsdb :) https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java]
-	   */
-	  private static void addId(final StringBuilder buf, final byte[] id) {
-		buf.append("\\Q");
-	    boolean backslash = false;
-	    for (final byte b : id) {
-	      buf.append((char) (b & 0xFF));
-	      if (b == 'E' && backslash) {  // If we saw a `\' and now we have a `E'.
-	        // So we just terminated the quoted section because we just added \E
-	        // to `buf'.  So let's put a litteral \E now and start quoting again.
-	        buf.append("\\\\E\\Q");
-	      } else {
-	        backslash = b == '\\';
-	      }
-	    }
-	    buf.append("\\E");
-	  }
-	  
-	  @SuppressWarnings("unused")
-	  private static void addId(final StringBuilder buf, final String id) {
-		    buf.append("\\Q");
-		  	int len = id.length()-1;
-		    boolean backslash = false;
-		    for (int i =0; i < len; i++) {
-		      char c = id.charAt(i);
-		      buf.append(c);
-		      if (c == 'E' && backslash) {  // If we saw a `\' and now we have a `E'.
-		        // So we just terminated the quoted section because we just added \E
-		        // to `buf'.  So let's put a litteral \E now and start quoting again.
-		        buf.append("\\\\E\\Q");
-		      } else {
-		        backslash = c == '\\';
-		      }
-		    }
-		    buf.append("\\E");
-		  }
-	
-	/**
-	 * one search tag may have multiple values which have OR relationship, and relationship between
-	 * different search tags is AND
-	 * the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2"
-	 * @param tags
-	 * @return
-	 */
-	protected String buildTagFilterRegex(Map<String, List<String>> tags){
-		// TODO need consider that \E could be part of tag, refer to https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java
-		final SortedMap<Integer, List<Integer>> tagHash = new TreeMap<Integer, List<Integer>>();
-		final int numOfPartitionFields = (_ed.getPartitions() == null) ? 0 : _ed.getPartitions().length;
-		for(Map.Entry<String, List<String>> entry : tags.entrySet()){
-			String tagName = entry.getKey();
-			// Ignore tag if the tag is one of partition fields
-			if (_ed.isPartitionTag(tagName)) {
-				continue;
-			}
-			List<String> stringValues = entry.getValue();
-			List<Integer> hashValues = new ArrayList<Integer>(stringValues.size());
-			for(String value : stringValues){
-				hashValues.add(value.hashCode());
-			}
-			tagHash.put(tagName.hashCode(), hashValues);
-		}
-		
-		// header = prefix(4 bytes) + partition_hashes(4*N bytes) + timestamp (8 bytes)
-		final int headerLength = 4 + numOfPartitionFields * 4 + 8;
+    /**
+     * Just used for LIKE and NOT_LIKE
+     *
+     * @param qualifierValue
+     * @return
+     */
+    protected String buildQualifierRegex(String qualifierValue) {
+        StringBuilder sb = new StringBuilder();
+        // sb.append("(?s)");
+        sb.append("^");
+        sb.append(qualifierValue);
+        sb.append("$");
+        return sb.toString();
+    }
 
-		// <tag1:4><value1:4> ... <tagn:4><valuen:4>
-		StringBuilder sb = new StringBuilder();
-		sb.append("(?s)");
-		sb.append("^(?:.{").append(headerLength).append("})");
-		sb.append("(?:.{").append(8).append("})*"); // for any number of tags
-		for (Map.Entry<Integer, List<Integer>> entry : tagHash.entrySet()) {
-			try {
-				addId(sb, ByteUtil.intToBytes(entry.getKey()));
-				List<Integer> hashValues = entry.getValue();
-				sb.append("(?:");
-				boolean first = true;
-				for(Integer value : hashValues){
-					if(!first){
-						sb.append('|');
-					}
-					addId(sb, ByteUtil.intToBytes(value));
-					first = false;
-				}
-				sb.append(")");
-				sb.append("(?:.{").append(8).append("})*"); // for any number of tags
-			} catch (Exception ex) {
-				LOG.error("constructing regex error", ex);
-			}
-		}
-		sb.append("$");
-		if(LOG.isDebugEnabled()) LOG.debug("Tag filter pattern is " + sb.toString());
-		return sb.toString();
-	}
+    /**
+     * Appends the given ID to the given buffer, followed by "\\E". [steal it from opentsdb, thanks opentsdb
+     * :) https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java]
+     */
+    private static void addId(final StringBuilder buf, final byte[] id) {
+        buf.append("\\Q");
+        boolean backslash = false;
+        for (final byte b : id) {
+            buf.append((char)(b & 0xFF));
+            if (b == 'E' && backslash) { // If we saw a `\' and now we have a `E'.
+                // So we just terminated the quoted section because we just added \E
+                // to `buf'. So let's put a litteral \E now and start quoting again.
+                buf.append("\\\\E\\Q");
+            } else {
+                backslash = b == '\\';
+            }
+        }
+        buf.append("\\E");
+    }
 
-	/**
-	 * Convert ComparisonOperator to native HBase CompareOp
-	 *
-	 * Support:
-	 *      =, =~,CONTAINS,<,<=,>,>=,!=,!=~
-	 *
-	 * @param comp
-	 * @return
-	 */
-	protected static CompareOp convertToHBaseCompareOp(ComparisonOperator comp) {
-		if(comp == ComparisonOperator.EQUAL || comp == ComparisonOperator.LIKE
-				|| comp == ComparisonOperator.CONTAINS
-				|| comp == ComparisonOperator.IN
-				|| comp == ComparisonOperator.IS
-				) {
-			return CompareOp.EQUAL;
-		}else if(comp == ComparisonOperator.LESS) {
-			return CompareOp.LESS;
-		} else if(comp == ComparisonOperator.LESS_OR_EQUAL){
-			return CompareOp.LESS_OR_EQUAL;
-		}else if(comp == ComparisonOperator.GREATER) {
-			return CompareOp.GREATER;
-		} else if(comp == ComparisonOperator.GREATER_OR_EQUAL){
-			return CompareOp.GREATER_OR_EQUAL;
-		} else if(comp == ComparisonOperator.NOT_EQUAL
-				|| comp == ComparisonOperator.NOT_LIKE
-				|| comp == ComparisonOperator.NOT_CONTAINS
-				|| comp == ComparisonOperator.IS_NOT
-				|| comp == ComparisonOperator.NOT_IN)
-		{
-			return CompareOp.NOT_EQUAL;
-		} else {
-			LOG.error("{} operation is not supported now\n", comp);
-			throw new IllegalArgumentException("Illegal operation: "+comp+ ", avaliable options: "+ Arrays.toString(ComparisonOperator.values()));
-		}
-	}
+    @SuppressWarnings("unused")
+    private static void addId(final StringBuilder buf, final String id) {
+        buf.append("\\Q");
+        int len = id.length() - 1;
+        boolean backslash = false;
+        for (int i = 0; i < len; i++) {
+            char c = id.charAt(i);
+            buf.append(c);
+            if (c == 'E' && backslash) { // If we saw a `\' and now we have a `E'.
+                // So we just terminated the quoted section because we just added \E
+                // to `buf'. So let's put a litteral \E now and start quoting again.
+                buf.append("\\\\E\\Q");
+            } else {
+                backslash = c == '\\';
+            }
+        }
+        buf.append("\\E");
+    }
 
-	protected static CompareOp getHBaseCompareOp(String comp) {
-		return convertToHBaseCompareOp(ComparisonOperator.locateOperator(comp));
-	}
+    /**
+     * one search tag may have multiple values which have OR relationship, and relationship between different
+     * search tags is AND the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2"
+     *
+     * @param tags
+     * @return
+     */
+    protected String buildTagFilterRegex(Map<String, List<String>> tags) {
+        // TODO need consider that \E could be part of tag, refer to
+        // https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java
+        final SortedMap<Integer, List<Integer>> tagHash = new TreeMap<Integer, List<Integer>>();
+        final int numOfPartitionFields = (ed.getPartitions() == null) ? 0 : ed.getPartitions().length;
+        for (Map.Entry<String, List<String>> entry : tags.entrySet()) {
+            String tagName = entry.getKey();
+            // Ignore tag if the tag is one of partition fields
+            if (ed.isPartitionTag(tagName)) {
+                continue;
+            }
+            List<String> stringValues = entry.getValue();
+            List<Integer> hashValues = new ArrayList<Integer>(stringValues.size());
+            for (String value : stringValues) {
+                hashValues.add(value.hashCode());
+            }
+            tagHash.put(tagName.hashCode(), hashValues);
+        }
+
+        // header = prefix(4 bytes) + partition_hashes(4*N bytes) + timestamp (8 bytes)
+        final int headerLength = 4 + numOfPartitionFields * 4 + 8;
+
+        // <tag1:4><value1:4> ... <tagn:4><valuen:4>
+        StringBuilder sb = new StringBuilder();
+        sb.append("(?s)");
+        sb.append("^(?:.{").append(headerLength).append("})");
+        sb.append("(?:.{").append(8).append("})*"); // for any number of tags
+        for (Map.Entry<Integer, List<Integer>> entry : tagHash.entrySet()) {
+            try {
+                addId(sb, ByteUtil.intToBytes(entry.getKey()));
+                List<Integer> hashValues = entry.getValue();
+                sb.append("(?:");
+                boolean first = true;
+                for (Integer value : hashValues) {
+                    if (!first) {
+                        sb.append('|');
+                    }
+                    addId(sb, ByteUtil.intToBytes(value));
+                    first = false;
+                }
+                sb.append(")");
+                sb.append("(?:.{").append(8).append("})*"); // for any number of tags
+            } catch (Exception ex) {
+                LOG.error("constructing regex error", ex);
+            }
+        }
+        sb.append("$");
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Tag filter pattern is " + sb.toString());
+        }
+        return sb.toString();
+    }
+
+    /**
+     * Convert ComparisonOperator to native HBase CompareOp Support: =, =~,CONTAINS,<,<=,>,>=,!=,!=~
+     *
+     * @param comp
+     * @return
+     */
+    protected static CompareOp convertToHBaseCompareOp(ComparisonOperator comp) {
+        if (comp == ComparisonOperator.EQUAL || comp == ComparisonOperator.LIKE
+            || comp == ComparisonOperator.CONTAINS || comp == ComparisonOperator.IN
+            || comp == ComparisonOperator.IS) {
+            return CompareOp.EQUAL;
+        } else if (comp == ComparisonOperator.LESS) {
+            return CompareOp.LESS;
+        } else if (comp == ComparisonOperator.LESS_OR_EQUAL) {
+            return CompareOp.LESS_OR_EQUAL;
+        } else if (comp == ComparisonOperator.GREATER) {
+            return CompareOp.GREATER;
+        } else if (comp == ComparisonOperator.GREATER_OR_EQUAL) {
+            return CompareOp.GREATER_OR_EQUAL;
+        } else if (comp == ComparisonOperator.NOT_EQUAL || comp == ComparisonOperator.NOT_LIKE
+                   || comp == ComparisonOperator.NOT_CONTAINS || comp == ComparisonOperator.IS_NOT
+                   || comp == ComparisonOperator.NOT_IN) {
+            return CompareOp.NOT_EQUAL;
+        } else {
+            LOG.error("{} operation is not supported now\n", comp);
+            throw new IllegalArgumentException("Illegal operation: " + comp + ", avaliable options: "
+                                               + Arrays.toString(ComparisonOperator.values()));
+        }
+    }
+
+    protected static CompareOp getHBaseCompareOp(String comp) {
+        return convertToHBaseCompareOp(ComparisonOperator.locateOperator(comp));
+    }
 }
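
Side note for reviewers (not part of the patch): `buildQualifierRegex` above only anchors the user-supplied LIKE / NOT LIKE value with `^` and `$` before it is handed to `RegexStringComparator`, so the value is effectively a regular expression that has to cover the whole qualifier string. A minimal JDK-only sketch of that anchoring effect, using a made-up value (how `RegexStringComparator` applies the pattern internally is assumed here, not shown in this diff):

```java
import java.util.regex.Pattern;

// Illustrative sketch only; "host-\\d+" is a hypothetical LIKE value.
public class QualifierRegexSketch {
    public static void main(String[] args) {
        String likeValue = "host-\\d+";
        String regex = "^" + likeValue + "$";   // what buildQualifierRegex returns

        // With the anchors, a find-style match must cover the whole value.
        System.out.println(Pattern.compile(regex).matcher("host-42").find());   // true
        System.out.println(Pattern.compile(regex).matcher("a-host-42").find()); // false
    }
}
```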
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java
index 6cdc77b..340c33b 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/QualifierFilterEntity.java
@@ -24,82 +24,86 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
-public class QualifierFilterEntity implements Writable{
-	public String key;
-	public String value;
-	public ComparisonOperator op;
-	public TokenType valueType;
-	public TokenType keyType;
+public class QualifierFilterEntity implements Writable {
+    public String key;
+    public String value;
+    public ComparisonOperator op;
+    public TokenType valueType;
+    public TokenType keyType;
 
-	public QualifierFilterEntity(){}
-	public QualifierFilterEntity(String key, String value, ComparisonOperator comp, TokenType keyType, TokenType valueType) {
-		super();
-		this.key = key;
-		this.value = value;
-		this.op = comp;
-		this.keyType = keyType;
-		this.valueType = valueType;
-	}
+    public QualifierFilterEntity() {
+    }
 
-	public String getKey() {
-		return key;
-	}
+    public QualifierFilterEntity(String key, String value, ComparisonOperator comp, TokenType keyType,
+                                 TokenType valueType) {
+        super();
+        this.key = key;
+        this.value = value;
+        this.op = comp;
+        this.keyType = keyType;
+        this.valueType = valueType;
+    }
 
-	public void setKey(String key) {
-		this.key = key;
-	}
+    public String getKey() {
+        return key;
+    }
 
-	public String getValue() {
-		return value;
-	}
+    public void setKey(String key) {
+        this.key = key;
+    }
 
-	public void setValue(String value) {
-		this.value = value;
-	}
+    public String getValue() {
+        return value;
+    }
 
-	public ComparisonOperator getOp() {
-		return op;
-	}
+    public void setValue(String value) {
+        this.value = value;
+    }
 
-	public void setOp(ComparisonOperator op) {
-		this.op = op;
-	}
+    public ComparisonOperator getOp() {
+        return op;
+    }
 
-	public TokenType getValueType() {
-		return valueType;
-	}
+    public void setOp(ComparisonOperator op) {
+        this.op = op;
+    }
 
-	public void setValueType(TokenType valueType) {
-		this.valueType = valueType;
-	}
+    public TokenType getValueType() {
+        return valueType;
+    }
 
-	public void setKeyType(TokenType keyType){
-		this.keyType = keyType;
-	}
-	public TokenType getKeyType(){
-		return this.keyType;
-	}
+    public void setValueType(TokenType valueType) {
+        this.valueType = valueType;
+    }
 
-	@Override
-	public String toString() {
-		return String.format("%s %s %s",this.key,this.op,this.value);
-	}
+    public void setKeyType(TokenType keyType) {
+        this.keyType = keyType;
+    }
 
-	@Override
-	public void write(DataOutput out) throws IOException {
-		out.writeUTF(this.key);
-		out.writeUTF(this.getValue());
-		out.writeUTF(this.op.name());
-		out.writeUTF(this.keyType.name());
-		out.writeUTF(this.valueType.name());
-	}
+    public TokenType getKeyType() {
+        return this.keyType;
+    }
 
-	@Override
-	public void readFields(DataInput in) throws IOException {
-		this.key = in.readUTF();
-		this.value = in.readUTF();
-		this.op = ComparisonOperator.valueOf(in.readUTF());
-		this.keyType = TokenType.valueOf(in.readUTF());
-		this.valueType = TokenType.valueOf(in.readUTF());
-	}
-}
\ No newline at end of file
+    @Override
+    public String toString() {
+        return String.format("%s %s %s", this.key, this.op, this.value);
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        out.writeUTF(this.key);
+        out.writeUTF(this.getValue());
+        out.writeUTF(this.op.name());
+        out.writeUTF(this.keyType.name());
+        out.writeUTF(this.valueType.name());
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        this.key = in.readUTF();
+        this.value = in.readUTF();
+        this.op = ComparisonOperator.valueOf(in.readUTF());
+        this.keyType = TokenType.valueOf(in.readUTF());
+        this.valueType = TokenType.valueOf(in.readUTF());
+    }
+}
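
Side note (not part of the patch): `write` calls `writeUTF` on all five fields unconditionally, so a `QualifierFilterEntity` has to be fully populated before it is serialized. Below is a hedged Writable round-trip sketch; the Eagle imports (`QualifierFilterEntity`, `ComparisonOperator`, `TokenType`) are omitted because their packages are not shown in this hunk, and `ComparisonOperator.EQUAL` / `TokenType.NULL` are used only because they appear elsewhere in this patch:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;

// Hypothetical round-trip; assumes the Eagle classes above are on the classpath.
public class QualifierFilterEntityRoundTrip {
    public static void main(String[] args) throws Exception {
        QualifierFilterEntity original = new QualifierFilterEntity(
            "someField", "someValue", ComparisonOperator.EQUAL, TokenType.NULL, TokenType.NULL);

        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        original.write(new DataOutputStream(buffer));             // Writable#write

        QualifierFilterEntity copy = new QualifierFilterEntity();
        copy.readFields(new DataInputStream(
            new ByteArrayInputStream(buffer.toByteArray())));      // Writable#readFields

        System.out.println(copy);                                  // "key op value" via toString()
    }
}
```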
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java
index a4b97ea..91a6939 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/RowValueFilter.java
@@ -33,32 +33,34 @@
 import java.util.List;
 
 /**
- * TODO: Critical performance problem!!!
- * TODO: Refactor to specified multi-column filter so that avoid return all qualifier columns from region server to client side
+ * TODO: Critical performance problem!!! TODO: Refactor to specified multi-column filter so that avoid return
+ * all qualifier columns from region server to client side
  *
  * @since 2014/11/17
  */
 public class RowValueFilter extends FilterBase {
-    private final static Logger LOG = LoggerFactory.getLogger(RowValueFilter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(RowValueFilter.class);
     private boolean filterOutRow = false;
     private WritableComparable<List<KeyValue>> comparator;
 
     // TODO: Use qualifiers to reduce network tranfer
-//    private List<byte[]> qualifiers;
-    public RowValueFilter(){}
+    // private List<byte[]> qualifiers;
+    public RowValueFilter() {
+    }
 
     /**
      * Filter out row if WritableComparable.compareTo return 0
+     *
      * @param comparator <code>WritableComparable[List[KeyValue]]</code>
      */
-    public RowValueFilter(WritableComparable<List<KeyValue>> comparator){
+    public RowValueFilter(WritableComparable<List<KeyValue>> comparator) {
         this.comparator = comparator;
     }
 
-//    public RowValueFilter(List<byte[]> qualifiers,WritableComparable<List<KeyValue>> comparator){
-//        this.qualifiers = qualifiers;
-//        this.comparator = comparator;
-//    }
+    // public RowValueFilter(List<byte[]> qualifiers,WritableComparable<List<KeyValue>> comparator){
+    // this.qualifiers = qualifiers;
+    // this.comparator = comparator;
+    // }
 
     /**
      * Old interface in hbase-0.94
@@ -77,7 +79,7 @@
      * @param in
      * @throws IOException
      */
-//    @Override
+    // @Override
     @Deprecated
     public void readFields(DataInput in) throws IOException {
         this.comparator = new BooleanExpressionComparator();
@@ -85,7 +87,8 @@
     }
 
     /**
-     * TODO: Currently still use older serialization method from hbase-0.94, need to migrate into ProtoBuff based
+     * TODO: Currently still use older serialization method from hbase-0.94, need to migrate into ProtoBuff
+     * based
      *
      * @return
      * @throws IOException
@@ -98,23 +101,24 @@
     }
 
     /**
-     * TODO: Currently still use older serialization method from hbase-0.94, need to migrate into ProtoBuff based
+     * TODO: Currently still use older serialization method from hbase-0.94, need to migrate into ProtoBuff
+     * based
      */
     // Override static method
-    public static Filter parseFrom(final byte [] pbBytes) throws DeserializationException {
+    public static Filter parseFrom(final byte[] pbBytes) throws DeserializationException {
         ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(pbBytes);
         RowValueFilter filter = new RowValueFilter();
         try {
             filter.readFields(byteArrayDataInput);
         } catch (IOException e) {
-            LOG.error("Got error to deserialize RowValueFilter from PB bytes",e);
+            LOG.error("Got error to deserialize RowValueFilter from PB bytes", e);
             throw new DeserializationException(e);
         }
         return filter;
     }
 
     @Override
-    public boolean hasFilterRow(){
+    public boolean hasFilterRow() {
         return true;
     }
 
@@ -124,21 +128,21 @@
     }
 
     @Override
+    public boolean filterRow() {
+        return filterOutRow;
+    }
+
+    @Override
     public void reset() {
         this.filterOutRow = false;
     }
 
     @Override
-    public boolean filterRow(){
-        return filterOutRow;
-    }
-
-    @Override
     public String toString() {
-        return super.toString()+" ( "+this.comparator.toString()+" )";
+        return super.toString() + " ( " + this.comparator.toString() + " )";
     }
 
-//    public List<byte[]> getQualifiers() {
-//        return qualifiers;
-//    }
-}
\ No newline at end of file
+    // public List<byte[]> getQualifiers() {
+    // return qualifiers;
+    // }
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java
index ecaf8cc..74a13c1 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/filter/TypedByteArrayComparator.java
@@ -32,15 +32,12 @@
 import java.util.Map;
 
 /**
- * <h1>TypedByteArrayComparator</h1>
- *
- * Compare byte array: <code>byte[] value</code> with class type: <code>Class type</code>
- *
- * <br/>
+ * <h1>TypedByteArrayComparator</h1> Compare byte array: <code>byte[] value</code> with class type:
+ * <code>Class type</code> <br/>
  * <br/>
  * Built-in support:
  *
- *  <pre>
+ * <pre>
  *    Double
 *    double
 *    Integer
@@ -51,14 +48,14 @@
 *    short
 *    Boolean
 *    boolean
- *  </pre>
+ * </pre>
  *
- *  And can be extend by defining new {@link RawComparator} and register with  {@link #define(Class type, RawComparator comparator)}
- * <br/>
+ * And can be extend by defining new {@link RawComparator} and register with
+ * {@link #define(Class type, RawComparator comparator)} <br/>
  * <br/>
  */
 public class TypedByteArrayComparator extends ByteArrayComparable {
-    private final static Logger LOG = LoggerFactory.getLogger(TypedByteArrayComparator.class);
+    private static final Logger LOG = LoggerFactory.getLogger(TypedByteArrayComparator.class);
 
     private Class type;
 
@@ -69,34 +66,38 @@
     * Default constructor for writable
     */
    @SuppressWarnings("unused")
-    public TypedByteArrayComparator(){
+    public TypedByteArrayComparator() {
        super(null);
    }
 
-    public TypedByteArrayComparator(byte[] value, Class type){
+    public TypedByteArrayComparator(byte[] value, Class type) {
        super(value);
        this.type = type;
        this.comparator = get(this.type);
-        if(this.comparator == null) throw new IllegalArgumentException("No comparator found for class: "+type);
+        if (this.comparator == null) {
+            throw new IllegalArgumentException("No comparator found for class: " + type);
+        }
    }
 
    /**
     * @param in hbase-0.94 interface
     * @throws IOException
     */
-//    @Override
+    // @Override
    public void readFields(DataInput in) throws IOException {
-//        super.readFields(in);
+        // super.readFields(in);
        try {
            String _type = in.readUTF();
            type = _primitiveTypeClassMap.get(_type);
-            if(type == null) {
+            if (type == null) {
                type = Class.forName(_type);
            }
            comparator = get(type);
-            if(comparator == null) throw new IllegalArgumentException("No comparator found for class: "+type);
+            if (comparator == null) {
+                throw new IllegalArgumentException("No comparator found for class: " + type);
+            }
        } catch (ClassNotFoundException e) {
-            throw new IOException(e.getMessage(),e);
+            throw new IOException(e.getMessage(), e);
        }
    }
 
@@ -104,9 +105,9 @@
     * @param out hbase-0.94 interface
     * @throws IOException
     */
-//    @Override
+    // @Override
    public void write(DataOutput out) throws IOException {
-//        super.write(out);
+        // super.write(out);
        String typeName = type.getName();
        out.writeUTF(typeName);
    }
@@ -123,7 +124,7 @@
            this.write(byteArrayDataOutput);
            return byteArrayDataOutput.toByteArray();
        } catch (IOException e) {
-            LOG.error("Failed to serialize due to: "+e.getMessage(),e);
+            LOG.error("Failed to serialize due to: " + e.getMessage(), e);
            throw new RuntimeException(e);
        }
    }
@@ -135,14 +136,13 @@
     * @return Comparator instance
     * @throws DeserializationException
     */
-    public static TypedByteArrayComparator parseFrom(final byte [] bytes)
-            throws DeserializationException {
+    public static TypedByteArrayComparator parseFrom(final byte[] bytes) throws DeserializationException {
        TypedByteArrayComparator comparator = new TypedByteArrayComparator();
        ByteArrayDataInput byteArrayDataInput = ByteStreams.newDataInput(bytes);
        try {
            comparator.readFields(byteArrayDataInput);
        } catch (IOException e) {
-            LOG.error("Got error to deserialize TypedByteArrayComparator from PB bytes",e);
+            LOG.error("Got error to deserialize TypedByteArrayComparator from PB bytes", e);
            throw new DeserializationException(e);
        }
        return comparator;
@@ -158,33 +158,35 @@
     * <li>Try registered comparator</li>
     * <li>If not found, try all possible WritableComparator</li>
     * </ol>
-     *
     * If not found finally, throw new IllegalArgumentException("unable to get comparator for class: "+type);
     *
     * @param type value type class
     * @return RawComparator
     */
-    public static RawComparator get(Class type){
+    public static RawComparator get(Class type) {
        RawComparator comparator = null;
        try {
            comparator = _typedClassComparator.get(type);
-        }catch (ClassCastException ex){
+        } catch (ClassCastException ex) {
            // ignore
        }
        try {
-            if (comparator == null) comparator = WritableComparator.get(type);
-        }catch (ClassCastException ex){
+            if (comparator == null) {
+                comparator = WritableComparator.get(type);
+            }
+        } catch (ClassCastException ex) {
            // ignore
        }
        return comparator;
    }
 
-    private final static Map<Class,RawComparator> _typedClassComparator = new HashMap<Class, RawComparator>();
-    public static void define(Class type, RawComparator comparator){
-        _typedClassComparator.put(type,comparator);
+    private static final Map<Class, RawComparator> _typedClassComparator = new HashMap<Class, RawComparator>();
+
+    public static void define(Class type, RawComparator comparator) {
+        _typedClassComparator.put(type, comparator);
    }
 
-    static{
+    static {
        define(Double.class, WritableComparator.get(DoubleWritable.class));
        define(double.class, WritableComparator.get(DoubleWritable.class));
        define(Integer.class, WritableComparator.get(IntWritable.class));
@@ -200,14 +202,15 @@
    /**
     * Because {@link Class#forName } can't find class for primitive type
     */
-    private final static Map<String,Class> _primitiveTypeClassMap = new HashMap<String, Class>();
+    private static final Map<String, Class> _primitiveTypeClassMap = new HashMap<String, Class>();
+
    static {
-        _primitiveTypeClassMap.put(int.class.getName(),int.class);
-        _primitiveTypeClassMap.put(double.class.getName(),double.class);
-        _primitiveTypeClassMap.put(long.class.getName(),long.class);
-        _primitiveTypeClassMap.put(short.class.getName(),short.class);
-        _primitiveTypeClassMap.put(boolean.class.getName(),boolean.class);
-        _primitiveTypeClassMap.put(char.class.getName(),char.class);
-        _primitiveTypeClassMap.put(byte.class.getName(),byte.class);
+        _primitiveTypeClassMap.put(int.class.getName(), int.class);
+        _primitiveTypeClassMap.put(double.class.getName(), double.class);
+        _primitiveTypeClassMap.put(long.class.getName(), long.class);
+        _primitiveTypeClassMap.put(short.class.getName(), short.class);
+        _primitiveTypeClassMap.put(boolean.class.getName(), boolean.class);
+        _primitiveTypeClassMap.put(char.class.getName(), char.class);
+        _primitiveTypeClassMap.put(byte.class.getName(), byte.class);
    }
-}
\ No newline at end of file
+}
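
Side note (not part of the patch): the comparator registry is extensible through `define(Class, RawComparator)`, and `get(Class)` first consults that map before falling back to `WritableComparator.get(...)`. A hedged sketch of registering an additional value type, following the same pattern as the static block above (Float is only an example; the `TypedByteArrayComparator` import is omitted and assumed on the classpath):

```java
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;

// Hypothetical registration of an extra type, mirroring the built-in static block.
public class RegisterFloatComparator {
    public static void main(String[] args) {
        TypedByteArrayComparator.define(Float.class, WritableComparator.get(FloatWritable.class));
        TypedByteArrayComparator.define(float.class, WritableComparator.get(FloatWritable.class));

        RawComparator comparator = TypedByteArrayComparator.get(Float.class);
        System.out.println(comparator != null);   // true once registered
    }
}
```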
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java
index 418ab33..7a32077 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexLogReader.java
@@ -24,22 +24,24 @@
 
 public abstract class IndexLogReader implements LogReader {
 
-	// TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should use SingleColumnValueExcludeFilter, 
-	// but it's complicated in current implementation. 
-	protected static void workaroundHBASE2198(Get get, Filter filter,byte[][] qualifiers) {
-		if (filter instanceof SingleColumnValueFilter) {
-			if(qualifiers == null) {
-				get.addFamily(((SingleColumnValueFilter) filter).getFamily());
-			}else{
-				get.addColumn(((SingleColumnValueFilter) filter).getFamily(), ((SingleColumnValueFilter) filter).getQualifier());
-			}
-			return;
-		}
-		if (filter instanceof FilterList) {
-			for (Filter f : ((FilterList)filter).getFilters()) {
-				workaroundHBASE2198(get, f,qualifiers);
-			}
-		}
-	}
+    // TODO: Work around https://issues.apache.org/jira/browse/HBASE-2198. More graceful implementation should
+    // use SingleColumnValueExcludeFilter,
+    // but it's complicated in current implementation.
+    protected static void workaroundHBASE2198(Get get, Filter filter, byte[][] qualifiers) {
+        if (filter instanceof SingleColumnValueFilter) {
+            if (qualifiers == null) {
+                get.addFamily(((SingleColumnValueFilter)filter).getFamily());
+            } else {
+                get.addColumn(((SingleColumnValueFilter)filter).getFamily(),
+                              ((SingleColumnValueFilter)filter).getQualifier());
+            }
+            return;
+        }
+        if (filter instanceof FilterList) {
+            for (Filter f : ((FilterList)filter).getFilters()) {
+                workaroundHBASE2198(get, f, qualifiers);
+            }
+        }
+    }
 
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java
index 9e059f2..579755f 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/IndexStreamReader.java
@@ -26,69 +26,69 @@
 import java.io.IOException;
 import java.util.List;
 
-public abstract class IndexStreamReader  extends StreamReader {
-	protected final IndexDefinition indexDef;
-	protected final SearchCondition condition;
-	protected final List<byte[]> indexRowkeys;
-	protected LogReader<InternalLog> reader;
-	protected long lastTimestamp = 0;
-	protected long firstTimestamp = 0;
-	
-	protected static final Logger LOG = LoggerFactory.getLogger(IndexStreamReader.class);
+public abstract class IndexStreamReader extends StreamReader {
+    protected final IndexDefinition indexDef;
+    protected final SearchCondition condition;
+    protected final List<byte[]> indexRowkeys;
+    protected LogReader<InternalLog> reader;
+    protected long lastTimestamp = 0;
+    protected long firstTimestamp = 0;
 
-	public IndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List<byte[]> indexRowkeys) {
-		this.indexDef = indexDef;
-		this.condition = condition;
-		this.indexRowkeys = indexRowkeys;
-		this.reader = null;
-	}
+    protected static final Logger LOG = LoggerFactory.getLogger(IndexStreamReader.class);
 
-	@Override
-	public long getLastTimestamp() {
-		return lastTimestamp;
-	}
+    public IndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List<byte[]> indexRowkeys) {
+        this.indexDef = indexDef;
+        this.condition = condition;
+        this.indexRowkeys = indexRowkeys;
+        this.reader = null;
+    }
 
-	@Override
-	public long getFirstTimestamp() {
-		return this.firstTimestamp;
-	}
+    @Override
+    public long getLastTimestamp() {
+        return lastTimestamp;
+    }
 
-	@Override
-	public void readAsStream() throws Exception {
-		if (reader == null) {
-			reader = createIndexReader();
-		}
-		final EntityDefinition entityDef = indexDef.getEntityDefinition();
-		try{
-			reader.open();
-			InternalLog log;
-			int count = 0;
-			while ((log = reader.read()) != null) {
-				TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(log, entityDef);
-				entity.setSerializeAlias(condition.getOutputAlias());
-				entity.setSerializeVerbose(condition.isOutputVerbose());
+    @Override
+    public long getFirstTimestamp() {
+        return this.firstTimestamp;
+    }
 
-				if (lastTimestamp == 0 || lastTimestamp < entity.getTimestamp()) {
-					lastTimestamp = entity.getTimestamp();
-				}
-				if(firstTimestamp == 0 || firstTimestamp > entity.getTimestamp()){
-					firstTimestamp = entity.getTimestamp();
-				}
-				for(EntityCreationListener l : _listeners){
-					l.entityCreated(entity);
-				}
-				if(++count == condition.getPageSize()) {
-					break;
-				}
-			}
-		}catch(IOException ioe){
-			LOG.error("Fail reading log", ioe);
-			throw ioe;
-		}finally{
-			reader.close();
-		}		
-	}
+    @Override
+    public void readAsStream() throws Exception {
+        if (reader == null) {
+            reader = createIndexReader();
+        }
+        final EntityDefinition entityDef = indexDef.getEntityDefinition();
+        try {
+            reader.open();
+            InternalLog log;
+            int count = 0;
+            while ((log = reader.read()) != null) {
+                TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(log, entityDef);
+                entity.setSerializeAlias(condition.getOutputAlias());
+                entity.setSerializeVerbose(condition.isOutputVerbose());
 
-	protected abstract LogReader createIndexReader();
-	
+                if (lastTimestamp == 0 || lastTimestamp < entity.getTimestamp()) {
+                    lastTimestamp = entity.getTimestamp();
+                }
+                if (firstTimestamp == 0 || firstTimestamp > entity.getTimestamp()) {
+                    firstTimestamp = entity.getTimestamp();
+                }
+                for (EntityCreationListener l : listeners) {
+                    l.entityCreated(entity);
+                }
+                if (++count == condition.getPageSize()) {
+                    break;
+                }
+            }
+        } catch (IOException ioe) {
+            LOG.error("Fail reading log", ioe);
+            throw ioe;
+        } finally {
+            reader.close();
+        }
+    }
+
+    protected abstract LogReader createIndexReader();
+
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java
index e6a5c96..4daf695 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexLogReader.java
@@ -28,170 +28,172 @@
 import java.util.ArrayList;
 import java.util.List;
 
-
 public class NonClusteredIndexLogReader extends IndexLogReader {
-	private final IndexDefinition indexDef;
-	private final List<byte[]> indexRowkeys;
-	private final byte[][] qualifiers;
-	private final Filter filter;
-	private HTableInterface tbl;
-	private boolean isOpen = false;
-	private Result[] results;
-	private int index = -1;
-	private final List<Scan> scans;
-	private int currentScanIndex = 0;
-	private ResultScanner currentResultScanner;
+    private final IndexDefinition indexDef;
+    private final List<byte[]> indexRowkeys;
+    private final byte[][] qualifiers;
+    private final Filter filter;
+    private HTableInterface tbl;
+    private boolean isOpen = false;
+    private Result[] results;
+    private int index = -1;
+    private final List<Scan> scans;
+    private int currentScanIndex = 0;
+    private ResultScanner currentResultScanner;
 
-	// Max tag key/value. 
-	private static final byte[] MAX_TAG_VALUE_BYTES = {(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF,(byte) 0XFF};
-	private static final int BATCH_MULTIGET_SIZE = 1000;
+    // Max tag key/value.
+    private static final byte[] MAX_TAG_VALUE_BYTES = {
+        (byte)0XFF, (byte)0XFF, (byte)0XFF, (byte)0XFF,
+        (byte)0XFF, (byte)0XFF, (byte)0XFF, (byte)0XFF,
+        (byte)0XFF
+    };
+    private static final int BATCH_MULTIGET_SIZE = 1000;
 
-	public NonClusteredIndexLogReader(IndexDefinition indexDef, List<byte[]> indexRowkeys, byte[][] qualifiers, Filter filter) {
-		this.indexDef = indexDef;
-		this.indexRowkeys = indexRowkeys;
-		this.qualifiers = qualifiers;
-		this.filter = filter;
-		this.scans = buildScans();
-	}
-	
+    public NonClusteredIndexLogReader(IndexDefinition indexDef, List<byte[]> indexRowkeys,
+                                      byte[][] qualifiers, Filter filter) {
+        this.indexDef = indexDef;
+        this.indexRowkeys = indexRowkeys;
+        this.qualifiers = qualifiers;
+        this.filter = filter;
+        this.scans = buildScans();
+    }
 
-	private List<Scan> buildScans() {
-		final ArrayList<Scan> result = new ArrayList<Scan>(indexRowkeys.size());
-		for (byte[] rowkey : indexRowkeys) {
-			Scan s = new Scan();
-			s.setStartRow(rowkey);
-			// In rowkey the tag key/value is sorted by the hash code of the key, so MAX_TAG_VALUE_BYTES is enough as the end key
-			final byte[] stopRowkey = ByteUtil.concat(rowkey, MAX_TAG_VALUE_BYTES);
-			s.setStopRow(stopRowkey);
-			// TODO the # of cached rows should be minimum of (pagesize and 100)
-			int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize();
-			s.setCaching(cs);
-			// TODO not optimized for all applications
-			s.setCacheBlocks(true);
-			// scan specified columnfamily for all qualifiers
-			s.addFamily(indexDef.getEntityDefinition().getColumnFamily().getBytes());
-			result.add(s);
-		}
-		return result;
-	}
+    private List<Scan> buildScans() {
+        final ArrayList<Scan> result = new ArrayList<Scan>(indexRowkeys.size());
+        for (byte[] rowkey : indexRowkeys) {
+            Scan s = new Scan();
+            s.setStartRow(rowkey);
+            // In rowkey the tag key/value is sorted by the hash code of the key, so MAX_TAG_VALUE_BYTES is
+            // enough as the end key
+            final byte[] stopRowkey = ByteUtil.concat(rowkey, MAX_TAG_VALUE_BYTES);
+            s.setStopRow(stopRowkey);
+            // TODO the # of cached rows should be minimum of (pagesize and 100)
+            int cs = EagleConfigFactory.load().getHBaseClientScanCacheSize();
+            s.setCaching(cs);
+            // TODO not optimized for all applications
+            s.setCacheBlocks(true);
+            // scan specified columnfamily for all qualifiers
+            s.addFamily(indexDef.getEntityDefinition().getColumnFamily().getBytes());
+            result.add(s);
+        }
+        return result;
+    }
 
-	@Override
-	public void open() throws IOException {
-		if (isOpen)
-			return; // silently return
-		try {
-			tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable());
-		} catch (RuntimeException ex) {
-			throw new IOException(ex);
-		}
-		currentScanIndex = 0;
-		openNewScan();
-		fillResults();
-	}
+    @Override
+    public void open() throws IOException {
+        if (isOpen) {
+            return; // silently return
+        }
+        try {
+            tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable());
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
+        currentScanIndex = 0;
+        openNewScan();
+        fillResults();
+    }
 
-	private boolean openNewScan() throws IOException {
-		closeCurrentScanResult();
-		if (currentScanIndex >= scans.size()) {
-			return false;
-		}
-		final Scan scan = scans.get(currentScanIndex++);
-		currentResultScanner = tbl.getScanner(scan);
-		return true;
-	}
+    private boolean openNewScan() throws IOException {
+        closeCurrentScanResult();
+        if (currentScanIndex >= scans.size()) {
+            return false;
+        }
+        final Scan scan = scans.get(currentScanIndex++);
+        currentResultScanner = tbl.getScanner(scan);
+        return true;
+    }
 
-	private void fillResults() throws IOException {
-		if (currentResultScanner == null) {
-			return;
-		}
-		index = 0;
-		int count = 0;
-		Result r = null;
+    private void fillResults() throws IOException {
+        if (currentResultScanner == null) {
+            return;
+        }
+        index = 0;
+        int count = 0;
+        Result r = null;
         final List<Get> gets = new ArrayList<Get>(BATCH_MULTIGET_SIZE);
-		final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes();
-		while (count < BATCH_MULTIGET_SIZE) {
-			r = currentResultScanner.next();
-			if (r == null) {
-				if (openNewScan()) {
-					continue;
-				} else {
-					break;
-				}
-			}
-			for (byte[] rowkey : r.getFamilyMap(family).keySet()) {
-				if (rowkey.length == 0) {	// invalid rowkey
-					continue;
-				}
-				final Get get = new Get(rowkey);
-                if (filter != null) {
-                	get.setFilter(filter);
+        final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes();
+        while (count < BATCH_MULTIGET_SIZE) {
+            r = currentResultScanner.next();
+            if (r == null) {
+                if (openNewScan()) {
+                    continue;
+                } else {
+                    break;
                 }
-				if(qualifiers != null) {
-					for (int j = 0; j < qualifiers.length; ++j) {
-						// Return the specified qualifiers
-						get.addColumn(family, qualifiers[j]);
-					}
-				}else {
-					get.addFamily(family);
-				}
-        		workaroundHBASE2198(get, filter,qualifiers);
-				gets.add(get);
-				++count;
-			}
-		}
-		if (count == 0) {
-			results = null;
-			return;
-		}
-		results = tbl.get(gets);
-		if (results == null || results.length == 0) {
-			fillResults();
-		}
-	}
+            }
+            for (byte[] rowkey : r.getFamilyMap(family).keySet()) {
+                if (rowkey.length == 0) { // invalid rowkey
+                    continue;
+                }
+                final Get get = new Get(rowkey);
+                if (filter != null) {
+                    get.setFilter(filter);
+                }
+                if (qualifiers != null) {
+                    for (int j = 0; j < qualifiers.length; ++j) {
+                        // Return the specified qualifiers
+                        get.addColumn(family, qualifiers[j]);
+                    }
+                } else {
+                    get.addFamily(family);
+                }
+                workaroundHBASE2198(get, filter, qualifiers);
+                gets.add(get);
+                ++count;
+            }
+        }
+        if (count == 0) {
+            results = null;
+            return;
+        }
+        results = tbl.get(gets);
+        if (results == null || results.length == 0) {
+            fillResults();
+        }
+    }
 
+    private void closeCurrentScanResult() {
+        if (currentResultScanner != null) {
+            currentResultScanner.close();
+            currentResultScanner = null;
+        }
+    }
 
-	private void closeCurrentScanResult() {
-		if (currentResultScanner != null) {
-			currentResultScanner.close();
-			currentResultScanner = null;
-		}
-	}
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+        closeCurrentScanResult();
+    }
 
+    @Override
+    public InternalLog read() throws IOException {
+        if (tbl == null) {
+            throw new IllegalArgumentException("Haven't open before reading");
+        }
 
-	@Override
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-		closeCurrentScanResult();
-	}
+        Result r = null;
+        InternalLog t = null;
+        while ((r = getNextResult()) != null) {
+            if (r.getRow() == null) {
+                continue;
+            }
+            t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers);
+            break;
+        }
+        return t;
+    }
 
-	@Override
-	public InternalLog read() throws IOException {
-		if (tbl == null) {
-			throw new IllegalArgumentException("Haven't open before reading");
-		}
-		
-		Result r = null;
-		InternalLog t = null;
-		while ((r = getNextResult()) != null) {
-			if (r.getRow() == null) {
-				continue;
-			}
-			t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers);
-			break;
-		}
-		return t;
-	}
+    private Result getNextResult() throws IOException {
+        if (results == null || results.length == 0 || index >= results.length) {
+            fillResults();
+        }
+        if (results == null || results.length == 0 || index >= results.length) {
+            return null;
+        }
+        return results[index++];
+    }
 
-
-	private Result getNextResult() throws IOException {
-		if (results == null || results.length == 0 || index >= results.length) {
-			fillResults();
-		}
-		if (results == null || results.length == 0 || index >= results.length) {
-			return null;
-		}
-		return results[index++];
-	}
-	
 }
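
Side note (not part of the patch): `buildScans` above bounds every per-index-rowkey `Scan` with the index prefix as the start row and the prefix plus nine `0xFF` bytes (`MAX_TAG_VALUE_BYTES`) as the stop row; per the inline comment, that padding is treated as a sufficient upper bound for the tag key/value suffix. A JDK-only sketch (Java 9+) of the unsigned lexicographic ordering this relies on, with made-up byte values:

```java
import java.util.Arrays;

// Illustrative only: HBase compares row keys as unsigned lexicographic byte strings.
public class StopRowSketch {
    public static void main(String[] args) {
        byte[] startRow = {0x01, 0x02, 0x03};                      // index-row prefix
        byte[] indexRow = {0x01, 0x02, 0x03, 0x7F, 0x33, 0x1A};    // prefix + tag hash bytes
        byte[] stopRow  = {0x01, 0x02, 0x03,                       // prefix + MAX_TAG_VALUE_BYTES
                           (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF,
                           (byte) 0xFF, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};

        // indexRow lands inside [startRow, stopRow), so the Scan above returns it.
        System.out.println(Arrays.compareUnsigned(startRow, indexRow) <= 0);  // true
        System.out.println(Arrays.compareUnsigned(indexRow, stopRow) < 0);    // true
    }
}
```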
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java
index ec5631a..8df2773 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/NonClusteredIndexStreamReader.java
@@ -27,25 +27,29 @@
 import java.util.List;
 
 public class NonClusteredIndexStreamReader extends IndexStreamReader {
-	public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) {
-		super(indexDef, condition, new ArrayList<byte[]>());
-		final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys);
-		if (!IndexType.NON_CLUSTER_INDEX.equals(type)) {
-			throw new IllegalArgumentException("This query can't go through index: " + condition.getQueryExpression());
-		}
-	}
+    public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) {
+        super(indexDef, condition, new ArrayList<byte[]>());
+        final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys);
+        if (!IndexType.NON_CLUSTER_INDEX.equals(type)) {
+            throw new IllegalArgumentException("This query can't go through index: "
+                                               + condition.getQueryExpression());
+        }
+    }
 
-	public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List<byte[]> indexRowkeys) {
-		super(indexDef, condition, indexRowkeys);
-	}
+    public NonClusteredIndexStreamReader(IndexDefinition indexDef, SearchCondition condition,
+                                         List<byte[]> indexRowkeys) {
+        super(indexDef, condition, indexRowkeys);
+    }
 
-	@Override
-	protected LogReader createIndexReader() {
-		final EntityDefinition entityDef = indexDef.getEntityDefinition();
-		byte[][] outputQualifiers = null;
-		if(!condition.isOutputAll()) {
-			outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields());
-		}
-		return new NonClusteredIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter());
-	}
+    @Override
+    protected LogReader createIndexReader() {
+        final EntityDefinition entityDef = indexDef.getEntityDefinition();
+        byte[][] outputQualifiers = null;
+        if (!condition.isOutputAll()) {
+            outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef,
+                                                                          condition.getOutputFields());
+        }
+        return new NonClusteredIndexLogReader(indexDef, indexRowkeys, outputQualifiers,
+                                              condition.getFilter());
+    }
 }
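
*Editor's note:* the reformatted constructor above still fails fast when a query cannot be served by the non-clustered index. A minimal caller-side sketch of that contract (`indexDef` and `condition` are assumed to be built elsewhere; they are not part of this patch):

```java
// Sketch only: indexDef and condition come from the calling query service.
try {
    IndexStreamReader reader = new NonClusteredIndexStreamReader(indexDef, condition);
    // ... stream entities through the reader
} catch (IllegalArgumentException e) {
    // Thrown when indexDef.canGoThroughIndex(...) does not return IndexType.NON_CLUSTER_INDEX,
    // i.e. the query expression cannot be answered from this index.
}
```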
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java
index 1c16dc8..1f29121 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/RowKeyLogReader.java
@@ -31,12 +31,12 @@
 import org.apache.eagle.log.entity.meta.EntityDefinition;
 
 public class RowKeyLogReader extends IndexLogReader {
-	private final EntityDefinition ed;
-	private final List<byte[]> rowkeys;
+    private final EntityDefinition ed;
+    private final List<byte[]> rowkeys;
     private final byte[][] qualifiers;
     private HTableInterface tbl;
-	private boolean isOpen = false;
-	private Result[] entityResult;
+    private boolean isOpen = false;
+    private Result[] entityResult;
     private int getIndex = -1;
 
     public RowKeyLogReader(EntityDefinition ed, byte[] rowkey) {
@@ -46,38 +46,39 @@
         this.qualifiers = null;
     }
 
-	public RowKeyLogReader(EntityDefinition ed, byte[] rowkey,byte[][] qualifiers) {
-		this.ed = ed;
-		this.rowkeys = new ArrayList<>();
+    public RowKeyLogReader(EntityDefinition ed, byte[] rowkey, byte[][] qualifiers) {
+        this.ed = ed;
+        this.rowkeys = new ArrayList<>();
         this.rowkeys.add(rowkey);
         this.qualifiers = qualifiers;
-	}
+    }
 
-	public RowKeyLogReader(EntityDefinition ed, List<byte[]> rowkeys,byte[][] qualifiers) {
-		this.ed = ed;
-		this.rowkeys = rowkeys;
+    public RowKeyLogReader(EntityDefinition ed, List<byte[]> rowkeys, byte[][] qualifiers) {
+        this.ed = ed;
+        this.rowkeys = rowkeys;
         this.qualifiers = qualifiers;
-	}
+    }
 
-	@Override
-	public void open() throws IOException {
-		if (isOpen)
-			return; // silently return
-		try {
-			tbl = EagleConfigFactory.load().getHTable(ed.getTable());
-		} catch (RuntimeException ex) {
-			throw new IOException(ex);
-		}
-		final byte[] family = ed.getColumnFamily().getBytes();
+    @Override
+    public void open() throws IOException {
+        if (isOpen) {
+            return; // silently return
+        }
+        try {
+            tbl = EagleConfigFactory.load().getHTable(ed.getTable());
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
+        final byte[] family = ed.getColumnFamily().getBytes();
         List<Get> gets = new ArrayList<>(this.rowkeys.size());
 
-        for(byte[] rowkey:rowkeys) {
+        for (byte[] rowkey : rowkeys) {
             Get get = new Get(rowkey);
             get.addFamily(family);
 
-            if(qualifiers != null) {
-                for(byte[] qualifier: qualifiers){
-                    get.addColumn(family,qualifier);
+            if (qualifiers != null) {
+                for (byte[] qualifier : qualifiers) {
+                    get.addColumn(family, qualifier);
                 }
             }
 
@@ -85,23 +86,23 @@
         }
 
         entityResult = tbl.get(gets);
-		isOpen = true;
-	}
+        isOpen = true;
+    }
 
-	@Override
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-	}
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+    }
 
-	@Override
-	public InternalLog read() throws IOException {
-        if(entityResult == null || entityResult.length == 0 || this.getIndex >= entityResult.length - 1){
+    @Override
+    public InternalLog read() throws IOException {
+        if (entityResult == null || entityResult.length == 0 || this.getIndex >= entityResult.length - 1) {
             return null;
         }
-        getIndex ++;
-		InternalLog t = HBaseInternalLogHelper.parse(ed, entityResult[getIndex], this.qualifiers);
-		return t;
-	}
-}
\ No newline at end of file
+        getIndex++;
+        InternalLog t = HBaseInternalLogHelper.parse(ed, entityResult[getIndex], this.qualifiers);
+        return t;
+    }
+}
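
*Editor's note:* the `open()`/`read()`/`close()` contract of `RowKeyLogReader` is unchanged by the reformatting. A hedged usage sketch (the `EntityDefinition`, row keys, and qualifiers are placeholders, and an HBase cluster reachable via `EagleConfigFactory` is assumed):

```java
// Assumed setup: ed, rowkeys and qualifiers are provided by the calling service.
RowKeyLogReader reader = new RowKeyLogReader(ed, rowkeys, qualifiers);
try {
    reader.open();                          // one batched HBase Get per row key
    InternalLog log;
    while ((log = reader.read()) != null) { // read() returns null once all results are consumed
        // handle the parsed InternalLog
    }
} finally {
    reader.close();                         // releases the HTableInterface
}
```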
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java
index 8ff3448..855beaf 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexLogReader.java
@@ -33,32 +33,34 @@
 
 public class UniqueIndexLogReader extends IndexLogReader {
 
-	private final IndexDefinition indexDef;
-	private final List<byte[]> indexRowkeys; 
-	private final byte[][] qualifiers;
-	private final Filter filter;
-	private HTableInterface tbl;
-	private boolean isOpen = false;
-	private Result[] entityResults;
-	private int index = -1;
+    private final IndexDefinition indexDef;
+    private final List<byte[]> indexRowkeys;
+    private final byte[][] qualifiers;
+    private final Filter filter;
+    private HTableInterface tbl;
+    private boolean isOpen = false;
+    private Result[] entityResults;
+    private int index = -1;
 
-	public UniqueIndexLogReader(IndexDefinition indexDef, List<byte[]> indexRowkeys, byte[][] qualifiers, Filter filter) {
-		this.indexDef = indexDef;
-		this.indexRowkeys = indexRowkeys;
-		this.qualifiers = qualifiers;
-		this.filter = filter;
-	}
+    public UniqueIndexLogReader(IndexDefinition indexDef, List<byte[]> indexRowkeys, byte[][] qualifiers,
+                                Filter filter) {
+        this.indexDef = indexDef;
+        this.indexRowkeys = indexRowkeys;
+        this.qualifiers = qualifiers;
+        this.filter = filter;
+    }
 
-	@Override
-	public void open() throws IOException {
-		if (isOpen)
-			return; // silently return
-		try {
-			tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable());
-		} catch (RuntimeException ex) {
-			throw new IOException(ex);
-		}
-		final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes();
+    @Override
+    public void open() throws IOException {
+        if (isOpen) {
+            return; // silently return
+        }
+        try {
+            tbl = EagleConfigFactory.load().getHTable(indexDef.getEntityDefinition().getTable());
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
+        final byte[] family = indexDef.getEntityDefinition().getColumnFamily().getBytes();
         final List<Get> indexGets = new ArrayList<>();
         for (byte[] rowkey : indexRowkeys) {
             Get get = new Get(rowkey);
@@ -69,56 +71,56 @@
         final Result[] indexResults = tbl.get(indexGets);
         indexGets.clear();
         for (Result indexResult : indexResults) {
-        	final NavigableMap<byte[], byte[]> map = indexResult.getFamilyMap(family);
-        	if (map == null) {
-        		continue;
-        	}
-        	for (byte[] entityRowkey : map.keySet()) {
+            final NavigableMap<byte[], byte[]> map = indexResult.getFamilyMap(family);
+            if (map == null) {
+                continue;
+            }
+            for (byte[] entityRowkey : map.keySet()) {
                 Get get = new Get(entityRowkey);
                 if (filter != null) {
-                	get.setFilter(filter);
+                    get.setFilter(filter);
                 }
-				if(qualifiers == null) {
-					// filter all qualifiers if output qualifiers are null
-					get.addFamily(family);
-				}else {
-					for (int i = 0; i < qualifiers.length; ++i) {
-						// Return the specified qualifiers
-						get.addColumn(family, qualifiers[i]);
-					}
-				}
-				workaroundHBASE2198(get, filter,qualifiers);
-        		indexGets.add(get);
-        	}
+                if (qualifiers == null) {
+                    // filter all qualifiers if output qualifiers are null
+                    get.addFamily(family);
+                } else {
+                    for (int i = 0; i < qualifiers.length; ++i) {
+                        // Return the specified qualifiers
+                        get.addColumn(family, qualifiers[i]);
+                    }
+                }
+                workaroundHBASE2198(get, filter, qualifiers);
+                indexGets.add(get);
+            }
         }
         entityResults = tbl.get(indexGets);
-		isOpen = true;
-	}
+        isOpen = true;
+    }
 
-	@Override
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-	}
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+    }
 
-	@Override
-	public InternalLog read() throws IOException {
-		if (entityResults == null) {
-			throw new IllegalArgumentException("entityResults haven't been initialized before reading");
-		}
-		InternalLog t = null;
-		while (entityResults.length > ++index) {
-			Result r = entityResults[index];
-			if (r != null) {
-				if (r.getRow() == null) {
-					continue;
-				}
-				t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers);
-				break;
-			}
-		}
-		return t;
-	}
+    @Override
+    public InternalLog read() throws IOException {
+        if (entityResults == null) {
+            throw new IllegalArgumentException("entityResults haven't been initialized before reading");
+        }
+        InternalLog t = null;
+        while (entityResults.length > ++index) {
+            Result r = entityResults[index];
+            if (r != null) {
+                if (r.getRow() == null) {
+                    continue;
+                }
+                t = HBaseInternalLogHelper.parse(indexDef.getEntityDefinition(), r, qualifiers);
+                break;
+            }
+        }
+        return t;
+    }
 
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java
index 0391d57..cf94c11 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/index/UniqueIndexStreamReader.java
@@ -27,26 +27,29 @@
 import java.util.List;
 
 public class UniqueIndexStreamReader extends IndexStreamReader {
-	public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) {
-		super(indexDef, condition, new ArrayList<byte[]>());
-		final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys);
-		if (!IndexType.UNIQUE_INDEX.equals(type)) {
-			throw new IllegalArgumentException("This query can't go through index: " + condition.getQueryExpression());
-		}
-	}
+    public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition) {
+        super(indexDef, condition, new ArrayList<byte[]>());
+        final IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), indexRowkeys);
+        if (!IndexType.UNIQUE_INDEX.equals(type)) {
+            throw new IllegalArgumentException("This query can't go through index: "
+                                               + condition.getQueryExpression());
+        }
+    }
 
-	public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition, List<byte[]> indexRowkeys) {
-		super(indexDef, condition, indexRowkeys);
-	}
+    public UniqueIndexStreamReader(IndexDefinition indexDef, SearchCondition condition,
+                                   List<byte[]> indexRowkeys) {
+        super(indexDef, condition, indexRowkeys);
+    }
 
-	@Override
-	protected LogReader createIndexReader() {
-		final EntityDefinition entityDef = indexDef.getEntityDefinition();
-//		final
-		byte[][] outputQualifiers = null;
-		if(!condition.isOutputAll()) {
-			outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef, condition.getOutputFields());
-		}
-		return new UniqueIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter());
-	}
+    @Override
+    protected LogReader createIndexReader() {
+        final EntityDefinition entityDef = indexDef.getEntityDefinition();
+        // final
+        byte[][] outputQualifiers = null;
+        if (!condition.isOutputAll()) {
+            outputQualifiers = HBaseInternalLogHelper.getOutputQualifiers(entityDef,
+                                                                          condition.getOutputFields());
+        }
+        return new UniqueIndexLogReader(indexDef, indexRowkeys, outputQualifiers, condition.getFilter());
+    }
 }
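
*Editor's note:* the two stream readers in this patch are chosen according to the index type reported by `IndexDefinition.canGoThroughIndex(...)`. A hedged dispatch sketch of that caller-side wiring (not part of the patch; the empty row-key list is presumably filled in by `canGoThroughIndex`, mirroring the constructors above):

```java
List<byte[]> rowkeys = new ArrayList<>();   // candidate index row keys, populated by the check below
IndexType type = indexDef.canGoThroughIndex(condition.getQueryExpression(), rowkeys);

IndexStreamReader reader;
if (IndexType.UNIQUE_INDEX.equals(type)) {
    reader = new UniqueIndexStreamReader(indexDef, condition, rowkeys);
} else if (IndexType.NON_CLUSTER_INDEX.equals(type)) {
    reader = new NonClusteredIndexStreamReader(indexDef, condition, rowkeys);
} else {
    throw new IllegalArgumentException("Query cannot be served by any index: "
                                       + condition.getQueryExpression());
}
```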
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java
index cf40e31..2e0882b 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/BooleanSerDeser.java
@@ -21,34 +21,35 @@
  */
 public class BooleanSerDeser implements EntitySerDeser<Boolean> {
 
-	public BooleanSerDeser(){}
+    public BooleanSerDeser() {
+    }
 
-	@Override
-	public Boolean deserialize(byte[] bytes){
-		if(bytes != null && bytes.length > 0){
-			if(bytes[0] == 0){
-				return false;
-			}else if(bytes[0] == 1){
-				return true;
-			}
-		}
-		return null;
-	}
+    @Override
+    public Boolean deserialize(byte[] bytes) {
+        if (bytes != null && bytes.length > 0) {
+            if (bytes[0] == 0) {
+                return false;
+            } else if (bytes[0] == 1) {
+                return true;
+            }
+        }
+        return null;
+    }
 
-	@Override
-	public byte[] serialize(Boolean obj){
-		if(obj != null){
-			if(obj){
-				return new byte[]{1};
-			}else{
-				return new byte[]{0};
-			}
-		}
-		return null;
-	}
+    @Override
+    public byte[] serialize(Boolean obj) {
+        if (obj != null) {
+            if (obj) {
+                return new byte[] { 1 };
+            } else {
+                return new byte[] { 0 };
+            }
+        }
+        return null;
+    }
 
-	@Override
-	public Class<Boolean> type() {
-		return Boolean.class;
-	}
+    @Override
+    public Class<Boolean> type() {
+        return Boolean.class;
+    }
 }
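
*Editor's note:* the behaviour of `BooleanSerDeser` is unchanged: booleans round-trip through a single byte, and unrecognised input deserializes to `null`. A quick sketch:

```java
BooleanSerDeser serDeser = new BooleanSerDeser();
byte[] encoded = serDeser.serialize(Boolean.TRUE);         // -> new byte[] { 1 }
Boolean decoded = serDeser.deserialize(encoded);            // -> Boolean.TRUE
Boolean unknown = serDeser.deserialize(new byte[] { 7 });   // -> null (neither 0 nor 1)
```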
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java
index b64e528..73978e9 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Column.java
@@ -21,8 +21,10 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.FIELD})
+@Target({
+         ElementType.FIELD
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Column {
-	String value() default "";
+    String value() default "";
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java
index 6e3e9c6..cb03e65 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ColumnFamily.java
@@ -21,8 +21,10 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface ColumnFamily {
-	String value() default "f";
+    String value() default "f";
 }
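
*Editor's note:* the two annotations reformatted above are applied to entity classes and their fields. A minimal, hypothetical entity showing the typical usage (class, family, and column names are illustrative only; a real entity also carries further annotations such as table and service metadata, omitted here):

```java
// Hypothetical entity; names are examples, not part of this patch.
@ColumnFamily("f")
public class ExampleEntity extends TaggedLogAPIEntity {
    @Column("a")
    private double value;

    public double getValue() {
        return value;
    }

    public void setValue(double value) {
        this.value = value;
    }
}
```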
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java
index 24385a9..36fc63f 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DefaultJavaObjctSerDeser.java
@@ -22,11 +22,11 @@
 import org.apache.eagle.common.SerializableUtils;
 
 public class DefaultJavaObjctSerDeser implements EntitySerDeser<Object> {
-    public final static  EntitySerDeser<Object> INSTANCE = new DefaultJavaObjctSerDeser();
+    public static final EntitySerDeser<Object> INSTANCE = new DefaultJavaObjctSerDeser();
 
     @Override
     public Object deserialize(byte[] bytes) {
-        return SerializableUtils.deserializeFromByteArray(bytes,"Deserialize from java object bytes");
+        return SerializableUtils.deserializeFromByteArray(bytes, "Deserialize from java object bytes");
     }
 
     @Override
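
*Editor's note:* `DefaultJavaObjctSerDeser` is the fallback for field types with no dedicated `EntitySerDeser`, relying on plain Java serialization. A hedged round-trip sketch (it assumes the value is `java.io.Serializable`, which the underlying `SerializableUtils` requires):

```java
EntitySerDeser<Object> serDeser = DefaultJavaObjctSerDeser.INSTANCE;
byte[] bytes = serDeser.serialize(new java.util.Date());   // any Serializable value
Object restored = serDeser.deserialize(bytes);             // an equal Date instance
```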
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java
index 27b011c..b3af8a6 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Double2DArraySerDeser.java
@@ -25,22 +25,23 @@
  * @since 7/22/15
  */
 public class Double2DArraySerDeser implements EntitySerDeser<double[][]> {
-    private final int SIZE = 8;
+    private static final int SIZE = 8;
+
     @Override
-    public double[][] deserialize(byte[] bytes){
-//        if((bytes.length-4) % SIZE != 0)
-//            return null;
+    public double[][] deserialize(byte[] bytes) {
+        // if((bytes.length-4) % SIZE != 0)
+        // return null;
         int offset = 0;
         // get size of int array
         int rowSize = ByteUtil.bytesToInt(bytes, offset);
         offset += 4;
 
         double[][] data = new double[rowSize][];
-        for(int i=0; i<rowSize; i++) {
+        for (int i = 0; i < rowSize; i++) {
             int colSize = ByteUtil.bytesToInt(bytes, offset);
             offset += 4;
             double[] values = null;
-            if (colSize >= 0){
+            if (colSize >= 0) {
                 values = new double[colSize];
                 for (int j = 0; j < colSize; j++) {
                     values[j] = ByteUtil.bytesToDouble(bytes, offset);
@@ -54,27 +55,28 @@
     }
 
     /**
-     *
      * @param obj
      * @return
      */
     @Override
-    public byte[] serialize(double[][] obj){
-        if(obj == null) return null;
+    public byte[] serialize(double[][] obj) {
+        if (obj == null) {
+            return null;
+        }
         ByteArrayOutputStream data = new ByteArrayOutputStream();
         int size = obj.length;
         byte[] sizeBytes = ByteUtil.intToBytes(size);
-        data.write(sizeBytes,0,sizeBytes.length);
+        data.write(sizeBytes, 0, sizeBytes.length);
 
-        try{
-            for(double[] o:obj){
-                if(o!=null){
+        try {
+            for (double[] o : obj) {
+                if (o != null) {
                     data.write(ByteUtil.intToBytes(o.length));
-                    for(double d:o){
-                        data.write(ByteUtil.doubleToBytes(d),0,SIZE);
+                    for (double d : o) {
+                        data.write(ByteUtil.doubleToBytes(d), 0, SIZE);
                     }
-                }else{
-                    data.write(ByteUtil.intToBytes(-1),0,4);
+                } else {
+                    data.write(ByteUtil.intToBytes(-1), 0, 4);
                 }
             }
         } catch (IOException e) {
@@ -94,4 +96,4 @@
     public Class<double[][]> type() {
         return double[][].class;
     }
-}
\ No newline at end of file
+}
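
*Editor's note:* the wire format of `Double2DArraySerDeser` is unchanged by the cleanup: a 4-byte row count, then for each row a 4-byte column count followed by 8 bytes per double, with `-1` marking a `null` row. A small round-trip sketch:

```java
Double2DArraySerDeser serDeser = new Double2DArraySerDeser();
double[][] original = { { 1.0, 2.0 }, null, { 3.0 } };   // null rows are encoded with length -1
byte[] bytes = serDeser.serialize(original);
double[][] copy = serDeser.deserialize(bytes);            // structurally equal to original
```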
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java
index d87e31c..a8eb965 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleArraySerDeser.java
@@ -18,51 +18,54 @@
 
 import org.apache.eagle.common.ByteUtil;
 
-public class DoubleArraySerDeser implements EntitySerDeser<double[]>{
+public class DoubleArraySerDeser implements EntitySerDeser<double[]> {
 
-	public DoubleArraySerDeser(){}
+    private static final int SIZE = 8;
 
-	private final int SIZE = 8;
-		@Override
-		public double[] deserialize(byte[] bytes){
-			if((bytes.length-4) % SIZE != 0)
-				return null;
-			int offset = 0;
-			// get size of int array
-			int size = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			double[] values = new double[size];
-			for(int i=0; i<size; i++){
-				values[i] = ByteUtil.bytesToDouble(bytes, offset);
-				offset += SIZE;
-			}
-			return values;
-		}
-		
-		/**
-		 * 
-		 * @param obj
-		 * @return
-		 */
-		@Override
-		public byte[] serialize(double[] obj){
-			if(obj == null)
-				return null;
-			int size = obj.length;
-			byte[] array = new byte[4 + SIZE*size];
-			byte[] first = ByteUtil.intToBytes(size);
-			int offset = 0;
-			System.arraycopy(first, 0, array, offset, first.length);
-			offset += first.length;
-			for(int i=0; i<size; i++){
-				System.arraycopy(ByteUtil.doubleToBytes(obj[i]), 0, array, offset, SIZE);
-				offset += SIZE;
-			}
-			return array;
-		}
+    public DoubleArraySerDeser() {
+    }
 
-	@Override
-	public Class<double[]> type() {
-		return double[].class;
-	}
+    @Override
+    public double[] deserialize(byte[] bytes) {
+        if ((bytes.length - 4) % SIZE != 0) {
+            return null;
+        }
+        int offset = 0;
+        // get size of int array
+        int size = ByteUtil.bytesToInt(bytes, offset);
+        offset += 4;
+        double[] values = new double[size];
+        for (int i = 0; i < size; i++) {
+            values[i] = ByteUtil.bytesToDouble(bytes, offset);
+            offset += SIZE;
+        }
+        return values;
+    }
+
+    /**
+     * @param obj
+     * @return
+     */
+    @Override
+    public byte[] serialize(double[] obj) {
+        if (obj == null) {
+            return null;
+        }
+        int size = obj.length;
+        byte[] array = new byte[4 + SIZE * size];
+        byte[] first = ByteUtil.intToBytes(size);
+        int offset = 0;
+        System.arraycopy(first, 0, array, offset, first.length);
+        offset += first.length;
+        for (int i = 0; i < size; i++) {
+            System.arraycopy(ByteUtil.doubleToBytes(obj[i]), 0, array, offset, SIZE);
+            offset += SIZE;
+        }
+        return array;
+    }
+
+    @Override
+    public Class<double[]> type() {
+        return double[].class;
+    }
 }
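
*Editor's note:* `DoubleArraySerDeser` keeps the same layout: a 4-byte element count followed by 8 bytes per double, so an array of `n` doubles serializes to `4 + 8 * n` bytes. For example:

```java
DoubleArraySerDeser serDeser = new DoubleArraySerDeser();
double[] original = { 0.5, 1.5, 2.5 };
byte[] bytes = serDeser.serialize(original);   // 4 + 8 * 3 = 28 bytes
double[] copy = serDeser.deserialize(bytes);   // { 0.5, 1.5, 2.5 }
```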
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java
index 330a99d..bf58d39 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/DoubleSerDeser.java
@@ -18,24 +18,26 @@
 
 import org.apache.eagle.common.ByteUtil;
 
-public class DoubleSerDeser implements EntitySerDeser<Double>{
+public class DoubleSerDeser implements EntitySerDeser<Double> {
 
-	@Override
-	public Double deserialize(byte[] bytes){
-		if(bytes.length < 8)
-			return null;
-		return ByteUtil.bytesToDouble(bytes);
-	}
-	
-	@Override
-	public byte[] serialize(Double obj){
-		if(obj == null)
-			return null;
-		return ByteUtil.doubleToBytes(obj);
-	}
+    @Override
+    public Double deserialize(byte[] bytes) {
+        if (bytes.length < 8) {
+            return null;
+        }
+        return ByteUtil.bytesToDouble(bytes);
+    }
 
-	@Override
-	public Class<Double> type(){
-		return Double.class;
-	}
+    @Override
+    public byte[] serialize(Double obj) {
+        if (obj == null) {
+            return null;
+        }
+        return ByteUtil.doubleToBytes(obj);
+    }
+
+    @Override
+    public Class<Double> type() {
+        return Double.class;
+    }
 }
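
*Editor's note:* `DoubleSerDeser` simply wraps `ByteUtil`: eight bytes per value, with `deserialize` returning `null` for anything shorter. For example:

```java
DoubleSerDeser serDeser = new DoubleSerDeser();
byte[] bytes = serDeser.serialize(42.0);             // 8 bytes via ByteUtil.doubleToBytes
Double value = serDeser.deserialize(bytes);          // 42.0
Double tooShort = serDeser.deserialize(new byte[4]); // null: fewer than 8 bytes
```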
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java
index d2d9eef..c1f4a24 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinition.java
@@ -32,295 +32,333 @@
 import java.util.Map;
 
 /**
- *
- * This object should be regarded as read-only metadata for an entity as it will be shared across all entity object
- * with the same entity name, so don't try to set different values for any of the fields, 
- * otherwise it's not thread safe
+ * This object should be regarded as read-only metadata for an entity as it will be shared across all entity
+ * object with the same entity name, so don't try to set different values for any of the fields, otherwise
+ * it's not thread safe
  */
-public class EntityDefinition implements Writable{
-	private final static Logger LOG = LoggerFactory.getLogger(EntityDefinition.class);
+public class EntityDefinition implements Writable {
+    private static final Logger LOG = LoggerFactory.getLogger(EntityDefinition.class);
 
-	private Class<? extends TaggedLogAPIEntity> entityClass;
-	private String table;
-	private String columnFamily;
-	// TODO prefix be within search/get condition instead of entity definition. Topology entity should have pre-defined prefix. 
-	private String prefix;
-	private String service;
-	private String serviceCreationPath;
-	private String serviceDeletionPath;
-	private String[] partitions;
-	private Map<String, Qualifier> displayNameMap = new HashMap<String, Qualifier>();
-	private Map<String, Qualifier> qualifierNameMap = new HashMap<String, Qualifier>();
-	private Map<String, Method> qualifierGetterMap = new HashMap<String, Method>();
-	private String[] tags;
-	private boolean isTimeSeries;
-	private MetricDefinition metricDefinition;
-	private IndexDefinition[] indexes;
-	
+    private Class<? extends TaggedLogAPIEntity> entityClass;
+    private String table;
+    private String columnFamily;
+    // TODO prefix be within search/get condition instead of entity definition. Topology entity should have
+    // pre-defined prefix.
+    private String prefix;
+    private String service;
+    private String serviceCreationPath;
+    private String serviceDeletionPath;
+    private String[] partitions;
+    private Map<String, Qualifier> displayNameMap = new HashMap<String, Qualifier>();
+    private Map<String, Qualifier> qualifierNameMap = new HashMap<String, Qualifier>();
+    private Map<String, Method> qualifierGetterMap = new HashMap<String, Method>();
+    private String[] tags;
+    private boolean isTimeSeries;
+    private MetricDefinition metricDefinition;
+    private IndexDefinition[] indexes;
 
-	public EntityDefinition(){}
-	
-	public MetricDefinition getMetricDefinition() {
-		return metricDefinition;
-	}
-	public void setMetricDefinition(MetricDefinition metricDefinition) {
-		this.metricDefinition = metricDefinition;
-	}
-	public boolean isTimeSeries() {
-		return isTimeSeries;
-	}
-	public void setTimeSeries(boolean isTimeSeries) {
-		this.isTimeSeries = isTimeSeries;
-	}
-	public String getColumnFamily() {
-		return columnFamily;
-	}
-	public void setColumnFamily(String columnFamily) {
-		this.columnFamily = columnFamily;
-	}
-	public Class<? extends TaggedLogAPIEntity> getEntityClass() {
-		return entityClass;
-	}
-	public void setEntityClass(Class<? extends TaggedLogAPIEntity> entityClass) {
-		this.entityClass = entityClass;
-	}
-	public String getTable() {
-		return table;
-	}
-	public void setTable(String table) {
-		this.table = table;
-	}
-	public Map<String, Qualifier> getDisplayNameMap() {
-		return displayNameMap;
-	}
-	public void setDisplayNameMap(Map<String, Qualifier> displayNameMap) {
-		this.displayNameMap = displayNameMap;
-	}
-	public Map<String, Qualifier> getQualifierNameMap() {
-		return qualifierNameMap;
-	}
-	public void setQualifierNameMap(Map<String, Qualifier> qualifierNameMap) {
-		this.qualifierNameMap = qualifierNameMap;
-	}
-	public String getPrefix() {
-		return prefix;
-	}
-	public void setPrefix(String prefix) {
-		this.prefix = prefix;
-	}
-	public String getService() {
-		return service;
-	}
-	public void setService(String service) {
-		this.service = service;
-	}
-	public String getServiceCreationPath() {
-		return serviceCreationPath;
-	}
-	public void setServiceCreationPath(String serviceCreationPath) {
-		this.serviceCreationPath = serviceCreationPath;
-	}
-	public String getServiceDeletionPath() {
-		return serviceDeletionPath;
-	}
-	public void setServiceDeletionPath(String serviceDeletionPath) {
-		this.serviceDeletionPath = serviceDeletionPath;
-	}
-	public String[] getPartitions() {
-		return partitions;
-	}
-	public void setPartitions(String[] partitions) {
-		this.partitions = partitions;
-	}
-	public IndexDefinition[] getIndexes() {
-		return indexes;
-	}
-	public void setIndexes(IndexDefinition[] indexes) {
-		this.indexes = indexes;
-	}
-	public Map<String, Method> getQualifierGetterMap() {
-		return qualifierGetterMap;
-	}
-	public void setQualifierGetterMap(Map<String, Method> qualifierGetterMap) {
-		this.qualifierGetterMap = qualifierGetterMap;
-	}
-	public String[] getTags(){
-		return tags;
-	}
-	public void setTags(String[] tags){
-		this.tags = tags;
-	}
+    public EntityDefinition() {
+    }
 
-//	public Map<String,String> getQualifierDisplayNameMap(){
-//		Map<String,String> qualifierDisplayNameMap = new HashMap<String, String>();
-//		for(Map.Entry<String,Qualifier> entry: qualifierNameMap.entrySet()){
-//			qualifierDisplayNameMap.put(entry.getKey(),entry.getValue().getDisplayName());
-//		}
-//		return qualifierDisplayNameMap;
-//	}
-	
-	/**
-	 * a filed is a tag when this field is neither in qualifierNameMap nor in displayNameMap
-	 * @param field
-	 * @return
-	 */
-	public boolean isTag(String field){
-		return (qualifierNameMap.get(field) == null && displayNameMap.get(field) == null);
-//		return (qualifierNameMap.get(field) == null);
-	}
+    public MetricDefinition getMetricDefinition() {
+        return metricDefinition;
+    }
 
-	/**
-	 * Check if the specified field is a partition tag field
-	 */
-	public boolean isPartitionTag(String field) {
-		if (partitions == null || (!isTag(field))) {
-			return false;
-		}
-		for (String partition : partitions) {
-			if (partition.equals(field)) {
-				return true;
-			}
-		}
-		return false;
+    public void setMetricDefinition(MetricDefinition metricDefinition) {
+        this.metricDefinition = metricDefinition;
+    }
 
-	}
-	
-	public Object getValue(TaggedLogAPIEntity entity, String field) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
-		if (!entityClass.equals(entity.getClass())) {
-			if ((entityClass.equals(GenericMetricEntity.class) && entity.getClass().equals(GenericMetricShadowEntity.class))) {
-				GenericMetricShadowEntity e = (GenericMetricShadowEntity)entity;
-				return e.getValue();
-			} else {
-				throw new IllegalArgumentException("Invalid entity type: " + entity.getClass().getSimpleName());
-			}
-		}
-		final Method m = qualifierGetterMap.get(field);
-		if (m == null) {
-			// The field is a tag
-			if (entity.getTags() != null) {
-				return entity.getTags().get(field);
-			}
-		}
-		if (m != null) {
-			return m.invoke(entity);
-		}
-		return null;
-	}
+    public boolean isTimeSeries() {
+        return isTimeSeries;
+    }
 
+    public void setTimeSeries(boolean isTimeSeries) {
+        this.isTimeSeries = isTimeSeries;
+    }
 
-	@Override
-	public void write(DataOutput out) throws IOException {
-		out.writeUTF(entityClass.getName());
-		out.writeUTF(table);
-		out.writeUTF(columnFamily);
-		out.writeUTF(prefix);
-		out.writeUTF(service);
+    public String getColumnFamily() {
+        return columnFamily;
+    }
 
-		int partitionsLen = 0;
-		if(partitions != null) partitionsLen =partitions.length;
-		out.writeInt(partitionsLen);
-		for (int i = 0; i < partitionsLen; i++) {
-			out.writeUTF(partitions[i]);
-		}
+    public void setColumnFamily(String columnFamily) {
+        this.columnFamily = columnFamily;
+    }
 
-		int displayNameMapSize = displayNameMap.size();
-		out.writeInt(displayNameMapSize);
-		for(Map.Entry<String,Qualifier> entry: displayNameMap.entrySet()){
-			out.writeUTF(entry.getKey());
-			entry.getValue().write(out);
-		}
+    public Class<? extends TaggedLogAPIEntity> getEntityClass() {
+        return entityClass;
+    }
 
-		int qualifierNameMapSize = qualifierNameMap.size();
-		out.writeInt(qualifierNameMapSize);
-		for(Map.Entry<String,Qualifier> entry: qualifierNameMap.entrySet()){
-			out.writeUTF(entry.getKey());
-			entry.getValue().write(out);
-		}
+    public void setEntityClass(Class<? extends TaggedLogAPIEntity> entityClass) {
+        this.entityClass = entityClass;
+    }
 
-		// TODO: write qualifierGetterMap
-		out.writeBoolean(isTimeSeries);
+    public String getTable() {
+        return table;
+    }
 
-		boolean hasMetricDefinition = metricDefinition != null;
-		out.writeBoolean(hasMetricDefinition);
-		if(hasMetricDefinition) {
-			// write MetricDefinition
-			metricDefinition.write(out);
-		}
+    public void setTable(String table) {
+        this.table = table;
+    }
 
-		// TODO: write indexes
-	}
+    public Map<String, Qualifier> getDisplayNameMap() {
+        return displayNameMap;
+    }
 
+    public void setDisplayNameMap(Map<String, Qualifier> displayNameMap) {
+        this.displayNameMap = displayNameMap;
+    }
 
-	public void setEntityDefinition(EntityDefinition ed){
-		this.entityClass = ed.getEntityClass();
-		this.table = ed.getTable();
-		this.columnFamily = ed.getColumnFamily();
-		this.prefix = ed.getPrefix();
-		this.service = ed.getService();
-		this.partitions = ed.getPartitions();
-		this.displayNameMap = ed.getDisplayNameMap();
-		this.qualifierGetterMap = ed.getQualifierGetterMap();
+    public Map<String, Qualifier> getQualifierNameMap() {
+        return qualifierNameMap;
+    }
+
+    public void setQualifierNameMap(Map<String, Qualifier> qualifierNameMap) {
+        this.qualifierNameMap = qualifierNameMap;
+    }
+
+    public String getPrefix() {
+        return prefix;
+    }
+
+    public void setPrefix(String prefix) {
+        this.prefix = prefix;
+    }
+
+    public String getService() {
+        return service;
+    }
+
+    public void setService(String service) {
+        this.service = service;
+    }
+
+    public String getServiceCreationPath() {
+        return serviceCreationPath;
+    }
+
+    public void setServiceCreationPath(String serviceCreationPath) {
+        this.serviceCreationPath = serviceCreationPath;
+    }
+
+    public String getServiceDeletionPath() {
+        return serviceDeletionPath;
+    }
+
+    public void setServiceDeletionPath(String serviceDeletionPath) {
+        this.serviceDeletionPath = serviceDeletionPath;
+    }
+
+    public String[] getPartitions() {
+        return partitions;
+    }
+
+    public void setPartitions(String[] partitions) {
+        this.partitions = partitions;
+    }
+
+    public IndexDefinition[] getIndexes() {
+        return indexes;
+    }
+
+    public void setIndexes(IndexDefinition[] indexes) {
+        this.indexes = indexes;
+    }
+
+    public Map<String, Method> getQualifierGetterMap() {
+        return qualifierGetterMap;
+    }
+
+    public void setQualifierGetterMap(Map<String, Method> qualifierGetterMap) {
+        this.qualifierGetterMap = qualifierGetterMap;
+    }
+
+    public String[] getTags() {
+        return tags;
+    }
+
+    public void setTags(String[] tags) {
+        this.tags = tags;
+    }
+
+    // public Map<String,String> getQualifierDisplayNameMap(){
+    // Map<String,String> qualifierDisplayNameMap = new HashMap<String, String>();
+    // for(Map.Entry<String,Qualifier> entry: qualifierNameMap.entrySet()){
+    // qualifierDisplayNameMap.put(entry.getKey(),entry.getValue().getDisplayName());
+    // }
+    // return qualifierDisplayNameMap;
+    // }
+
+    /**
+     * a filed is a tag when this field is neither in qualifierNameMap nor in displayNameMap
+     * 
+     * @param field
+     * @return
+     */
+    public boolean isTag(String field) {
+        return (qualifierNameMap.get(field) == null && displayNameMap.get(field) == null);
+        // return (qualifierNameMap.get(field) == null);
+    }
+
+    /**
+     * Check if the specified field is a partition tag field
+     */
+    public boolean isPartitionTag(String field) {
+        if (partitions == null || (!isTag(field))) {
+            return false;
+        }
+        for (String partition : partitions) {
+            if (partition.equals(field)) {
+                return true;
+            }
+        }
+        return false;
+
+    }
+
+    public Object getValue(TaggedLogAPIEntity entity, String field)
+        throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
+        if (!entityClass.equals(entity.getClass())) {
+            if ((entityClass.equals(GenericMetricEntity.class)
+                 && entity.getClass().equals(GenericMetricShadowEntity.class))) {
+                GenericMetricShadowEntity e = (GenericMetricShadowEntity)entity;
+                return e.getValue();
+            } else {
+                throw new IllegalArgumentException("Invalid entity type: "
+                                                   + entity.getClass().getSimpleName());
+            }
+        }
+        final Method m = qualifierGetterMap.get(field);
+        if (m == null) {
+            // The field is a tag
+            if (entity.getTags() != null) {
+                return entity.getTags().get(field);
+            }
+        }
+        if (m != null) {
+            return m.invoke(entity);
+        }
+        return null;
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        out.writeUTF(entityClass.getName());
+        out.writeUTF(table);
+        out.writeUTF(columnFamily);
+        out.writeUTF(prefix);
+        out.writeUTF(service);
+
+        int partitionsLen = 0;
+        if (partitions != null) {
+            partitionsLen = partitions.length;
+        }
+        out.writeInt(partitionsLen);
+        for (int i = 0; i < partitionsLen; i++) {
+            out.writeUTF(partitions[i]);
+        }
+
+        int displayNameMapSize = displayNameMap.size();
+        out.writeInt(displayNameMapSize);
+        for (Map.Entry<String, Qualifier> entry : displayNameMap.entrySet()) {
+            out.writeUTF(entry.getKey());
+            entry.getValue().write(out);
+        }
+
+        int qualifierNameMapSize = qualifierNameMap.size();
+        out.writeInt(qualifierNameMapSize);
+        for (Map.Entry<String, Qualifier> entry : qualifierNameMap.entrySet()) {
+            out.writeUTF(entry.getKey());
+            entry.getValue().write(out);
+        }
+
+        // TODO: write qualifierGetterMap
+        out.writeBoolean(isTimeSeries);
+
+        boolean hasMetricDefinition = metricDefinition != null;
+        out.writeBoolean(hasMetricDefinition);
+        if (hasMetricDefinition) {
+            // write MetricDefinition
+            metricDefinition.write(out);
+        }
+
+        // TODO: write indexes
+    }
+
+    public void setEntityDefinition(EntityDefinition ed) {
+        this.entityClass = ed.getEntityClass();
+        this.table = ed.getTable();
+        this.columnFamily = ed.getColumnFamily();
+        this.prefix = ed.getPrefix();
+        this.service = ed.getService();
+        this.partitions = ed.getPartitions();
+        this.displayNameMap = ed.getDisplayNameMap();
+        this.qualifierGetterMap = ed.getQualifierGetterMap();
         this.qualifierNameMap = ed.getQualifierNameMap();
-		this.isTimeSeries = ed.isTimeSeries();
-		this.metricDefinition = ed.metricDefinition;
-		this.indexes = ed.getIndexes();
-	}
+        this.isTimeSeries = ed.isTimeSeries();
+        this.metricDefinition = ed.metricDefinition;
+        this.indexes = ed.getIndexes();
+    }
 
-	//////////////////////////////////////////////
-	// 	TODO: Cache object for reading in region side
-	//////////////////////////////////////////////
-	//	private final static Map<String,EntityDefinition> _classEntityDefinitionCache = new HashMap<String, EntityDefinition>();
+    //////////////////////////////////////////////
+    // TODO: Cache object for reading in region side
+    //////////////////////////////////////////////
+    // private static final Map<String,EntityDefinition> _classEntityDefinitionCache = new HashMap<String,
+    ////////////////////////////////////////////// EntityDefinition>();
 
-	@Override
-	public void readFields(DataInput in) throws IOException {
-		String entityClassName = in.readUTF();
-//		EntityDefinition _cached = _classEntityDefinitionCache.get(entityClassName);
-//		if(_cached !=null){
-//			setEntityDefinition(_cached);
-//			LOG.info("Got cached definition for entity: "+entityClassName);
-//			return;
-//		}
-		if(LOG.isDebugEnabled()) LOG.debug("Reading EntityDefinition entity: "+entityClassName);
-		try {
-			entityClass = (Class<? extends TaggedLogAPIEntity>) Class.forName(entityClassName);
-		} catch (Exception e) {
-			// ignore
-		}
-		table = in.readUTF();
-		columnFamily = in.readUTF();
-		prefix = in.readUTF();
-		service = in.readUTF();
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        String entityClassName = in.readUTF();
+        // EntityDefinition _cached = _classEntityDefinitionCache.get(entityClassName);
+        // if(_cached !=null){
+        // setEntityDefinition(_cached);
+        // LOG.info("Got cached definition for entity: "+entityClassName);
+        // return;
+        // }
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Reading EntityDefinition entity: " + entityClassName);
+        }
+        try {
+            entityClass = (Class<? extends TaggedLogAPIEntity>)Class.forName(entityClassName);
+        } catch (Exception e) {
+            // ignore
+        }
+        table = in.readUTF();
+        columnFamily = in.readUTF();
+        prefix = in.readUTF();
+        service = in.readUTF();
 
-		int partitionsLen = in.readInt();
-		partitions = new String[partitionsLen];
-		for (int i = 0; i < partitionsLen; i++) {
-			partitions[i] = in.readUTF();
-		}
-		int displayNameMapSize = in.readInt();
-		for(int i=0;i<displayNameMapSize;i++){
-			String key = in.readUTF();
-			Qualifier value = new Qualifier();
-			value.readFields(in);
-			displayNameMap.put(key,value);
-		}
-		int qualifierNameMapSize = in.readInt();
-		for(int i=0;i<qualifierNameMapSize;i++){
-			String key = in.readUTF();
-			Qualifier value = new Qualifier();
-			value.readFields(in);
-			qualifierNameMap.put(key,value);
-		}
-		// TODO: readFields qualifierGetterMap
-		isTimeSeries = in.readBoolean();
+        int partitionsLen = in.readInt();
+        partitions = new String[partitionsLen];
+        for (int i = 0; i < partitionsLen; i++) {
+            partitions[i] = in.readUTF();
+        }
+        int displayNameMapSize = in.readInt();
+        for (int i = 0; i < displayNameMapSize; i++) {
+            String key = in.readUTF();
+            Qualifier value = new Qualifier();
+            value.readFields(in);
+            displayNameMap.put(key, value);
+        }
+        int qualifierNameMapSize = in.readInt();
+        for (int i = 0; i < qualifierNameMapSize; i++) {
+            String key = in.readUTF();
+            Qualifier value = new Qualifier();
+            value.readFields(in);
+            qualifierNameMap.put(key, value);
+        }
+        // TODO: readFields qualifierGetterMap
+        isTimeSeries = in.readBoolean();
 
-		// readFields MetricDefinition
-		boolean hasMetricDefinition = in.readBoolean();
-		if(hasMetricDefinition) {
-			if(LOG.isDebugEnabled()) LOG.debug("reading metricDefinition");
-			metricDefinition = new MetricDefinition();
-			metricDefinition.readFields(in);
-		}
-		// TODO: readFields indexes
-//		_classEntityDefinitionCache.put(entityClassName,this);
-	}
-}
\ No newline at end of file
+        // readFields MetricDefinition
+        boolean hasMetricDefinition = in.readBoolean();
+        if (hasMetricDefinition) {
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("reading metricDefinition");
+            }
+            metricDefinition = new MetricDefinition();
+            metricDefinition.readFields(in);
+        }
+        // TODO: readFields indexes
+        // _classEntityDefinitionCache.put(entityClassName,this);
+    }
+}
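
*Editor's note:* `EntityDefinition` remains `Writable`, so a populated definition round-trips through `write`/`readFields`. A hedged sketch, assuming the standard `java.io` stream classes are imported and using placeholder field values (`GenericMetricEntity` is only picked as a convenient existing entity class):

```java
EntityDefinition ed = new EntityDefinition();
ed.setEntityClass(GenericMetricEntity.class);   // any TaggedLogAPIEntity subclass on the classpath
ed.setTable("example_table");                   // placeholder values
ed.setColumnFamily("f");
ed.setPrefix("example");
ed.setService("ExampleService");
ed.setTimeSeries(true);

ByteArrayOutputStream buffer = new ByteArrayOutputStream();
ed.write(new DataOutputStream(buffer));

EntityDefinition copy = new EntityDefinition();
copy.readFields(new DataInputStream(new ByteArrayInputStream(buffer.toByteArray())));
// copy now mirrors ed's table, column family, prefix, service and time-series flag
```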
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java
index 8795ba0..a4557f2 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntityDefinitionManager.java
@@ -35,440 +35,483 @@
  * static initialization of all registered entities. As of now, dynamic registration is not supported
  */
 public class EntityDefinitionManager {
-	private static final Logger LOG = LoggerFactory.getLogger(EntityDefinitionManager.class);
-	private static volatile boolean initialized = false;
-	/**
-	 * using concurrent hashmap is due to the fact that entity can be registered any time from any thread
-	 */
-	private static Map<String, EntityDefinition> entityServiceMap = new ConcurrentHashMap<String, EntityDefinition>();
-	private static Map<Class<? extends TaggedLogAPIEntity>, EntityDefinition> classMap = new ConcurrentHashMap<Class<? extends TaggedLogAPIEntity>, EntityDefinition>();
-	private static Map<Class<?>, EntitySerDeser<?>> _serDeserMap = new ConcurrentHashMap<Class<?>, EntitySerDeser<?>>(); 
-	private static Map<Class<?>, Integer> _serDeserClassIDMap = new ConcurrentHashMap<Class<?>, Integer>(); 
-	private static Map<Integer, Class<?>> _serIDDeserClassMap = new ConcurrentHashMap<Integer, Class<?>>(); 
-	private static Map<String, Map<Integer, EntityDefinition>> entityPrefixMap = new ConcurrentHashMap<String, Map<Integer, EntityDefinition>>();
-	private static Map<String, Map<Integer, IndexDefinition>> indexPrefixMap = new ConcurrentHashMap<String, Map<Integer, IndexDefinition>>();
+    private static final Logger LOG = LoggerFactory.getLogger(EntityDefinitionManager.class);
+    private static volatile boolean initialized = false;
+    /**
+     * using concurrent hashmap is due to the fact that entity can be registered any time from any thread
+     */
+    private static Map<String, EntityDefinition> entityServiceMap = new ConcurrentHashMap<String, EntityDefinition>();
+    private static Map<Class<? extends TaggedLogAPIEntity>, EntityDefinition> classMap = new ConcurrentHashMap<Class<? extends TaggedLogAPIEntity>, EntityDefinition>();
+    private static Map<Class<?>, EntitySerDeser<?>> _serDeserMap = new ConcurrentHashMap<Class<?>, EntitySerDeser<?>>();
+    private static Map<Class<?>, Integer> _serDeserClassIDMap = new ConcurrentHashMap<Class<?>, Integer>();
+    private static Map<Integer, Class<?>> _serIDDeserClassMap = new ConcurrentHashMap<Integer, Class<?>>();
+    private static Map<String, Map<Integer, EntityDefinition>> entityPrefixMap = new ConcurrentHashMap<String, Map<Integer, EntityDefinition>>();
+    private static Map<String, Map<Integer, IndexDefinition>> indexPrefixMap = new ConcurrentHashMap<String, Map<Integer, IndexDefinition>>();
 
-	static{
-		int id = 0;
-		_serDeserMap.put(NullObject.class, new NullSerDeser());
-		_serIDDeserClassMap.put(id, NullObject.class);
-		_serDeserClassIDMap.put(NullObject.class, id++);
-		
-		_serDeserMap.put(String.class, new StringSerDeser());
-		_serIDDeserClassMap.put(id, String.class);
-		_serDeserClassIDMap.put(String.class, id++);
-		
-		_serDeserMap.put(long.class, new LongSerDeser());
-		_serIDDeserClassMap.put(id, long.class);
-		_serDeserClassIDMap.put(long.class, id++);
-		
-		_serDeserMap.put(Long.class, new LongSerDeser());
-		_serIDDeserClassMap.put(id, Long.class);
-		_serDeserClassIDMap.put(Long.class, id++);
-		
-		_serDeserMap.put(int.class, new IntSerDeser());
-		_serIDDeserClassMap.put(id, int.class);
-		_serDeserClassIDMap.put(int.class, id++);
-		
-		_serDeserMap.put(Integer.class, new IntSerDeser());
-		_serIDDeserClassMap.put(id, Integer.class);
-		_serDeserClassIDMap.put(Integer.class, id++);
-		
-		_serDeserMap.put(Double.class, new DoubleSerDeser());
-		_serIDDeserClassMap.put(id, Double.class);
-		_serDeserClassIDMap.put(Double.class, id++);
-		
-		_serDeserMap.put(double.class, new DoubleSerDeser());
-		_serIDDeserClassMap.put(id, double.class);
-		_serDeserClassIDMap.put(double.class, id++);
-		
-		_serDeserMap.put(int[].class, new IntArraySerDeser());
-		_serIDDeserClassMap.put(id, int[].class);
-		_serDeserClassIDMap.put(int[].class, id++);
-		
-		_serDeserMap.put(double[].class, new DoubleArraySerDeser());
-		_serIDDeserClassMap.put(id, double[].class);
-		_serDeserClassIDMap.put(double[].class, id++);
+    static {
+        int id = 0;
+        _serDeserMap.put(NullObject.class, new NullSerDeser());
+        _serIDDeserClassMap.put(id, NullObject.class);
+        _serDeserClassIDMap.put(NullObject.class, id++);
 
-		_serDeserMap.put(double[][].class, new Double2DArraySerDeser());
-		_serIDDeserClassMap.put(id, double[][].class);
-		_serDeserClassIDMap.put(double[][].class, id++);
-		
-		_serDeserMap.put(Boolean.class, new BooleanSerDeser());
-		_serIDDeserClassMap.put(id, Boolean.class);
-		_serDeserClassIDMap.put(Boolean.class, id++);
-		
-		_serDeserMap.put(boolean.class, new BooleanSerDeser());
-		_serIDDeserClassMap.put(id, boolean.class);
-		_serDeserClassIDMap.put(boolean.class, id++);
-		
-		_serDeserMap.put(String[].class, new StringArraySerDeser());
-		_serIDDeserClassMap.put(id, String[].class);
-		_serDeserClassIDMap.put(String[].class, id++);
-		
-		_serDeserMap.put(Map.class, new MapSerDeser());
-		_serIDDeserClassMap.put(id, Map.class);
-		_serDeserClassIDMap.put(Map.class, id++);
-		
-		_serDeserMap.put(List.class, new ListSerDeser());
-		_serIDDeserClassMap.put(id, List.class);
-		_serDeserClassIDMap.put(List.class, id++);
-	}
-	
-	
+        _serDeserMap.put(String.class, new StringSerDeser());
+        _serIDDeserClassMap.put(id, String.class);
+        _serDeserClassIDMap.put(String.class, id++);
 
-	@SuppressWarnings("rawtypes")
-	public static EntitySerDeser getSerDeser(Class<?> clazz){
-		return _serDeserMap.get(clazz);
-	}
+        _serDeserMap.put(long.class, new LongSerDeser());
+        _serIDDeserClassMap.put(id, long.class);
+        _serDeserClassIDMap.put(long.class, id++);
 
-	/**
-	 * Get internal ID by the predefined registered class
-	 * @param clazz original for serialization/deserialization 
-	 * @return the internal id if the input class has been registered, otherwise return -1
-	 */
-	public static int getIDBySerDerClass(Class<?> clazz) {
-		final Integer id = _serDeserClassIDMap.get(clazz);
-		if (id == null) {
-			return -1;
-		}
-		return id;
-	}
-	
+        _serDeserMap.put(Long.class, new LongSerDeser());
+        _serIDDeserClassMap.put(id, Long.class);
+        _serDeserClassIDMap.put(Long.class, id++);
 
-	/**
-	 * Get the predefined registered class by internal ID
-	 * @param id the internal class ID
-	 * @return the predefined registered class, if the class hasn't been registered, return null
-	 */
-	public static Class<?> getClassByID(int id) {
-		return _serIDDeserClassMap.get(id);
-	}
-	
-	/**
-	 * it is allowed that user can register their own entity
-	 * @param clazz entity class
-	 * @throws IllegalArgumentException
-	 */
-	public static void registerEntity(Class<? extends TaggedLogAPIEntity> clazz) throws IllegalArgumentException{
-		registerEntity(createEntityDefinition(clazz));
-	}
-	
-	/**
-	 * it is allowed that user can register their own entity
-	 * @deprecated This API is deprecated since we need to use Service annotation to define service name for entities
-	 * @param serviceName entity service name
-	 * @param clazz entity class
-	 * @throws IllegalArgumentException
-	 * 
-	 */
-    @Deprecated
-	public static void registerEntity(String serviceName, Class<? extends TaggedLogAPIEntity> clazz) throws IllegalArgumentException{
-		registerEntity(serviceName, createEntityDefinition(clazz));
-	}
-	
-	/**
-	 * it is allowed that user can register their own entity definition
-	 * @param entityDef entity definition
-	 * @throws IllegalArgumentException
-	 */
-	public static void registerEntity(EntityDefinition entityDef) {
-		registerEntity(entityDef.getService(), entityDef);
-	}
-	
-	/**
-	 * it is allowed that user can register their own entity definition
-	 * @deprecated This API is deprecated since we need to use Service annotation to define service name for entities. 
-	 * 
-	 * @param entityDef entity definition
-	 * @throws IllegalArgumentException
-	 */
-	public static void registerEntity(String serviceName, EntityDefinition entityDef) {
-		final String table = entityDef.getTable();
-		if (entityServiceMap.containsKey(serviceName)) {
-			final EntityDefinition existing = entityServiceMap.get(serviceName);
-			if (entityDef.getClass().equals(existing.getClass())) {
-				return;
-			}
-			throw new IllegalArgumentException("Service " + serviceName + " has already been registered by " + existing.getClass().getName() + ", so class " + entityDef.getClass() + " can NOT be registered");
-		}
-		synchronized (EntityDefinitionManager.class) {
-			checkPrefix(entityDef);
-			entityServiceMap.put(serviceName, entityDef);
-			Map<Integer, EntityDefinition> entityHashMap = entityPrefixMap.get(table);
-			if (entityHashMap == null) {
-				entityHashMap = new ConcurrentHashMap<Integer, EntityDefinition>();
-				entityPrefixMap.put(table, entityHashMap);
-			}
-			entityHashMap.put(entityDef.getPrefix().hashCode(), entityDef);
-			final IndexDefinition[] indexes = entityDef.getIndexes();
-			if (indexes != null) {
-				for (IndexDefinition index : indexes) {
-					Map<Integer, IndexDefinition> indexHashMap = indexPrefixMap.get(table);
-					if (indexHashMap == null) {
-						indexHashMap = new ConcurrentHashMap<Integer, IndexDefinition>();
-						indexPrefixMap.put(table, indexHashMap);
-					}
-					indexHashMap.put(index.getIndexPrefix().hashCode(), index);
-				}
-			}
-			classMap.put(entityDef.getEntityClass(), entityDef);
-		}
-        if(LOG.isDebugEnabled()) {
-            LOG.debug(entityDef.getEntityClass().getSimpleName() + " entity registered successfully, table name: " + entityDef.getTable() +
-					", prefix: " + entityDef.getPrefix() + ", service: " + serviceName + ", CF: " + entityDef.getColumnFamily());
-        }else{
-            LOG.info(String.format("Registered %s (%s)", entityDef.getEntityClass().getSimpleName(), serviceName));
+        _serDeserMap.put(int.class, new IntSerDeser());
+        _serIDDeserClassMap.put(id, int.class);
+        _serDeserClassIDMap.put(int.class, id++);
+
+        _serDeserMap.put(Integer.class, new IntSerDeser());
+        _serIDDeserClassMap.put(id, Integer.class);
+        _serDeserClassIDMap.put(Integer.class, id++);
+
+        _serDeserMap.put(Double.class, new DoubleSerDeser());
+        _serIDDeserClassMap.put(id, Double.class);
+        _serDeserClassIDMap.put(Double.class, id++);
+
+        _serDeserMap.put(double.class, new DoubleSerDeser());
+        _serIDDeserClassMap.put(id, double.class);
+        _serDeserClassIDMap.put(double.class, id++);
+
+        _serDeserMap.put(int[].class, new IntArraySerDeser());
+        _serIDDeserClassMap.put(id, int[].class);
+        _serDeserClassIDMap.put(int[].class, id++);
+
+        _serDeserMap.put(double[].class, new DoubleArraySerDeser());
+        _serIDDeserClassMap.put(id, double[].class);
+        _serDeserClassIDMap.put(double[].class, id++);
+
+        _serDeserMap.put(double[][].class, new Double2DArraySerDeser());
+        _serIDDeserClassMap.put(id, double[][].class);
+        _serDeserClassIDMap.put(double[][].class, id++);
+
+        _serDeserMap.put(Boolean.class, new BooleanSerDeser());
+        _serIDDeserClassMap.put(id, Boolean.class);
+        _serDeserClassIDMap.put(Boolean.class, id++);
+
+        _serDeserMap.put(boolean.class, new BooleanSerDeser());
+        _serIDDeserClassMap.put(id, boolean.class);
+        _serDeserClassIDMap.put(boolean.class, id++);
+
+        _serDeserMap.put(String[].class, new StringArraySerDeser());
+        _serIDDeserClassMap.put(id, String[].class);
+        _serDeserClassIDMap.put(String[].class, id++);
+
+        _serDeserMap.put(Map.class, new MapSerDeser());
+        _serIDDeserClassMap.put(id, Map.class);
+        _serDeserClassIDMap.put(Map.class, id++);
+
+        _serDeserMap.put(List.class, new ListSerDeser());
+        _serIDDeserClassMap.put(id, List.class);
+        _serDeserClassIDMap.put(List.class, id++);
+    }
+
+    @SuppressWarnings("rawtypes")
+    public static EntitySerDeser getSerDeser(Class<?> clazz) {
+        return _serDeserMap.get(clazz);
+    }
+
+    /**
+     * Get the internal ID for a predefined registered class.
+     *
+     * @param clazz the class used for serialization/deserialization
+     * @return the internal id if the class has been registered, otherwise -1
+     */
+    public static int getIDBySerDerClass(Class<?> clazz) {
+        final Integer id = _serDeserClassIDMap.get(clazz);
+        if (id == null) {
+            return -1;
         }
-	}
+        return id;
+    }
 
-	private static void checkPrefix(EntityDefinition entityDef) {
-		final Integer entityPrefixHashcode = entityDef.getPrefix().hashCode();
-		if (entityPrefixMap.containsKey(entityDef.getTable())) {
-			final Map<Integer, EntityDefinition> entityHashMap = entityPrefixMap.get(entityDef.getTable());
-			if (entityHashMap.containsKey(entityPrefixHashcode) && (!entityDef.equals(entityHashMap.get(entityPrefixHashcode)))) {
-				throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The entity prefix " + entityDef.getPrefix() + " has already been registered by entity service " + entityHashMap.get(entityPrefixHashcode).getService());
-			}
-			final IndexDefinition[] indexes = entityDef.getIndexes();
-			if (indexes != null) {
-				for (IndexDefinition index : indexes) {
-					final Integer indexPrefixHashcode = index.getIndexPrefix().hashCode();
-					if (entityHashMap.containsKey(indexPrefixHashcode)) {
-						throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The index prefix " + index.getIndexPrefix() + " has already been registered by entity " + entityHashMap.get(indexPrefixHashcode).getService());
-					}
-					final Map<Integer, IndexDefinition> indexHashMap = indexPrefixMap.get(entityDef.getTable());
-					if (indexHashMap != null && indexHashMap.containsKey(indexPrefixHashcode) && (!index.equals(indexHashMap.get(indexPrefixHashcode)))) {
-						throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass().getName() + ", because of the prefix hash code conflict! The index prefix " + index.getIndexPrefix() + " has already been registered by entity " + indexHashMap.get(indexPrefixHashcode).getEntityDefinition().getService());
-					}
-				}
-			}
-		}
-	}
-	
-	/**
-	 * Get entity definition by name
-	 * @param serviceName
-	 * @return
-	 * @throws IllegalAccessException 
-	 * @throws InstantiationException 
-	 */
-	public static EntityDefinition getEntityByServiceName(String serviceName) throws InstantiationException, IllegalAccessException{
-		checkInit();
-		return entityServiceMap.get(serviceName);
-	}
-	
-	public static EntityDefinition getEntityDefinitionByEntityClass(Class<? extends TaggedLogAPIEntity> clazz) throws InstantiationException, IllegalAccessException {
-		checkInit();
-		return classMap.get(clazz);
-	}
+    /**
+     * Get the predefined registered class by internal ID.
+     *
+     * @param id the internal class ID
+     * @return the registered class, or null if no class has been registered for the given ID
+     */
+    public static Class<?> getClassByID(int id) {
+        return _serIDDeserClassMap.get(id);
+    }
 
-	private static void checkInit() throws InstantiationException, IllegalAccessException {
-		if (!initialized) {
-			synchronized (EntityDefinitionManager.class) {
-				if (!initialized) {
-					EntityRepositoryScanner.scan();
-					initialized = true;
-				}
-			}
-		}
-	}
+    /**
+     * Allows users to register their own entity classes.
+     *
+     * @param clazz entity class
+     * @throws IllegalArgumentException if the entity class cannot be registered
+     */
+    public static void registerEntity(Class<? extends TaggedLogAPIEntity> clazz)
+        throws IllegalArgumentException {
+        registerEntity(createEntityDefinition(clazz));
+    }
 
-	public static void load() throws IllegalAccessException, InstantiationException {
-		checkInit();
-	}
+    /**
+     * Allows users to register their own entity classes under an explicit service name.
+     *
+     * @deprecated Use the Service annotation to define the service name for entities instead.
+     * @param serviceName entity service name
+     * @param clazz entity class
+     * @throws IllegalArgumentException if the entity class cannot be registered
+     */
+    @Deprecated
+    public static void registerEntity(String serviceName, Class<? extends TaggedLogAPIEntity> clazz)
+        throws IllegalArgumentException {
+        registerEntity(serviceName, createEntityDefinition(clazz));
+    }
 
-	/**
-	 * UserPrincipal can register their own field SerDeser
-	 * @param clazz class of the the SerDeser 
-	 * @param entitySerDeser entity or field SerDeser
-	 * @throws IllegalArgumentException
-	 */
-	public static void registerSerDeser(Class<?> clazz, EntitySerDeser<?> entitySerDeser) {
-		_serDeserMap.put(clazz, entitySerDeser);
-	}
+    /**
+     * Allows users to register their own entity definitions.
+     *
+     * @param entityDef entity definition
+     * @throws IllegalArgumentException if the entity definition cannot be registered
+     */
+    public static void registerEntity(EntityDefinition entityDef) {
+        registerEntity(entityDef.getService(), entityDef);
+    }
 
-	/**
-	 * Check whether the entity class is time series, false by default
-	 * @param clazz
-	 * @return
-	 */
-	public static boolean isTimeSeries(Class<? extends TaggedLogAPIEntity> clazz){
-		TimeSeries ts = clazz.getAnnotation(TimeSeries.class);
-		return ts != null && ts.value();
-	}
+    /**
+     * Allows users to register their own entity definitions under an explicit service name.
+     *
+     * @deprecated Use the Service annotation to define the service name for entities instead.
+     * @param serviceName entity service name
+     * @param entityDef entity definition
+     * @throws IllegalArgumentException if the entity definition cannot be registered
+     */
+    @Deprecated
+    public static void registerEntity(String serviceName, EntityDefinition entityDef) {
+        final String table = entityDef.getTable();
+        if (entityServiceMap.containsKey(serviceName)) {
+            final EntityDefinition existing = entityServiceMap.get(serviceName);
+            if (entityDef.getClass().equals(existing.getClass())) {
+                return;
+            }
+            throw new IllegalArgumentException("Service " + serviceName + " has already been registered by "
+                                               + existing.getClass().getName() + ", so class "
+                                               + entityDef.getClass() + " can NOT be registered");
+        }
+        synchronized (EntityDefinitionManager.class) {
+            checkPrefix(entityDef);
+            entityServiceMap.put(serviceName, entityDef);
+            Map<Integer, EntityDefinition> entityHashMap = entityPrefixMap.get(table);
+            if (entityHashMap == null) {
+                entityHashMap = new ConcurrentHashMap<Integer, EntityDefinition>();
+                entityPrefixMap.put(table, entityHashMap);
+            }
+            entityHashMap.put(entityDef.getPrefix().hashCode(), entityDef);
+            final IndexDefinition[] indexes = entityDef.getIndexes();
+            if (indexes != null) {
+                for (IndexDefinition index : indexes) {
+                    Map<Integer, IndexDefinition> indexHashMap = indexPrefixMap.get(table);
+                    if (indexHashMap == null) {
+                        indexHashMap = new ConcurrentHashMap<Integer, IndexDefinition>();
+                        indexPrefixMap.put(table, indexHashMap);
+                    }
+                    indexHashMap.put(index.getIndexPrefix().hashCode(), index);
+                }
+            }
+            classMap.put(entityDef.getEntityClass(), entityDef);
+        }
+        if (LOG.isDebugEnabled()) {
+            LOG.debug(entityDef.getEntityClass().getSimpleName()
+                      + " entity registered successfully, table name: " + entityDef.getTable() + ", prefix: "
+                      + entityDef.getPrefix() + ", service: " + serviceName + ", CF: "
+                      + entityDef.getColumnFamily());
+        } else {
+            LOG.info(String.format("Registered %s (%s)", entityDef.getEntityClass().getSimpleName(),
+                                   serviceName));
+        }
+    }
 
-	@SuppressWarnings("unchecked")
-	public static EntityDefinition createEntityDefinition(Class<? extends TaggedLogAPIEntity> cls) {
-		
-		final EntityDefinition ed = new EntityDefinition();
+    private static void checkPrefix(EntityDefinition entityDef) {
+        final Integer entityPrefixHashcode = entityDef.getPrefix().hashCode();
+        if (entityPrefixMap.containsKey(entityDef.getTable())) {
+            final Map<Integer, EntityDefinition> entityHashMap = entityPrefixMap.get(entityDef.getTable());
+            if (entityHashMap.containsKey(entityPrefixHashcode)
+                && (!entityDef.equals(entityHashMap.get(entityPrefixHashcode)))) {
+                throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass()
+                    .getName() + ", because of the prefix hash code conflict! The entity prefix "
+                                                   + entityDef.getPrefix()
+                                                   + " has already been registered by entity service "
+                                                   + entityHashMap.get(entityPrefixHashcode).getService());
+            }
+            final IndexDefinition[] indexes = entityDef.getIndexes();
+            if (indexes != null) {
+                for (IndexDefinition index : indexes) {
+                    final Integer indexPrefixHashcode = index.getIndexPrefix().hashCode();
+                    if (entityHashMap.containsKey(indexPrefixHashcode)) {
+                        throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass()
+                            .getName() + ", because of the prefix hash code conflict! The index prefix "
+                                                           + index.getIndexPrefix()
+                                                           + " has already been registered by entity "
+                                                           + entityHashMap.get(indexPrefixHashcode)
+                                                               .getService());
+                    }
+                    final Map<Integer, IndexDefinition> indexHashMap = indexPrefixMap
+                        .get(entityDef.getTable());
+                    if (indexHashMap != null && indexHashMap.containsKey(indexPrefixHashcode)
+                        && (!index.equals(indexHashMap.get(indexPrefixHashcode)))) {
+                        throw new IllegalArgumentException("Failed to register entity " + entityDef.getClass()
+                            .getName() + ", because of the prefix hash code conflict! The index prefix "
+                                                           + index.getIndexPrefix()
+                                                           + " has already been registered by entity "
+                                                           + indexHashMap.get(indexPrefixHashcode)
+                                                               .getEntityDefinition().getService());
+                    }
+                }
+            }
+        }
+    }
 
-		ed.setEntityClass(cls);
-		// parse cls' annotations
-		Table table = cls.getAnnotation(Table.class);
-		if(table == null || table.value().isEmpty()){
-			throw new IllegalArgumentException("Entity class must have a non-empty table name annotated with @Table");
-		}
-		String tableName = table.value();
-		if(EagleConfigFactory.load().isTableNamePrefixedWithEnvironment()){
-			tableName = EagleConfigFactory.load().getEnv() + "_" + tableName;
-		}
-		ed.setTable(tableName);
-		
-		ColumnFamily family = cls.getAnnotation(ColumnFamily.class);
-		if(family == null || family.value().isEmpty()){
-			throw new IllegalArgumentException("Entity class must have a non-empty column family name annotated with @ColumnFamily");
-		}
-		ed.setColumnFamily(family.value());
-		
-		Prefix prefix = cls.getAnnotation(Prefix.class);
-		if(prefix == null || prefix.value().isEmpty()){
-			throw new IllegalArgumentException("Entity class must have a non-empty prefix name annotated with @Prefix");
-		}
-		ed.setPrefix(prefix.value());
-		
-		TimeSeries ts = cls.getAnnotation(TimeSeries.class);
-		if(ts == null){
-			throw new IllegalArgumentException("Entity class must have a non-empty timeseries name annotated with @TimeSeries");
-		}
-		ed.setTimeSeries(ts.value());
-		
-		Service service = cls.getAnnotation(Service.class);
-		if(service == null || service.value().isEmpty()){
-			ed.setService(cls.getSimpleName());
-		} else {
-			ed.setService(service.value());
-		}
+    /**
+     * Get the entity definition registered under a service name.
+     *
+     * @param serviceName entity service name
+     * @return the entity definition for the service name, or null if none is registered
+     * @throws IllegalAccessException
+     * @throws InstantiationException
+     */
+    public static EntityDefinition getEntityByServiceName(String serviceName)
+        throws InstantiationException, IllegalAccessException {
+        checkInit();
+        return entityServiceMap.get(serviceName);
+    }
 
-		Metric m = cls.getAnnotation(Metric.class);
-		Map<String, Class<?>> dynamicFieldTypes = new HashMap<String, Class<?>>();
-		if(m != null){
-			// metric has to be timeseries
-			if(!ts.value()){
-				throw new IllegalArgumentException("Metric entity must be time series as well");
-			}
-			MetricDefinition md = new MetricDefinition();
-			md.setInterval(m.interval());
-			ed.setMetricDefinition(md);
-		}
+    public static EntityDefinition getEntityDefinitionByEntityClass(Class<? extends TaggedLogAPIEntity> clazz)
+        throws InstantiationException, IllegalAccessException {
+        checkInit();
+        return classMap.get(clazz);
+    }
 
-		java.lang.reflect.Field[] fields = cls.getDeclaredFields();
-		for(java.lang.reflect.Field f : fields){
-			Column column = f.getAnnotation(Column.class); 
-			if(column == null || column.value().isEmpty()){
-				continue;
-			}
-			Class<?> fldCls = f.getType();
-			// intrusive check field type for metric entity
-			checkFieldTypeForMetric(ed.getMetricDefinition(), f.getName(), fldCls, dynamicFieldTypes);
-			Qualifier q = new Qualifier();
-			q.setDisplayName(f.getName());
-			q.setQualifierName(column.value());
-			EntitySerDeser<?> serDeser = _serDeserMap.get(fldCls); 
-			if(serDeser == null){
-//				throw new IllegalArgumentException(fldCls.getName() + " in field " + f.getName() +
-//						" of entity " + cls.getSimpleName() + " has no serializer associated ");
-				serDeser = DefaultJavaObjctSerDeser.INSTANCE;
-			}
+    private static void checkInit() throws InstantiationException, IllegalAccessException {
+        if (!initialized) {
+            synchronized (EntityDefinitionManager.class) {
+                if (!initialized) {
+                    EntityRepositoryScanner.scan();
+                    initialized = true;
+                }
+            }
+        }
+    }
 
-			q.setSerDeser((EntitySerDeser<Object>)serDeser);
-			ed.getQualifierNameMap().put(q.getQualifierName(), q);
-			ed.getDisplayNameMap().put(q.getDisplayName(), q);
-			// TODO: should refine rules, consider fields like "hCol", getter method should be gethCol() according to org.apache.commons.beanutils.PropertyUtils
-			final String propertyName = f.getName().substring(0,1).toUpperCase() + f.getName().substring(1);
-			String getterName = "get" + propertyName;
-			try {
-				Method method = cls.getMethod(getterName);
-				ed.getQualifierGetterMap().put(f.getName(), method);
-			} catch (Exception e) {
-				// Check if the type is boolean
-				getterName = "is" + propertyName;
-				try {
-					Method method = cls.getMethod(getterName);
-					ed.getQualifierGetterMap().put(f.getName(), method);
-				} catch (Exception e1) {
-					throw new IllegalArgumentException("Field " + f.getName() + " hasn't defined valid getter method: " + getterName, e);
-				}
-			}
-			if(LOG.isDebugEnabled()) LOG.debug("Field registered " + q);
-		}
+    public static void load() throws IllegalAccessException, InstantiationException {
+        checkInit();
+    }
 
-		// TODO: Lazy create because not used at all
-		// dynamically create bean class
-		if(ed.getMetricDefinition() != null){
-			Class<?> metricCls = createDynamicClassForMetric(cls.getName()+"_SingleTimestamp", dynamicFieldTypes);
-			ed.getMetricDefinition().setSingleTimestampEntityClass(metricCls);
-		}
-		
-		final Partition partition = cls.getAnnotation(Partition.class);
-		if (partition != null) {
-			final String[] partitions = partition.value();
-			ed.setPartitions(partitions);
-			// Check if partition fields are all tag fields. Partition field can't be column field, must be tag field.
-			for (String part : partitions) {
-				if (!ed.isTag(part)) {
-					throw new IllegalArgumentException("Partition field can't be column field, must be tag field. "
-							+ "Partition name: " + part);
-				}
-			}
-		}
-		
-		final Indexes indexes = cls.getAnnotation(Indexes.class);
-		if (indexes != null) {
-			final Index[] inds = indexes.value();
-			final IndexDefinition[] indexDefinitions = new IndexDefinition[inds.length];
-			for (int i = 0; i < inds.length; ++i) {
-				final Index ind = inds[i];
-				indexDefinitions[i] = new IndexDefinition(ed, ind);
-			}
-			ed.setIndexes(indexDefinitions);
-		}
-		
-		final ServicePath path = cls.getAnnotation(ServicePath.class);
-		if (path != null) {
-			if (path.path() != null && (!path.path().isEmpty())) {
-				ed.setServiceCreationPath(path.path());
-			}
-		}
+    /**
+     * Allows users to register their own field SerDeser.
+     *
+     * @param clazz the class handled by the SerDeser
+     * @param entitySerDeser entity or field SerDeser
+     * @throws IllegalArgumentException
+     */
+    public static void registerSerDeser(Class<?> clazz, EntitySerDeser<?> entitySerDeser) {
+        _serDeserMap.put(clazz, entitySerDeser);
+    }
 
-		final Tags tags = cls.getAnnotation(Tags.class);
-		if(tags != null) {
-			String[] tagNames = tags.value();
-			ed.setTags(tagNames);
-		}
+    /**
+     * Check whether the entity class is time series; false by default.
+     *
+     * @param clazz entity class to check
+     * @return true if the entity class is annotated as time series, otherwise false
+     */
+    public static boolean isTimeSeries(Class<? extends TaggedLogAPIEntity> clazz) {
+        TimeSeries ts = clazz.getAnnotation(TimeSeries.class);
+        return ts != null && ts.value();
+    }
 
-		return ed;
-	}
-	
-	private static void checkFieldTypeForMetric(MetricDefinition md, String fieldName, Object fldCls, Map<String, Class<?>> dynamicFieldTypes){
-		if(md != null){
-			if(fldCls.equals(int[].class)){
-				dynamicFieldTypes.put(fieldName, int.class);
-				return;
-			}else if(fldCls.equals(long[].class)){
-				dynamicFieldTypes.put(fieldName, long.class);
-				return;
-			}else if(fldCls.equals(double[].class)){
-				dynamicFieldTypes.put(fieldName, double.class);
-				return;
-			}
-			throw new IllegalArgumentException("Fields for metric entity must be one of int[], long[] or double[]");
-		}
-	}
-	
-	private static Class<?> createDynamicClassForMetric(final String className, Map<String, Class<?>> dynamicFieldTypes){
-		BeanGenerator beanGenerator = new BeanGenerator();
-		beanGenerator.setNamingPolicy(new NamingPolicy(){
-	        @Override 
-	        public String getClassName(String prefix,String source, Object key, Predicate names){
-	            return className;
-	        }});
-	    BeanGenerator.addProperties(beanGenerator, dynamicFieldTypes);
-	    beanGenerator.setSuperclass(TaggedLogAPIEntity.class);
-	    return (Class<?>) beanGenerator.createClass();
-	}
-	
-	public static Map<String, EntityDefinition> entities() throws Exception{
-		checkInit();
-		return entityServiceMap;
-	}
+    @SuppressWarnings("unchecked")
+    public static EntityDefinition createEntityDefinition(Class<? extends TaggedLogAPIEntity> cls) {
+
+        final EntityDefinition ed = new EntityDefinition();
+
+        ed.setEntityClass(cls);
+        // parse cls' annotations
+        Table table = cls.getAnnotation(Table.class);
+        if (table == null || table.value().isEmpty()) {
+            throw new IllegalArgumentException("Entity class must have a non-empty table name annotated with @Table");
+        }
+        String tableName = table.value();
+        if (EagleConfigFactory.load().isTableNamePrefixedWithEnvironment()) {
+            tableName = EagleConfigFactory.load().getEnv() + "_" + tableName;
+        }
+        ed.setTable(tableName);
+
+        ColumnFamily family = cls.getAnnotation(ColumnFamily.class);
+        if (family == null || family.value().isEmpty()) {
+            throw new IllegalArgumentException("Entity class must have a non-empty column family name annotated with @ColumnFamily");
+        }
+        ed.setColumnFamily(family.value());
+
+        Prefix prefix = cls.getAnnotation(Prefix.class);
+        if (prefix == null || prefix.value().isEmpty()) {
+            throw new IllegalArgumentException("Entity class must have a non-empty prefix name annotated with @Prefix");
+        }
+        ed.setPrefix(prefix.value());
+
+        TimeSeries ts = cls.getAnnotation(TimeSeries.class);
+        if (ts == null) {
+            throw new IllegalArgumentException("Entity class must have a non-empty timeseries name annotated with @TimeSeries");
+        }
+        ed.setTimeSeries(ts.value());
+
+        Service service = cls.getAnnotation(Service.class);
+        if (service == null || service.value().isEmpty()) {
+            ed.setService(cls.getSimpleName());
+        } else {
+            ed.setService(service.value());
+        }
+
+        Metric m = cls.getAnnotation(Metric.class);
+        Map<String, Class<?>> dynamicFieldTypes = new HashMap<String, Class<?>>();
+        if (m != null) {
+            // metric has to be timeseries
+            if (!ts.value()) {
+                throw new IllegalArgumentException("Metric entity must be time series as well");
+            }
+            MetricDefinition md = new MetricDefinition();
+            md.setInterval(m.interval());
+            ed.setMetricDefinition(md);
+        }
+
+        java.lang.reflect.Field[] fields = cls.getDeclaredFields();
+        for (java.lang.reflect.Field f : fields) {
+            Column column = f.getAnnotation(Column.class);
+            if (column == null || column.value().isEmpty()) {
+                continue;
+            }
+            Class<?> fldCls = f.getType();
+            // intrusive check field type for metric entity
+            checkFieldTypeForMetric(ed.getMetricDefinition(), f.getName(), fldCls, dynamicFieldTypes);
+            Qualifier q = new Qualifier();
+            q.setDisplayName(f.getName());
+            q.setQualifierName(column.value());
+            EntitySerDeser<?> serDeser = _serDeserMap.get(fldCls);
+            if (serDeser == null) {
+                // throw new IllegalArgumentException(fldCls.getName() + " in field " + f.getName() +
+                // " of entity " + cls.getSimpleName() + " has no serializer associated ");
+                serDeser = DefaultJavaObjctSerDeser.INSTANCE;
+            }
+
+            q.setSerDeser((EntitySerDeser<Object>)serDeser);
+            ed.getQualifierNameMap().put(q.getQualifierName(), q);
+            ed.getDisplayNameMap().put(q.getDisplayName(), q);
+            // TODO: should refine rules, consider fields like "hCol", getter method should be gethCol()
+            // according to org.apache.commons.beanutils.PropertyUtils
+            final String propertyName = f.getName().substring(0, 1).toUpperCase() + f.getName().substring(1);
+            String getterName = "get" + propertyName;
+            try {
+                Method method = cls.getMethod(getterName);
+                ed.getQualifierGetterMap().put(f.getName(), method);
+            } catch (Exception e) {
+                // Check if the type is boolean
+                getterName = "is" + propertyName;
+                try {
+                    Method method = cls.getMethod(getterName);
+                    ed.getQualifierGetterMap().put(f.getName(), method);
+                } catch (Exception e1) {
+                    throw new IllegalArgumentException("Field " + f.getName()
+                                                       + " hasn't defined valid getter method: " + getterName,
+                                                       e);
+                }
+            }
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("Field registered " + q);
+            }
+        }
+
+        // TODO: Lazy create because not used at all
+        // dynamically create bean class
+        if (ed.getMetricDefinition() != null) {
+            Class<?> metricCls = createDynamicClassForMetric(cls.getName() + "_SingleTimestamp",
+                                                             dynamicFieldTypes);
+            ed.getMetricDefinition().setSingleTimestampEntityClass(metricCls);
+        }
+
+        final Partition partition = cls.getAnnotation(Partition.class);
+        if (partition != null) {
+            final String[] partitions = partition.value();
+            ed.setPartitions(partitions);
+            // Check if partition fields are all tag fields. Partition field can't be column field, must be
+            // tag field.
+            for (String part : partitions) {
+                if (!ed.isTag(part)) {
+                    throw new IllegalArgumentException("Partition field can't be column field, must be tag field. "
+                                                       + "Partition name: " + part);
+                }
+            }
+        }
+
+        final Indexes indexes = cls.getAnnotation(Indexes.class);
+        if (indexes != null) {
+            final Index[] inds = indexes.value();
+            final IndexDefinition[] indexDefinitions = new IndexDefinition[inds.length];
+            for (int i = 0; i < inds.length; ++i) {
+                final Index ind = inds[i];
+                indexDefinitions[i] = new IndexDefinition(ed, ind);
+            }
+            ed.setIndexes(indexDefinitions);
+        }
+
+        final ServicePath path = cls.getAnnotation(ServicePath.class);
+        if (path != null) {
+            if (path.path() != null && (!path.path().isEmpty())) {
+                ed.setServiceCreationPath(path.path());
+            }
+        }
+
+        final Tags tags = cls.getAnnotation(Tags.class);
+        if (tags != null) {
+            String[] tagNames = tags.value();
+            ed.setTags(tagNames);
+        }
+
+        return ed;
+    }
+
+    private static void checkFieldTypeForMetric(MetricDefinition md, String fieldName, Object fldCls,
+                                                Map<String, Class<?>> dynamicFieldTypes) {
+        if (md != null) {
+            if (fldCls.equals(int[].class)) {
+                dynamicFieldTypes.put(fieldName, int.class);
+                return;
+            } else if (fldCls.equals(long[].class)) {
+                dynamicFieldTypes.put(fieldName, long.class);
+                return;
+            } else if (fldCls.equals(double[].class)) {
+                dynamicFieldTypes.put(fieldName, double.class);
+                return;
+            }
+            throw new IllegalArgumentException("Fields for metric entity must be one of int[], long[] or double[]");
+        }
+    }
+
+    private static Class<?> createDynamicClassForMetric(final String className,
+                                                        Map<String, Class<?>> dynamicFieldTypes) {
+        BeanGenerator beanGenerator = new BeanGenerator();
+        beanGenerator.setNamingPolicy(new NamingPolicy() {
+            @Override
+            public String getClassName(String prefix, String source, Object key, Predicate names) {
+                return className;
+            }
+        });
+        BeanGenerator.addProperties(beanGenerator, dynamicFieldTypes);
+        beanGenerator.setSuperclass(TaggedLogAPIEntity.class);
+        return (Class<?>)beanGenerator.createClass();
+    }
+
+    public static Map<String, EntityDefinition> entities() throws Exception {
+        checkInit();
+        return entityServiceMap;
+    }
 }
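
For readers unfamiliar with the registration flow reformatted above, the sketch below shows how an entity could be declared and registered through `EntityDefinitionManager`. It is a minimal, hypothetical example that is not part of this patch: `DemoEntity`, its table/column-family/prefix values and the field names are made up, and the annotation packages are assumed from the module layout in this diff (`org.apache.eagle.log.entity.meta`).

```java
// Hypothetical example only -- names and values are illustrative, not part of this patch.
// Packages are assumed from the file paths in this diff.
import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.meta.Column;
import org.apache.eagle.log.entity.meta.ColumnFamily;
import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
import org.apache.eagle.log.entity.meta.Prefix;
import org.apache.eagle.log.entity.meta.Service;
import org.apache.eagle.log.entity.meta.Table;
import org.apache.eagle.log.entity.meta.TimeSeries;

@Table("demo_table")        // required, non-empty table name
@ColumnFamily("f")          // required, non-empty column family
@Prefix("demo")             // required, non-empty row key prefix
@TimeSeries(false)          // required; @Metric entities must additionally be time series
@Service("DemoService")     // optional; defaults to the simple class name
public class DemoEntity extends TaggedLogAPIEntity {

    @Column("a")            // fields without @Column are skipped by createEntityDefinition
    private String message;

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public static void main(String[] args) {
        // Builds an EntityDefinition from the annotations above and registers it
        // under the @Service name; requires Eagle configuration to be loadable.
        EntityDefinitionManager.registerEntity(DemoEntity.class);
    }
}
```
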
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java
index 25d55e0..08caeab 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeser.java
@@ -17,7 +17,9 @@
 package org.apache.eagle.log.entity.meta;
 
 public interface EntitySerDeser<T> {
-	public T deserialize(byte[] bytes);
-	public byte[] serialize(T t);
-	public Class<T> type();
+    public T deserialize(byte[] bytes);
+
+    public byte[] serialize(T t);
+
+    public Class<T> type();
 }
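
The interface reformatted above is small enough that a custom implementation fits in a few lines. The sketch below is illustrative only and not part of this patch (the module already ships a `StringSerDeser` for `String`); it just shows the shape of a serializer that could be plugged in via `EntityDefinitionManager.registerSerDeser`.

```java
import java.nio.charset.StandardCharsets;

import org.apache.eagle.log.entity.meta.EntitySerDeser;

// Illustrative sketch, not part of this patch: a UTF-8 String serializer following the
// EntitySerDeser contract shown above. A custom type would be registered with
// EntityDefinitionManager.registerSerDeser(MyType.class, new MyTypeSerDeser()).
public class Utf8StringSerDeser implements EntitySerDeser<String> {

    @Override
    public String deserialize(byte[] bytes) {
        return bytes == null ? null : new String(bytes, StandardCharsets.UTF_8);
    }

    @Override
    public byte[] serialize(String value) {
        return value == null ? null : value.getBytes(StandardCharsets.UTF_8);
    }

    @Override
    public Class<String> type() {
        return String.class;
    }
}
```
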
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java
index a7ec4e4..1e1ca48 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/EntitySerDeserializer.java
@@ -26,54 +26,54 @@
 import java.util.Map;
 
 public class EntitySerDeserializer {
-	private static final Logger LOG = LoggerFactory.getLogger(EntitySerDeserializer.class);
-	
-	// TODO throws seperate exceptions
-	@SuppressWarnings("unchecked")
-	public <T> T readValue(Map<String, byte[]> qualifierValues, EntityDefinition ed) throws Exception{
-		Class<? extends TaggedLogAPIEntity> clazz = ed.getEntityClass();
-		if(clazz == null){
-			throw new NullPointerException("Entity class of service "+ed.getService()+" is null");
-		}
-		TaggedLogAPIEntity obj = clazz.newInstance();
-		Map<String, Qualifier> map = ed.getQualifierNameMap();
-		for(Map.Entry<String, byte[]> entry : qualifierValues.entrySet()){
-			Qualifier q = map.get(entry.getKey());
-			if(q == null){
-				// if it's not pre-defined qualifier, it must be tag unless it's a bug
-				if(obj.getTags() == null){
-					obj.setTags(new HashMap<String, String>());
-				}
-				obj.getTags().put(entry.getKey(), new StringSerDeser().deserialize(entry.getValue()));
-				continue;
-			}
-			
-			// TODO performance loss compared with new operator
-			// parse different types of qualifiers
-			String fieldName = q.getDisplayName();
-			PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(obj, fieldName);
-			if(entry.getValue() != null){
-				Object args = q.getSerDeser().deserialize(entry.getValue());
-				pd.getWriteMethod().invoke(obj, args);
-//				if (logger.isDebugEnabled()) {
-//					logger.debug(entry.getKey() + ":" + args + " is deserialized");
-//				}
-			}
-		}
-		return (T)obj;
-	}
-	
-	public Map<String, byte[]> writeValue(TaggedLogAPIEntity entity, EntityDefinition ed) throws Exception{
-		Map<String, byte[]> qualifierValues = new HashMap<String, byte[]>();
-		// iterate all modified qualifiers
-		for(String fieldName : entity.modifiedQualifiers()){
-			PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(entity, fieldName);
-			Object obj = pd.getReadMethod().invoke(entity);
-			Qualifier q = ed.getDisplayNameMap().get(fieldName);
-			EntitySerDeser<Object> ser = q.getSerDeser();
-			byte[] value = ser.serialize(obj);
-			qualifierValues.put(q.getQualifierName(), value);
-		}
-		return qualifierValues;
-	}
+    private static final Logger LOG = LoggerFactory.getLogger(EntitySerDeserializer.class);
+
+    // TODO throw separate exceptions
+    @SuppressWarnings("unchecked")
+    public <T> T readValue(Map<String, byte[]> qualifierValues, EntityDefinition ed) throws Exception {
+        Class<? extends TaggedLogAPIEntity> clazz = ed.getEntityClass();
+        if (clazz == null) {
+            throw new NullPointerException("Entity class of service " + ed.getService() + " is null");
+        }
+        TaggedLogAPIEntity obj = clazz.newInstance();
+        Map<String, Qualifier> map = ed.getQualifierNameMap();
+        for (Map.Entry<String, byte[]> entry : qualifierValues.entrySet()) {
+            Qualifier q = map.get(entry.getKey());
+            if (q == null) {
+                // if it's not pre-defined qualifier, it must be tag unless it's a bug
+                if (obj.getTags() == null) {
+                    obj.setTags(new HashMap<String, String>());
+                }
+                obj.getTags().put(entry.getKey(), new StringSerDeser().deserialize(entry.getValue()));
+                continue;
+            }
+
+            // TODO performance loss compared with new operator
+            // parse different types of qualifiers
+            String fieldName = q.getDisplayName();
+            PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(obj, fieldName);
+            if (entry.getValue() != null) {
+                Object args = q.getSerDeser().deserialize(entry.getValue());
+                pd.getWriteMethod().invoke(obj, args);
+                // if (logger.isDebugEnabled()) {
+                // logger.debug(entry.getKey() + ":" + args + " is deserialized");
+                // }
+            }
+        }
+        return (T)obj;
+    }
+
+    public Map<String, byte[]> writeValue(TaggedLogAPIEntity entity, EntityDefinition ed) throws Exception {
+        Map<String, byte[]> qualifierValues = new HashMap<String, byte[]>();
+        // iterate all modified qualifiers
+        for (String fieldName : entity.modifiedQualifiers()) {
+            PropertyDescriptor pd = PropertyUtils.getPropertyDescriptor(entity, fieldName);
+            Object obj = pd.getReadMethod().invoke(entity);
+            Qualifier q = ed.getDisplayNameMap().get(fieldName);
+            EntitySerDeser<Object> ser = q.getSerDeser();
+            byte[] value = ser.serialize(obj);
+            qualifierValues.put(q.getQualifierName(), value);
+        }
+        return qualifierValues;
+    }
 }
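
To make the read/write pair above concrete, here is a hedged round-trip sketch, not taken from this patch. It assumes the hypothetical `DemoEntity`/`DemoService` from the earlier example, that `EntityDefinition` and `EntityDefinitionManager` live in the same package as the files in this diff, and that the entity's setters track changed fields the way real `TaggedLogAPIEntity` subclasses do (otherwise `writeValue` would find nothing in `modifiedQualifiers()`).

```java
import java.util.Map;

import org.apache.eagle.log.entity.meta.EntityDefinition;
import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
import org.apache.eagle.log.entity.meta.EntitySerDeserializer;

// Hedged sketch only. DemoEntity/"DemoService" are hypothetical, and DemoEntity's
// setters are assumed to record the field in modifiedQualifiers().
public final class SerDeRoundTripExample {

    public static void roundTrip() throws Exception {
        EntityDefinition ed = EntityDefinitionManager.getEntityByServiceName("DemoService");
        EntitySerDeserializer serde = new EntitySerDeserializer();

        DemoEntity in = new DemoEntity();
        in.setMessage("hello");

        // entity -> qualifier name/value bytes, then back to an entity
        Map<String, byte[]> qualifierValues = serde.writeValue(in, ed);
        DemoEntity out = serde.readValue(qualifierValues, ed);
    }

    private SerDeRoundTripExample() {
    }
}
```
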
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java
index c7dc113..d13e550 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Index.java
@@ -21,12 +21,16 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Index {
 
     public String name();
+
     public String[] columns();
+
     public boolean unique();
-//	boolean unique() default true;
+    // boolean unique() default true;
 }
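
The `@Index` annotation reformatted above is read by `createEntityDefinition` (shown earlier in this patch) through `@Indexes.value()`, and each declared `@Index` is wrapped in an `IndexDefinition` (the next file). A hypothetical declaration might look like the sketch below; the entity, index name and column values are made up and are not part of this patch.

```java
// Hypothetical usage only. Annotation packages are assumed to be
// org.apache.eagle.log.entity.meta, as in the DemoEntity sketch above.
import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.meta.Column;
import org.apache.eagle.log.entity.meta.ColumnFamily;
import org.apache.eagle.log.entity.meta.Index;
import org.apache.eagle.log.entity.meta.Indexes;
import org.apache.eagle.log.entity.meta.Prefix;
import org.apache.eagle.log.entity.meta.Table;
import org.apache.eagle.log.entity.meta.TimeSeries;

@Table("demo_table")
@ColumnFamily("f")
@Prefix("demoidx")
@TimeSeries(false)
@Indexes({
    @Index(name = "byMessage", columns = {"message"}, unique = true)
})
public class IndexedDemoEntity extends TaggedLogAPIEntity {

    @Column("a")
    private String message;

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }
}
```
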
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java
index 2e62420..810ad6b 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IndexDefinition.java
@@ -39,297 +39,304 @@
 import org.apache.eagle.common.ByteUtil;
 
 /**
- * Eagle index schema definition.
- * 
- * 1. Index schema can be defined in entity class by annotation.
- * 2. One index schema can contain multiple fields/tags, defined in order
- * 3. We only support immutable indexing for now
- * 4. When entity is created or deleted, the corresponding index entity should be created or deleted at the same time
- * 5. Index transparency to queries. Queries go through index when and only when index can serve all search conditions after query rewrite
- * 
- *
+ * Eagle index schema definition.
+ * <ol>
+ * <li>An index schema can be defined in the entity class by annotation.</li>
+ * <li>One index schema can contain multiple fields/tags, defined in order.</li>
+ * <li>Only immutable indexing is supported for now.</li>
+ * <li>When an entity is created or deleted, the corresponding index entity is created or deleted at the
+ * same time.</li>
+ * <li>Indexes are transparent to queries: a query goes through an index when and only when the index can
+ * serve all search conditions after query rewrite.</li>
+ * </ol>
  */
 public class IndexDefinition {
-	
-	public enum IndexType {
-		UNIQUE_INDEX,
-		NON_CLUSTER_INDEX,
-		NON_INDEX
-	}
 
-	private final EntityDefinition entityDef;
-	private final Index index;
-	private final IndexColumn[] columns;
-	private final String indexPrefix;
-	
-	private static final byte[] EMPTY_VALUE = new byte[0];
-	private static final Charset UTF_8_CHARSET = Charset.forName("UTF-8");
-	public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0;
-	public static final int MAX_INDEX_VALUE_BYTE_LENGTH = 65535;
-	
-	private static final String FIELD_NAME_PATTERN_STRING = "^@(.*)$";
-	private static final Pattern FIELD_NAME_PATTERN = Pattern.compile(FIELD_NAME_PATTERN_STRING);
-	private final static Logger LOG = LoggerFactory.getLogger(IndexDefinition.class);
+    public enum IndexType {
+        UNIQUE_INDEX,
+        NON_CLUSTER_INDEX,
+        NON_INDEX
+    }
 
-	public IndexDefinition(EntityDefinition entityDef, Index index) {
-		this.entityDef = entityDef;
-		this.index = index;
-		this.indexPrefix = entityDef.getPrefix() + "_" + index.name();
-		final String[] indexColumns = index.columns();
-		this.columns = new IndexColumn[indexColumns.length];
-		for (int i = 0; i < indexColumns.length; ++i) {
-			final String name = indexColumns[i];
-			final boolean isTag = entityDef.isTag(name);
-			final Qualifier qualifier = isTag ? null : entityDef.getDisplayNameMap().get(name);
-			columns[i] = new IndexColumn(name, isTag, qualifier);
-		}
-		LOG.info("Created index " + index.name() + " for " + entityDef.getEntityClass().getSimpleName());
-	}
+    private final EntityDefinition entityDef;
+    private final Index index;
+    private final IndexColumn[] columns;
+    private final String indexPrefix;
 
-	public EntityDefinition getEntityDefinition() {
-		return entityDef;
-	}
-	
-	public Index getIndex() {
-		return index;
-	}
-	
-	public String getIndexName() {
-		return index.name();
-	}
-	
-	public IndexColumn[] getIndexColumns() {
-		return columns;
-	}
-	
-	public String getIndexPrefix() {
-		return indexPrefix;
-	}
-	
-	public boolean isUnique() {
-		return index.unique();
-	}
-	
-	/**
-	 * Check if the query is suitable to go through index. If true, then return the value of index fields in order. Otherwise return null.
-	 * TODO: currently index fields should be string type.
-	 * 
-	 * @param query query expression after re-write
-	 * @param rowkeys if the query can go through the index, all rowkeys will be added into rowkeys.
-	 * @return true if the query can go through the index, otherwise return false
-	 */
-	public IndexType canGoThroughIndex(ORExpression query, List<byte[]> rowkeys) {
-		if (query == null || query.getANDExprList() == null || query.getANDExprList().isEmpty()) 
-			return IndexType.NON_CLUSTER_INDEX;
-		if (rowkeys != null) {
-			rowkeys.clear();
-		}
-		final Map<String, String> indexfieldMap = new HashMap<String, String>();
-		for(ANDExpression andExpr : query.getANDExprList()) {
-			indexfieldMap.clear();
-			for(AtomicExpression ae : andExpr.getAtomicExprList()) {
-				// TODO temporarily ignore those fields which are not for attributes
-				final String fieldName = parseEntityAttribute(ae.getKey());
-				if(fieldName != null && ComparisonOperator.EQUAL.equals(ae.getOp())){
-					indexfieldMap.put(fieldName, ae.getValue());
-				}
-			}
-			final String[] partitions = entityDef.getPartitions();
-			int[] partitionValueHashs = null;
-			if (partitions != null) {
-				partitionValueHashs = new int[partitions.length];
-				for (int i = 0; i < partitions.length; ++i) {
-					final String value = indexfieldMap.get(partitions[i]);
-					if (value == null) {
-						throw new IllegalArgumentException("Partition " + partitions[i] + " is not defined in the query: " + query.toString());
-					}
-					partitionValueHashs[i] = value.hashCode();
-				}
-			}
-			final byte[][] indexFieldValues = new byte[columns.length][];
-			for (int i = 0; i < columns.length; ++i) {
-				final IndexColumn col = columns[i];
-				if (!indexfieldMap.containsKey(col.getColumnName())) {
-					// If we have to use scan anyway, there's no need to go through index
-					return IndexType.NON_INDEX;
-				}
-				final String value = indexfieldMap.get(col.getColumnName());
-				indexFieldValues[i] = value.getBytes();
-			}
-			final byte[] rowkey = generateUniqueIndexRowkey(indexFieldValues, partitionValueHashs, null);
-			if (rowkeys != null) {
-				rowkeys.add(rowkey);
-			}
-		}
-		if (index.unique()) {
-			return IndexType.UNIQUE_INDEX;
-		}
-		return IndexType.NON_CLUSTER_INDEX;
-	}
+    private static final byte[] EMPTY_VALUE = new byte[0];
+    private static final Charset UTF_8_CHARSET = Charset.forName("UTF-8");
+    public static final int EMPTY_PARTITION_DEFAULT_HASH_CODE = 0;
+    public static final int MAX_INDEX_VALUE_BYTE_LENGTH = 65535;
 
-	private String parseEntityAttribute(String fieldName) {
-		Matcher m = FIELD_NAME_PATTERN.matcher(fieldName);
-		if(m.find()){
-			return m.group(1);
-		}
-		return null;
-	}
-	
-	// TODO: We should move index rowkey generation later since this class is for general purpose, not only for hbase.
-	public byte[] generateIndexRowkey(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
-		if (entity.getClass() != entityDef.getEntityClass()) {
-			throw new IllegalArgumentException("Expected entity class: " + entityDef.getEntityClass().getName() + ", but got class " + entity.getClass().getName());
-		}
-		final byte[][] indexValues = generateIndexValues(entity);
-		final int[] partitionHashCodes = generatePartitionHashCodes(entity);
-		SortedMap<Integer, Integer> tagMap = null;
-		if (!index.unique()) {
-			// non cluster index
-			tagMap = RowkeyBuilder.generateSortedTagMap(entityDef.getPartitions(), entity.getTags());
-		}
-		
-		return generateUniqueIndexRowkey(indexValues, partitionHashCodes, tagMap);
-	}
-	
-	private byte[] generateUniqueIndexRowkey(byte[][] indexValues, int[] partitionHashCodes, SortedMap<Integer, Integer> tagMap) {
-		final int prefixHashCode = indexPrefix.hashCode();
-		int totalLength = 4;
-		totalLength += (partitionHashCodes != null) ? (4 * partitionHashCodes.length) : 0;
-		
-		totalLength += (2 * indexValues.length);
-		for (int i = 0; i < indexValues.length; ++i) {
-			final byte[] value = indexValues[i];
-			totalLength += value.length;
-		}
-		if (tagMap != null && (!tagMap.isEmpty())) {
-			totalLength += tagMap.size() * 8;
-		}
-		
-		int offset = 0;
-		final byte[] rowkey = new byte[totalLength];
-		
-		// 1. set prefix
-		ByteUtil.intToBytes(prefixHashCode, rowkey, offset);
-		offset += 4;
-		
-		// 2. set partition
-		if (partitionHashCodes != null) {
-			for (Integer partitionHashCode : partitionHashCodes) {
-				ByteUtil.intToBytes(partitionHashCode, rowkey, offset);
-				offset += 4;
-			}
-		}
-		
-		// 3. set index values
-		for (int i = 0; i < columns.length; ++i) {
-			ByteUtil.shortToBytes((short)indexValues[i].length, rowkey, offset);
-			offset += 2;
-			for (int j = 0; j < indexValues[i].length; ++j) {
-				rowkey[offset++] = indexValues[i][j];
-			}
-		}
-		
-		// Check if it's non clustered index, then set the tag/value hash code
-		if (tagMap != null && (!tagMap.isEmpty())) {
-			// 4. set tag key/value hashes
-			for (Map.Entry<Integer, Integer> entry : tagMap.entrySet()) {
-				ByteUtil.intToBytes(entry.getKey(), rowkey, offset);
-				offset += 4;
-				ByteUtil.intToBytes(entry.getValue(), rowkey, offset);
-				offset += 4;
-			}
-		}
-		
-		return rowkey;
-	}
+    private static final String FIELD_NAME_PATTERN_STRING = "^@(.*)$";
+    private static final Pattern FIELD_NAME_PATTERN = Pattern.compile(FIELD_NAME_PATTERN_STRING);
+    private static final Logger LOG = LoggerFactory.getLogger(IndexDefinition.class);
 
-	private int[] generatePartitionHashCodes(TaggedLogAPIEntity entity) {
-		final String[] partitions = entityDef.getPartitions();
-		int[] result = null;
-		if (partitions != null) {
-			result = new int[partitions.length];
-			final Map<String, String> tags = entity.getTags();
-			for (int i = 0 ; i < partitions.length; ++i) {
-				final String partition = partitions[i];
-				final String tagValue = tags.get(partition);
-				if (tagValue != null) {
-					result[i] = tagValue.hashCode();
-				} else {
-					result[i] = EMPTY_PARTITION_DEFAULT_HASH_CODE;
-				}
-			}
-		}
-		return result;
-	}
+    public IndexDefinition(EntityDefinition entityDef, Index index) {
+        this.entityDef = entityDef;
+        this.index = index;
+        this.indexPrefix = entityDef.getPrefix() + "_" + index.name();
+        final String[] indexColumns = index.columns();
+        this.columns = new IndexColumn[indexColumns.length];
+        for (int i = 0; i < indexColumns.length; ++i) {
+            final String name = indexColumns[i];
+            final boolean isTag = entityDef.isTag(name);
+            final Qualifier qualifier = isTag ? null : entityDef.getDisplayNameMap().get(name);
+            columns[i] = new IndexColumn(name, isTag, qualifier);
+        }
+        LOG.info("Created index " + index.name() + " for " + entityDef.getEntityClass().getSimpleName());
+    }
 
-	private byte[][] generateIndexValues(TaggedLogAPIEntity entity) throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
+    public EntityDefinition getEntityDefinition() {
+        return entityDef;
+    }
 
-		final byte[][] result = new byte[columns.length][];
-		for (int i = 0; i < columns.length; ++i) {
-			final IndexColumn column = columns[i];
-			final String columnName = column.getColumnName();
-			if (column.isTag) {
-				final Map<String, String> tags = entity.getTags();
-				if (tags == null || tags.get(columnName) == null) {
-					result[i] = EMPTY_VALUE;
-				} else {
-					result[i] = tags.get(columnName).getBytes(UTF_8_CHARSET);
-				}
-			} else {
-				PropertyDescriptor pd = column.getPropertyDescriptor();
-				if (pd == null) {
-					pd = PropertyUtils.getPropertyDescriptor(entity, columnName);
-					column.setPropertyDescriptor(pd);
-				}
-				final Object value = pd.getReadMethod().invoke(entity);
-				if (value == null) {
-					result[i] = EMPTY_VALUE;
-				} else {
-					final Qualifier q = column.getQualifier();
-					result[i] = q.getSerDeser().serialize(value);
-				}
-			}
-			if (result[i].length > MAX_INDEX_VALUE_BYTE_LENGTH) {
-				throw new IllegalArgumentException("Index field value exceeded the max length: " + MAX_INDEX_VALUE_BYTE_LENGTH + ", actual length: " + result[i].length);
-			}
-		}
-		return result;
-	}
-	
-	/**
-	 * Index column definition class
-	 *
-	 */
-	public static class IndexColumn {
-		private final String columnName;
-		private final boolean isTag;
-		private final Qualifier qualifier;
-		private PropertyDescriptor propertyDescriptor;
-		
-		public IndexColumn(String columnName, boolean isTag, Qualifier qualifier) {
-			this.columnName = columnName;
-			this.isTag = isTag;
-			this.qualifier = qualifier;
-		}
-		
-		public String getColumnName() {
-			return columnName;
-		}
-		public boolean isTag() {
-			return isTag;
-		}
-		
-		public Qualifier getQualifier() {
-			return qualifier;
-		}
+    public Index getIndex() {
+        return index;
+    }
 
-		public PropertyDescriptor getPropertyDescriptor() {
-			return propertyDescriptor;
-		}
+    public String getIndexName() {
+        return index.name();
+    }
 
-		public void setPropertyDescriptor(PropertyDescriptor propertyDescriptor) {
-			this.propertyDescriptor = propertyDescriptor;
-		}
-		
-	}
+    public IndexColumn[] getIndexColumns() {
+        return columns;
+    }
+
+    public String getIndexPrefix() {
+        return indexPrefix;
+    }
+
+    public boolean isUnique() {
+        return index.unique();
+    }
+
+    /**
+     * Check whether the query can be served by this index. If it can, the matching index rowkeys are
+     * generated and added to the rowkeys list. TODO: currently index fields should be string type.
+     *
+     * @param query query expression after re-write
+     * @param rowkeys if the query can go through the index, all generated rowkeys are added to this list
+     * @return NON_INDEX if the index cannot serve the query; otherwise UNIQUE_INDEX for a unique index or
+     *         NON_CLUSTER_INDEX for a non-clustered one
+     */
+    public IndexType canGoThroughIndex(ORExpression query, List<byte[]> rowkeys) {
+        if (query == null || query.getANDExprList() == null || query.getANDExprList().isEmpty()) {
+            return IndexType.NON_CLUSTER_INDEX;
+        }
+        if (rowkeys != null) {
+            rowkeys.clear();
+        }
+        final Map<String, String> indexfieldMap = new HashMap<String, String>();
+        for (ANDExpression andExpr : query.getANDExprList()) {
+            indexfieldMap.clear();
+            for (AtomicExpression ae : andExpr.getAtomicExprList()) {
+                // TODO temporarily ignore those fields which are not for attributes
+                final String fieldName = parseEntityAttribute(ae.getKey());
+                if (fieldName != null && ComparisonOperator.EQUAL.equals(ae.getOp())) {
+                    indexfieldMap.put(fieldName, ae.getValue());
+                }
+            }
+            final String[] partitions = entityDef.getPartitions();
+            int[] partitionValueHashs = null;
+            if (partitions != null) {
+                partitionValueHashs = new int[partitions.length];
+                for (int i = 0; i < partitions.length; ++i) {
+                    final String value = indexfieldMap.get(partitions[i]);
+                    if (value == null) {
+                        throw new IllegalArgumentException("Partition " + partitions[i]
+                                                           + " is not defined in the query: "
+                                                           + query.toString());
+                    }
+                    partitionValueHashs[i] = value.hashCode();
+                }
+            }
+            final byte[][] indexFieldValues = new byte[columns.length][];
+            for (int i = 0; i < columns.length; ++i) {
+                final IndexColumn col = columns[i];
+                if (!indexfieldMap.containsKey(col.getColumnName())) {
+                    // If we have to use scan anyway, there's no need to go through index
+                    return IndexType.NON_INDEX;
+                }
+                final String value = indexfieldMap.get(col.getColumnName());
+                indexFieldValues[i] = value.getBytes();
+            }
+            final byte[] rowkey = generateUniqueIndexRowkey(indexFieldValues, partitionValueHashs, null);
+            if (rowkeys != null) {
+                rowkeys.add(rowkey);
+            }
+        }
+        if (index.unique()) {
+            return IndexType.UNIQUE_INDEX;
+        }
+        return IndexType.NON_CLUSTER_INDEX;
+    }
+
+    private String parseEntityAttribute(String fieldName) {
+        Matcher m = FIELD_NAME_PATTERN.matcher(fieldName);
+        if (m.find()) {
+            return m.group(1);
+        }
+        return null;
+    }
+
+    // TODO: We should move index rowkey generation later since this class is for general purpose, not only
+    // for hbase.
+    public byte[] generateIndexRowkey(TaggedLogAPIEntity entity)
+        throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
+        if (entity.getClass() != entityDef.getEntityClass()) {
+            throw new IllegalArgumentException("Expected entity class: "
+                                               + entityDef.getEntityClass().getName() + ", but got class "
+                                               + entity.getClass().getName());
+        }
+        final byte[][] indexValues = generateIndexValues(entity);
+        final int[] partitionHashCodes = generatePartitionHashCodes(entity);
+        SortedMap<Integer, Integer> tagMap = null;
+        if (!index.unique()) {
+            // non cluster index
+            tagMap = RowkeyBuilder.generateSortedTagMap(entityDef.getPartitions(), entity.getTags());
+        }
+
+        return generateUniqueIndexRowkey(indexValues, partitionHashCodes, tagMap);
+    }
+
+    private byte[] generateUniqueIndexRowkey(byte[][] indexValues, int[] partitionHashCodes,
+                                             SortedMap<Integer, Integer> tagMap) {
+        final int prefixHashCode = indexPrefix.hashCode();
+        int totalLength = 4;
+        totalLength += (partitionHashCodes != null) ? (4 * partitionHashCodes.length) : 0;
+
+        totalLength += (2 * indexValues.length);
+        for (int i = 0; i < indexValues.length; ++i) {
+            final byte[] value = indexValues[i];
+            totalLength += value.length;
+        }
+        if (tagMap != null && (!tagMap.isEmpty())) {
+            totalLength += tagMap.size() * 8;
+        }
+
+        int offset = 0;
+        final byte[] rowkey = new byte[totalLength];
+
+        // 1. set prefix
+        ByteUtil.intToBytes(prefixHashCode, rowkey, offset);
+        offset += 4;
+
+        // 2. set partition
+        if (partitionHashCodes != null) {
+            for (Integer partitionHashCode : partitionHashCodes) {
+                ByteUtil.intToBytes(partitionHashCode, rowkey, offset);
+                offset += 4;
+            }
+        }
+
+        // 3. set index values
+        for (int i = 0; i < columns.length; ++i) {
+            ByteUtil.shortToBytes((short)indexValues[i].length, rowkey, offset);
+            offset += 2;
+            for (int j = 0; j < indexValues[i].length; ++j) {
+                rowkey[offset++] = indexValues[i][j];
+            }
+        }
+
+        // Check if it's non clustered index, then set the tag/value hash code
+        if (tagMap != null && (!tagMap.isEmpty())) {
+            // 4. set tag key/value hashes
+            for (Map.Entry<Integer, Integer> entry : tagMap.entrySet()) {
+                ByteUtil.intToBytes(entry.getKey(), rowkey, offset);
+                offset += 4;
+                ByteUtil.intToBytes(entry.getValue(), rowkey, offset);
+                offset += 4;
+            }
+        }
+
+        return rowkey;
+    }
+
+    private int[] generatePartitionHashCodes(TaggedLogAPIEntity entity) {
+        final String[] partitions = entityDef.getPartitions();
+        int[] result = null;
+        if (partitions != null) {
+            result = new int[partitions.length];
+            final Map<String, String> tags = entity.getTags();
+            for (int i = 0; i < partitions.length; ++i) {
+                final String partition = partitions[i];
+                final String tagValue = tags.get(partition);
+                if (tagValue != null) {
+                    result[i] = tagValue.hashCode();
+                } else {
+                    result[i] = EMPTY_PARTITION_DEFAULT_HASH_CODE;
+                }
+            }
+        }
+        return result;
+    }
+
+    private byte[][] generateIndexValues(TaggedLogAPIEntity entity)
+        throws IllegalAccessException, InvocationTargetException, NoSuchMethodException {
+
+        final byte[][] result = new byte[columns.length][];
+        for (int i = 0; i < columns.length; ++i) {
+            final IndexColumn column = columns[i];
+            final String columnName = column.getColumnName();
+            if (column.isTag) {
+                final Map<String, String> tags = entity.getTags();
+                if (tags == null || tags.get(columnName) == null) {
+                    result[i] = EMPTY_VALUE;
+                } else {
+                    result[i] = tags.get(columnName).getBytes(UTF_8_CHARSET);
+                }
+            } else {
+                PropertyDescriptor pd = column.getPropertyDescriptor();
+                if (pd == null) {
+                    pd = PropertyUtils.getPropertyDescriptor(entity, columnName);
+                    column.setPropertyDescriptor(pd);
+                }
+                final Object value = pd.getReadMethod().invoke(entity);
+                if (value == null) {
+                    result[i] = EMPTY_VALUE;
+                } else {
+                    final Qualifier q = column.getQualifier();
+                    result[i] = q.getSerDeser().serialize(value);
+                }
+            }
+            if (result[i].length > MAX_INDEX_VALUE_BYTE_LENGTH) {
+                throw new IllegalArgumentException("Index field value exceeded the max length: "
+                                                   + MAX_INDEX_VALUE_BYTE_LENGTH + ", actual length: "
+                                                   + result[i].length);
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Index column definition class
+     */
+    public static class IndexColumn {
+        private final String columnName;
+        private final boolean isTag;
+        private final Qualifier qualifier;
+        private PropertyDescriptor propertyDescriptor;
+
+        public IndexColumn(String columnName, boolean isTag, Qualifier qualifier) {
+            this.columnName = columnName;
+            this.isTag = isTag;
+            this.qualifier = qualifier;
+        }
+
+        public String getColumnName() {
+            return columnName;
+        }
+
+        public boolean isTag() {
+            return isTag;
+        }
+
+        public Qualifier getQualifier() {
+            return qualifier;
+        }
+
+        public PropertyDescriptor getPropertyDescriptor() {
+            return propertyDescriptor;
+        }
+
+        public void setPropertyDescriptor(PropertyDescriptor propertyDescriptor) {
+            this.propertyDescriptor = propertyDescriptor;
+        }
+
+    }
 }
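
> Note for reviewers: the reformatted `generateUniqueIndexRowkey` above writes a 4-byte hash of the index prefix, a 4-byte hash per partition value, each index column value prefixed by a 2-byte length, and, for non-unique indexes, a 4-byte key hash plus 4-byte value hash per tag. Below is a minimal sketch of that layout, assuming big-endian `java.nio.ByteBuffer` packing instead of Eagle's `ByteUtil` (so the exact bytes may differ from real rowkeys); the class and method names are illustrative only.

```java
import java.nio.ByteBuffer;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

// Sketch of the rowkey layout produced by generateUniqueIndexRowkey:
// [4B index prefix hash][4B per partition hash][per index column: 2B value length + value bytes]
// [per tag (non-unique index only): 4B tag key hash + 4B tag value hash]
public class IndexRowkeySketch {

    static byte[] layout(int prefixHash, int[] partitionHashes, byte[][] indexValues,
                         SortedMap<Integer, Integer> tagHashes) {
        int total = 4
            + (partitionHashes == null ? 0 : 4 * partitionHashes.length)
            + (tagHashes == null ? 0 : 8 * tagHashes.size());
        for (byte[] value : indexValues) {
            total += 2 + value.length;
        }

        ByteBuffer buf = ByteBuffer.allocate(total);
        buf.putInt(prefixHash);                                 // 1. prefix
        if (partitionHashes != null) {
            for (int hash : partitionHashes) {
                buf.putInt(hash);                               // 2. partition hashes
            }
        }
        for (byte[] value : indexValues) {
            buf.putShort((short) value.length).put(value);      // 3. length-prefixed index values
        }
        if (tagHashes != null) {
            for (Map.Entry<Integer, Integer> e : tagHashes.entrySet()) {
                buf.putInt(e.getKey()).putInt(e.getValue());    // 4. tag key/value hashes
            }
        }
        return buf.array();
    }

    public static void main(String[] args) {
        byte[] rowkey = layout("hostIndex".hashCode(),
                               new int[] {"cluster1".hashCode()},
                               new byte[][] {"host1".getBytes()},
                               new TreeMap<Integer, Integer>());
        System.out.println(rowkey.length); // 4 + 4 + (2 + 5) = 15
    }
}
```
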
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java
index 3c82a0a..b8ada4a 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Indexes.java
@@ -21,9 +21,11 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Indexes {
 
-	public Index[] value();
+    public Index[] value();
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java
index 8831223..3daf4a1 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntArraySerDeser.java
@@ -18,54 +18,56 @@
 
 import org.apache.eagle.common.ByteUtil;
 
-/**
- * serialize int array which is stored like the following
- * <int><int>*size, where the first <int> is the size of int
+/*
+ * serialize int array which is stored as <int><int>*size, where the first <int> is the number of
+ * elements in the array
  */
-public class IntArraySerDeser implements EntitySerDeser<int[]>{
+public class IntArraySerDeser implements EntitySerDeser<int[]> {
 
-	public IntArraySerDeser(){}
+    public IntArraySerDeser() {
+    }
 
-	@Override
-	public int[] deserialize(byte[] bytes){
-		if(bytes.length < 4)
-			return null;
-		int offset = 0;
-		// get size of int array
-		int size = ByteUtil.bytesToInt(bytes, offset);
-		offset += 4;
-		int[] values = new int[size];
-		for(int i=0; i<size; i++){
-			values[i] = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-		}
-		return values;
-	}
-	
-	/**
-	 * 
-	 * @param obj
-	 * @return
-	 */
-	@Override
-	public byte[] serialize(int[] obj){
-		if(obj == null)
-			return null;
-		int size = obj.length;
-		byte[] array = new byte[4 + 4*size];
-		byte[] first = ByteUtil.intToBytes(size);
-		int offset = 0;
-		System.arraycopy(first, 0, array, offset, first.length);
-		offset += first.length;
-		for(int i=0; i<size; i++){
-			System.arraycopy(ByteUtil.intToBytes(obj[i]), 0, array, offset, 4);
-			offset += 4;
-		}
-		return array;
-	}
+    @Override
+    public int[] deserialize(byte[] bytes) {
+        if (bytes.length < 4) {
+            return null;
+        }
+        int offset = 0;
+        // get size of int array
+        int size = ByteUtil.bytesToInt(bytes, offset);
+        offset += 4;
+        int[] values = new int[size];
+        for (int i = 0; i < size; i++) {
+            values[i] = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+        }
+        return values;
+    }
 
-	@Override
-	public Class<int[]> type() {
-		return int[].class;
-	}
+    /**
+     * @param obj the int array to serialize
+     * @return the serialized bytes, or null if obj is null
+     */
+    @Override
+    public byte[] serialize(int[] obj) {
+        if (obj == null) {
+            return null;
+        }
+        int size = obj.length;
+        byte[] array = new byte[4 + 4 * size];
+        byte[] first = ByteUtil.intToBytes(size);
+        int offset = 0;
+        System.arraycopy(first, 0, array, offset, first.length);
+        offset += first.length;
+        for (int i = 0; i < size; i++) {
+            System.arraycopy(ByteUtil.intToBytes(obj[i]), 0, array, offset, 4);
+            offset += 4;
+        }
+        return array;
+    }
+
+    @Override
+    public Class<int[]> type() {
+        return int[].class;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java
index 695badd..8353499 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/IntSerDeser.java
@@ -18,25 +18,28 @@
 
 import org.apache.eagle.common.ByteUtil;
 
-public class IntSerDeser implements EntitySerDeser<Integer>{
-	public IntSerDeser(){}
+public class IntSerDeser implements EntitySerDeser<Integer> {
+    public IntSerDeser() {
+    }
 
-	@Override
-	public Integer deserialize(byte[] bytes){
-		if(bytes.length < 4)
-			return null;
-		return Integer.valueOf(ByteUtil.bytesToInt(bytes));
-	}
-	
-	@Override
-	public byte[] serialize(Integer obj){
-		if(obj == null)
-			return null;
-		return ByteUtil.intToBytes(obj);
-	}
+    @Override
+    public Integer deserialize(byte[] bytes) {
+        if (bytes.length < 4) {
+            return null;
+        }
+        return Integer.valueOf(ByteUtil.bytesToInt(bytes));
+    }
 
-	@Override
-	public Class<Integer> type() {
-		return Integer.class;
-	}
+    @Override
+    public byte[] serialize(Integer obj) {
+        if (obj == null) {
+            return null;
+        }
+        return ByteUtil.intToBytes(obj);
+    }
+
+    @Override
+    public Class<Integer> type() {
+        return Integer.class;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java
index eaf5e92..b77f3ff 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ListSerDeser.java
@@ -25,104 +25,104 @@
 
 /**
  * Serialization/deserialization for map type
- *
  */
 @SuppressWarnings("rawtypes")
 public class ListSerDeser implements EntitySerDeser<List> {
 
-	@SuppressWarnings({ "unchecked" })
-	@Override
-	public List deserialize(byte[] bytes) {
-		if (bytes == null || bytes.length == 0) {
-			return null;
-		}
-		final List list = new ArrayList();
-		int offset = 0;
-		// get size of int array
-		final int size = ByteUtil.bytesToInt(bytes, offset);
-		offset += 4;
-		
-		for (int i = 0; i < size; ++i) {
-			final int valueID = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			final Class<?> valueClass = EntityDefinitionManager.getClassByID(valueID);
-			if (valueClass == null) {
-				throw new IllegalArgumentException("Unsupported value type ID: " + valueID);
-			}
-			final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
-			final int valueLength = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			final byte[] valueContent = new byte[valueLength];
-			System.arraycopy(bytes, offset, valueContent, 0, valueLength);
-			offset += valueLength;
-			final Object value = valueSerDer.deserialize(valueContent);
-			
-			list.add(value);
-		}
-		return list;
-	}
+    @SuppressWarnings({
+        "unchecked"
+        })
+    @Override
+    public List deserialize(byte[] bytes) {
+        if (bytes == null || bytes.length == 0) {
+            return null;
+        }
+        final List list = new ArrayList();
+        int offset = 0;
+        // get size of int array
+        final int size = ByteUtil.bytesToInt(bytes, offset);
+        offset += 4;
 
-	/**
-	 *  size + value1 type id + value length + value1 binary content + ...
-	 *   4B         4B              4B              value1 bytes
-	 */
-	@SuppressWarnings({ "unchecked" })
-	@Override
-	public byte[] serialize(List list) {
-		if(list == null)
-			return null;
-		final int size = list.size();
-		final int[] valueIDs = new int[size];
-		final byte[][] valueBytes = new byte[size][];
-		
-		int totalSize = 4 + size * 8;
-		int i = 0;
-		Iterator iter = list.iterator();
-		while (iter.hasNext()) {
-			final Object value = iter.next();
-			Class<?> valueClass = value.getClass();
-			int valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
+        for (int i = 0; i < size; ++i) {
+            final int valueID = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+            final Class<?> valueClass = EntityDefinitionManager.getClassByID(valueID);
+            if (valueClass == null) {
+                throw new IllegalArgumentException("Unsupported value type ID: " + valueID);
+            }
+            final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
+            final int valueLength = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+            final byte[] valueContent = new byte[valueLength];
+            System.arraycopy(bytes, offset, valueContent, 0, valueLength);
+            offset += valueLength;
+            final Object value = valueSerDer.deserialize(valueContent);
 
-			if (valueTypeID == -1) {
-				if (value instanceof List) {
-					valueClass = List.class;
-					valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
-				}
-				else if (value instanceof Map) {
-					valueClass = Map.class;
-					valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
-				}
-				else {
-					throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
-				}
-			}
-			valueIDs[i] = valueTypeID;
-			final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
-			if (valueSerDer == null) {
-				throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
-			}
-			valueBytes[i] = valueSerDer.serialize(value);
-			totalSize += valueBytes[i].length;
-			++i;
-		}
-		final byte[] result = new byte[totalSize];
-		int offset = 0;
-		ByteUtil.intToBytes(size, result, offset);
-		offset += 4;
-		for (i = 0; i < size; ++i) {			
-			ByteUtil.intToBytes(valueIDs[i], result, offset);
-			offset += 4;
-			ByteUtil.intToBytes(valueBytes[i].length, result, offset);
-			offset += 4;
-			System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length);
-			offset += valueBytes[i].length;
-		}
-		return result;
-	}
+            list.add(value);
+        }
+        return list;
+    }
 
-	@Override
-	public Class<List> type() {
-		return List.class;
-	}
+    /**
+     * size (4B) + value1 type id (4B) + value1 length (4B) + value1 binary content + ...
+     */
+    @SuppressWarnings({
+        "unchecked"
+        })
+    @Override
+    public byte[] serialize(List list) {
+        if (list == null) {
+            return null;
+        }
+        final int size = list.size();
+        final int[] valueIDs = new int[size];
+        final byte[][] valueBytes = new byte[size][];
+
+        int totalSize = 4 + size * 8;
+        int i = 0;
+        Iterator iter = list.iterator();
+        while (iter.hasNext()) {
+            final Object value = iter.next();
+            Class<?> valueClass = value.getClass();
+            int valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
+
+            if (valueTypeID == -1) {
+                if (value instanceof List) {
+                    valueClass = List.class;
+                    valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
+                } else if (value instanceof Map) {
+                    valueClass = Map.class;
+                    valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
+                } else {
+                    throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
+                }
+            }
+            valueIDs[i] = valueTypeID;
+            final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
+            if (valueSerDer == null) {
+                throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
+            }
+            valueBytes[i] = valueSerDer.serialize(value);
+            totalSize += valueBytes[i].length;
+            ++i;
+        }
+        final byte[] result = new byte[totalSize];
+        int offset = 0;
+        ByteUtil.intToBytes(size, result, offset);
+        offset += 4;
+        for (i = 0; i < size; ++i) {
+            ByteUtil.intToBytes(valueIDs[i], result, offset);
+            offset += 4;
+            ByteUtil.intToBytes(valueBytes[i].length, result, offset);
+            offset += 4;
+            System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length);
+            offset += valueBytes[i].length;
+        }
+        return result;
+    }
+
+    @Override
+    public Class<List> type() {
+        return List.class;
+    }
 }
-
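
> The list layout documented above frames every element with its serializer type id and its byte length. A hedged sketch of that framing follows; `STRING_TYPE_ID` is a made-up constant, since the real ids come from `EntityDefinitionManager`.

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

// Illustrative sketch of the element framing used by ListSerDeser:
// [4B list size] then per element [4B type id][4B value length][value bytes]
public class ListLayoutSketch {
    private static final int STRING_TYPE_ID = 7; // hypothetical id, not Eagle's real value

    public static void main(String[] args) {
        List<String> values = Arrays.asList("a", "bc");

        int total = 4;
        for (String s : values) {
            total += 8 + s.getBytes(StandardCharsets.UTF_8).length;
        }

        ByteBuffer buf = ByteBuffer.allocate(total);
        buf.putInt(values.size());
        for (String s : values) {
            byte[] payload = s.getBytes(StandardCharsets.UTF_8);
            buf.putInt(STRING_TYPE_ID);   // which EntitySerDeser to use on read
            buf.putInt(payload.length);   // how many bytes the value occupies
            buf.put(payload);
        }
        System.out.println("encoded length = " + buf.array().length); // 4 + (8+1) + (8+2) = 23
    }
}
```
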
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java
index 914cd95..6f0c6ab 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/LongSerDeser.java
@@ -18,26 +18,29 @@
 
 import org.apache.eagle.common.ByteUtil;
 
-public class LongSerDeser implements EntitySerDeser<Long>{
-	public LongSerDeser(){}
+public class LongSerDeser implements EntitySerDeser<Long> {
+    public LongSerDeser() {
+    }
 
-	@Override
-	public Long deserialize(byte[] bytes){
-		if(bytes.length < 8)
-			return null;
-//		return new Long(ByteUtil.bytesToLong(bytes));
-		return Long.valueOf(ByteUtil.bytesToLong(bytes));
-	}
-	
-	@Override
-	public byte[] serialize(Long obj){
-		if(obj == null)
-			return null;
-		return ByteUtil.longToBytes(obj);
-	}
+    @Override
+    public Long deserialize(byte[] bytes) {
+        if (bytes.length < 8) {
+            return null;
+        }
+        // return new Long(ByteUtil.bytesToLong(bytes));
+        return Long.valueOf(ByteUtil.bytesToLong(bytes));
+    }
 
-	@Override
-	public Class<Long> type() {
-		return Long.class;
-	}
+    @Override
+    public byte[] serialize(Long obj) {
+        if (obj == null) {
+            return null;
+        }
+        return ByteUtil.longToBytes(obj);
+    }
+
+    @Override
+    public Class<Long> type() {
+        return Long.class;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java
index d16fe3a..bb9889e 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MapSerDeser.java
@@ -24,145 +24,148 @@
 
 /**
  * Serialization/deserialization for map type
- *
  */
 @SuppressWarnings("rawtypes")
 public class MapSerDeser implements EntitySerDeser<Map> {
 
-	@SuppressWarnings({ "unchecked" })
-	@Override
-	public Map deserialize(byte[] bytes) {
-		if (bytes == null || bytes.length == 0) {
-			return null;
-		}
-		final Map map = new TreeMap();
-		int offset = 0;
-		// get size of int array
-		final int size = ByteUtil.bytesToInt(bytes, offset);
-		offset += 4;
-		
-		for (int i = 0; i < size; ++i) {
-			final int keyID = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			final Class<?> keyClass = EntityDefinitionManager.getClassByID(keyID);
-			if (keyClass == null) {
-				throw new IllegalArgumentException("Unsupported key type ID: " + keyID);
-			}
-			final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass);
-			final int keyLength = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			final byte[] keyContent = new byte[keyLength];
-			System.arraycopy(bytes, offset, keyContent, 0, keyLength);
-			offset += keyLength;
-			final Object key = keySerDer.deserialize(keyContent);
-			
-			final int valueID = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			final Class<?> valueClass = EntityDefinitionManager.getClassByID(valueID);
-			if (valueClass == null) {
-				throw new IllegalArgumentException("Unsupported value type ID: " + valueID);
-			}
-			final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
-			final int valueLength = ByteUtil.bytesToInt(bytes, offset);
-			offset += 4;
-			final byte[] valueContent = new byte[valueLength];
-			System.arraycopy(bytes, offset, valueContent, 0, valueLength);
-			offset += valueLength;
-			final Object value = valueSerDer.deserialize(valueContent);
-			
-			map.put(key, value);
-		}
-		return map;
-	}
+    @SuppressWarnings({
+        "unchecked"
+        })
+    @Override
+    public Map deserialize(byte[] bytes) {
+        if (bytes == null || bytes.length == 0) {
+            return null;
+        }
+        final Map map = new TreeMap();
+        int offset = 0;
+        // get size of int array
+        final int size = ByteUtil.bytesToInt(bytes, offset);
+        offset += 4;
 
-	/**
-	 *  size + key1 type ID + key1 length + key1 binary content + value1 type id + value length + value1 binary content + ...
-	 *   4B        4B             4B             key1 bytes            4B              4B              value1 bytes
-	 */
-	@SuppressWarnings({ "unchecked" })
-	@Override
-	public byte[] serialize(Map map) {
-		if(map == null)
-			return null;
-		final int size = map.size();
-		final int[] keyIDs = new int[size];
-		final int[] valueIDs = new int[size];
-		final byte[][] keyBytes = new byte[size][];
-		final byte[][] valueBytes = new byte[size][];
-		
-		int totalSize = 4 + size * 16;
-		int i = 0;
-		Iterator iter = map.entrySet().iterator();
-		while (iter.hasNext()) {
-			final Map.Entry entry = (Map.Entry)iter.next();
-			final Object key = entry.getKey();
-			final Object value = entry.getValue();
-			Class<?> keyClass = key.getClass();
-			Class<?> valueClass = NullObject.class;
-			if (value != null) {
-				valueClass = value.getClass();
-			}
-			int keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass);
-			int valueTypeID = 0; // default null object
-			if (valueClass != null) {
-				valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
-			}
-			if (keyTypeID == -1) {
-				if (key instanceof Map) {
-					keyClass = Map.class;
-					keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass);
-				} else {
-					throw new IllegalArgumentException("Unsupported class: " + keyClass.getName());
-				}
-			}
-			if (valueTypeID == -1) {
-				if (value instanceof Map) {
-					valueClass = Map.class;
-					valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
-				} else {
-					throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
-				}
-			}
-			keyIDs[i] = keyTypeID;
-			valueIDs[i] = valueTypeID;
-			final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass);
-			final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
-			if (keySerDer == null) {
-				throw new IllegalArgumentException("Unsupported class: " + keyClass.getName());
-			}
-			if (valueSerDer == null) {
-				throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
-			}
-			keyBytes[i] = keySerDer.serialize(key);
-			valueBytes[i] = valueSerDer.serialize(value);
-			totalSize += keyBytes[i].length + valueBytes[i].length;
-			++i;
-		}
-		final byte[] result = new byte[totalSize];
-		int offset = 0;
-		ByteUtil.intToBytes(size, result, offset);
-		offset += 4;
-		for (i = 0; i < size; ++i) {
-			ByteUtil.intToBytes(keyIDs[i], result, offset);
-			offset += 4;
-			ByteUtil.intToBytes(keyBytes[i].length, result, offset);
-			offset += 4;
-			System.arraycopy(keyBytes[i], 0, result, offset, keyBytes[i].length);
-			offset += keyBytes[i].length;
-			
-			ByteUtil.intToBytes(valueIDs[i], result, offset);
-			offset += 4;
-			ByteUtil.intToBytes(valueBytes[i].length, result, offset);
-			offset += 4;
-			System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length);
-			offset += valueBytes[i].length;
-		}
-		return result;
-	}
+        for (int i = 0; i < size; ++i) {
+            final int keyID = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+            final Class<?> keyClass = EntityDefinitionManager.getClassByID(keyID);
+            if (keyClass == null) {
+                throw new IllegalArgumentException("Unsupported key type ID: " + keyID);
+            }
+            final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass);
+            final int keyLength = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+            final byte[] keyContent = new byte[keyLength];
+            System.arraycopy(bytes, offset, keyContent, 0, keyLength);
+            offset += keyLength;
+            final Object key = keySerDer.deserialize(keyContent);
 
-	@Override
-	public Class<Map> type() {
-		return Map.class;
-	}
+            final int valueID = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+            final Class<?> valueClass = EntityDefinitionManager.getClassByID(valueID);
+            if (valueClass == null) {
+                throw new IllegalArgumentException("Unsupported value type ID: " + valueID);
+            }
+            final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
+            final int valueLength = ByteUtil.bytesToInt(bytes, offset);
+            offset += 4;
+            final byte[] valueContent = new byte[valueLength];
+            System.arraycopy(bytes, offset, valueContent, 0, valueLength);
+            offset += valueLength;
+            final Object value = valueSerDer.deserialize(valueContent);
+
+            map.put(key, value);
+        }
+        return map;
+    }
+
+    /**
+     * size (4B) + key1 type ID (4B) + key1 length (4B) + key1 binary content + value1 type ID (4B)
+     * + value1 length (4B) + value1 binary content + ...
+     */
+    @SuppressWarnings({
+        "unchecked"
+        })
+    @Override
+    public byte[] serialize(Map map) {
+        if (map == null) {
+            return null;
+        }
+        final int size = map.size();
+        final int[] keyIDs = new int[size];
+        final int[] valueIDs = new int[size];
+        final byte[][] keyBytes = new byte[size][];
+        final byte[][] valueBytes = new byte[size][];
+
+        int totalSize = 4 + size * 16;
+        int i = 0;
+        Iterator iter = map.entrySet().iterator();
+        while (iter.hasNext()) {
+            final Map.Entry entry = (Map.Entry)iter.next();
+            final Object key = entry.getKey();
+            final Object value = entry.getValue();
+            Class<?> keyClass = key.getClass();
+            Class<?> valueClass = NullObject.class;
+            if (value != null) {
+                valueClass = value.getClass();
+            }
+            int keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass);
+            int valueTypeID = 0; // default null object
+            if (valueClass != null) {
+                valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
+            }
+            if (keyTypeID == -1) {
+                if (key instanceof Map) {
+                    keyClass = Map.class;
+                    keyTypeID = EntityDefinitionManager.getIDBySerDerClass(keyClass);
+                } else {
+                    throw new IllegalArgumentException("Unsupported class: " + keyClass.getName());
+                }
+            }
+            if (valueTypeID == -1) {
+                if (value instanceof Map) {
+                    valueClass = Map.class;
+                    valueTypeID = EntityDefinitionManager.getIDBySerDerClass(valueClass);
+                } else {
+                    throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
+                }
+            }
+            keyIDs[i] = keyTypeID;
+            valueIDs[i] = valueTypeID;
+            final EntitySerDeser keySerDer = EntityDefinitionManager.getSerDeser(keyClass);
+            final EntitySerDeser valueSerDer = EntityDefinitionManager.getSerDeser(valueClass);
+            if (keySerDer == null) {
+                throw new IllegalArgumentException("Unsupported class: " + keyClass.getName());
+            }
+            if (valueSerDer == null) {
+                throw new IllegalArgumentException("Unsupported class: " + valueClass.getName());
+            }
+            keyBytes[i] = keySerDer.serialize(key);
+            valueBytes[i] = valueSerDer.serialize(value);
+            totalSize += keyBytes[i].length + valueBytes[i].length;
+            ++i;
+        }
+        final byte[] result = new byte[totalSize];
+        int offset = 0;
+        ByteUtil.intToBytes(size, result, offset);
+        offset += 4;
+        for (i = 0; i < size; ++i) {
+            ByteUtil.intToBytes(keyIDs[i], result, offset);
+            offset += 4;
+            ByteUtil.intToBytes(keyBytes[i].length, result, offset);
+            offset += 4;
+            System.arraycopy(keyBytes[i], 0, result, offset, keyBytes[i].length);
+            offset += keyBytes[i].length;
+
+            ByteUtil.intToBytes(valueIDs[i], result, offset);
+            offset += 4;
+            ByteUtil.intToBytes(valueBytes[i].length, result, offset);
+            offset += 4;
+            System.arraycopy(valueBytes[i], 0, result, offset, valueBytes[i].length);
+            offset += valueBytes[i].length;
+        }
+        return result;
+    }
+
+    @Override
+    public Class<Map> type() {
+        return Map.class;
+    }
 }
-
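
> `MapSerDeser` extends the same framing to key/value pairs: each entry carries a type id and byte length for the key and then for the value. An illustrative sketch of a single entry's encoding (the type ids are placeholders, not Eagle's real ids):

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Illustrative framing of one map entry as written by MapSerDeser:
// [4B key type id][4B key length][key bytes][4B value type id][4B value length][value bytes]
public class MapEntryFramingSketch {
    public static void main(String[] args) {
        byte[] key = "host".getBytes(StandardCharsets.UTF_8);
        byte[] value = "node1.example.com".getBytes(StandardCharsets.UTF_8);

        ByteBuffer buf = ByteBuffer.allocate(16 + key.length + value.length);
        buf.putInt(7).putInt(key.length).put(key);      // hypothetical STRING type id = 7
        buf.putInt(7).putInt(value.length).put(value);
        System.out.println("entry bytes = " + buf.array().length); // 16 + 4 + 17 = 37
    }
}
```
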
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java
index 0e3e776..5f5bb6e 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Metric.java
@@ -21,9 +21,11 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Metric {
-	// interval with million seconds
-	long interval() default 60000;
+    // interval in milliseconds
+    long interval() default 60000;
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java
index 06bbed3..4b23ea9 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/MetricDefinition.java
@@ -25,44 +25,54 @@
 import java.io.IOException;
 
 public class MetricDefinition implements Writable {
-	private final static Logger LOG = LoggerFactory.getLogger(MetricDefinition.class);
-	private long interval;
-	private Class<?> singleTimestampEntityClass;
-	public long getInterval() {
-		return interval;
-	}
-	public void setInterval(long interval) {
-		this.interval = interval;
-	}
-	public Class<?> getSingleTimestampEntityClass() {
-		return singleTimestampEntityClass;
-	}
-	public void setSingleTimestampEntityClass(Class<?> singleTimestampEntityClass) {
-		this.singleTimestampEntityClass = singleTimestampEntityClass;
-	}
+    private static final Logger LOG = LoggerFactory.getLogger(MetricDefinition.class);
+    private long interval;
+    private Class<?> singleTimestampEntityClass;
 
-	private final static String EMPTY="";
-	@Override
-	public void write(DataOutput out) throws IOException {
-		if(LOG.isDebugEnabled()) LOG.debug("Writing metric definition: interval = "+interval+" singleTimestampEntityClass = "+ this.singleTimestampEntityClass);
-		out.writeLong(interval);
-		if(this.singleTimestampEntityClass == null){
-			out.writeUTF(EMPTY);
-		}else {
-			out.writeUTF(this.singleTimestampEntityClass.getName());
-		}
-	}
+    public long getInterval() {
+        return interval;
+    }
 
-	@Override
-	public void readFields(DataInput in) throws IOException {
-		interval = in.readLong();
-		String singleTimestampEntityClassName = in.readUTF();
-		if(!EMPTY.equals(singleTimestampEntityClassName)) {
-			try {
-				this.singleTimestampEntityClass = Class.forName(singleTimestampEntityClassName);
-			} catch (ClassNotFoundException e) {
-				if(LOG.isDebugEnabled()) LOG.warn("Class " + singleTimestampEntityClassName + " not found ");
-			}
-		}
-	}
+    public void setInterval(long interval) {
+        this.interval = interval;
+    }
+
+    public Class<?> getSingleTimestampEntityClass() {
+        return singleTimestampEntityClass;
+    }
+
+    public void setSingleTimestampEntityClass(Class<?> singleTimestampEntityClass) {
+        this.singleTimestampEntityClass = singleTimestampEntityClass;
+    }
+
+    private static final String EMPTY = "";
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Writing metric definition: interval = " + interval + " singleTimestampEntityClass = "
+                      + this.singleTimestampEntityClass);
+        }
+        out.writeLong(interval);
+        if (this.singleTimestampEntityClass == null) {
+            out.writeUTF(EMPTY);
+        } else {
+            out.writeUTF(this.singleTimestampEntityClass.getName());
+        }
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        interval = in.readLong();
+        String singleTimestampEntityClassName = in.readUTF();
+        if (!EMPTY.equals(singleTimestampEntityClassName)) {
+            try {
+                this.singleTimestampEntityClass = Class.forName(singleTimestampEntityClassName);
+            } catch (ClassNotFoundException e) {
+                if (LOG.isDebugEnabled()) {
+                    LOG.warn("Class " + singleTimestampEntityClassName + " not found ");
+                }
+            }
+        }
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java
index 9fb05a3..01536f1 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndex.java
@@ -21,7 +21,9 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface NonUniqueIndex {
 
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java
index ff11397..f838590 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NonUniqueIndexes.java
@@ -21,11 +21,12 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface NonUniqueIndexes {
-	
-	public NonUniqueIndex[] value();
+
+    public NonUniqueIndex[] value();
 
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java
index 1778788..fd76999 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/NullSerDeser.java
@@ -16,22 +16,22 @@
  */
 package org.apache.eagle.log.entity.meta;
 
-public class NullSerDeser implements EntitySerDeser<NullObject>{
+public class NullSerDeser implements EntitySerDeser<NullObject> {
 
-	private static final byte[] EMPTY_NULL_ARRAY = new byte[0];
-	
-	@Override
-	public NullObject deserialize(byte[] bytes) {
-		return null;
-	}
+    private static final byte[] EMPTY_NULL_ARRAY = new byte[0];
 
-	@Override
-	public byte[] serialize(NullObject t) {
-		return EMPTY_NULL_ARRAY;
-	}
+    @Override
+    public NullObject deserialize(byte[] bytes) {
+        return null;
+    }
 
-	@Override
-	public Class<NullObject> type() {
-		return NullObject.class;
-	}
+    @Override
+    public byte[] serialize(NullObject t) {
+        return EMPTY_NULL_ARRAY;
+    }
+
+    @Override
+    public Class<NullObject> type() {
+        return NullObject.class;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java
index cb60016..479cb33 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Partition.java
@@ -22,19 +22,18 @@
 import java.lang.annotation.Target;
 
 /**
- * Partition annotation will impact the rowkey generation for Eagle entities. Once an entity class 
- * has defined the partition fields for an Eagle entity, the hash codes of the defined partition 
- * fields will be placed just after prefix field, and before timestamp field.
- * 
- *
+ * Partition annotation will impact the rowkey generation for Eagle entities. Once an entity class has defined
+ * the partition fields for an Eagle entity, the hash codes of the defined partition fields will be placed
+ * just after prefix field, and before timestamp field.
  */
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
-public @interface Partition
-{
+public @interface Partition {
     /**
      * Order in which annotated tags are to be regarded as data partitions.
      */
-    public String[] value() default { };
+    public String[] value() default {};
 
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java
index 36f404c..587243d 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Prefix.java
@@ -21,8 +21,10 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Prefix {
-	String value() default "";
-}
\ No newline at end of file
+    String value() default "";
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java
index 64d73dd..7f849ff 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Qualifier.java
@@ -27,74 +27,83 @@
 import java.util.HashMap;
 import java.util.Map;
 
-public class Qualifier implements Writable{
-	private final static Logger LOG = LoggerFactory.getLogger(Qualifier.class);
+public class Qualifier implements Writable {
+    private static final Logger LOG = LoggerFactory.getLogger(Qualifier.class);
 
-	private String displayName;
-	private String qualifierName;
-	private EntitySerDeser<Object> serDeser;
-	@JsonIgnore
-	public EntitySerDeser<Object> getSerDeser() {
-		return serDeser;
-	}
-	public void setSerDeser(EntitySerDeser<Object> serDeser) {
-		this.serDeser = serDeser;
-	}
-	public String getDisplayName() {
-		return displayName;
-	}
-	public void setDisplayName(String displayName) {
-		this.displayName = displayName;
-	}
-	public String getQualifierName() {
-		return qualifierName;
-	}
-	public void setQualifierName(String qualifierName) {
-		this.qualifierName = qualifierName;
-	}
-	
-	public String toString(){
-		StringBuffer sb = new StringBuffer();
-		sb.append("displayName:");
-		sb.append(displayName);
-		sb.append(",");
-		sb.append("qualifierName:");
-		sb.append(qualifierName);
-		sb.append(",");
-		sb.append("serDeser class:");
-		sb.append(serDeser.getClass().getName());
-		return sb.toString();
-	}
+    private String displayName;
+    private String qualifierName;
+    private EntitySerDeser<Object> serDeser;
 
-	@Override
-	public void write(DataOutput out) throws IOException {
-		out.writeUTF(displayName);
-		out.writeUTF(qualifierName);
-		out.writeUTF(serDeser.getClass().getName());
-	}
+    @JsonIgnore
+    public EntitySerDeser<Object> getSerDeser() {
+        return serDeser;
+    }
 
-	private final static Map<String, EntitySerDeser> _entitySerDeserCache = new HashMap<String,EntitySerDeser>();
+    public void setSerDeser(EntitySerDeser<Object> serDeser) {
+        this.serDeser = serDeser;
+    }
 
-	@Override
-	public void readFields(DataInput in) throws IOException {
-		displayName = in.readUTF();
-		qualifierName = in.readUTF();
-		String serDeserClassName = in.readUTF();
+    public String getDisplayName() {
+        return displayName;
+    }
 
-		EntitySerDeser _cached = _entitySerDeserCache.get(serDeserClassName);
-		if(_cached != null){
-			this.serDeser = _cached;
-		}else {
-			try {
-				if (LOG.isDebugEnabled()) LOG.debug("Creating new instance for " + serDeserClassName);
-				Class serDeserClass = Class.forName(serDeserClassName);
-				this.serDeser = (EntitySerDeser) serDeserClass.newInstance();
-				_entitySerDeserCache.put(serDeserClassName, this.serDeser);
-			} catch (Exception e) {
-				if (LOG.isDebugEnabled()) {
-					LOG.warn("Class not found for " + serDeserClassName + ": " + e.getMessage(), e);
-				}
-			}
-		}
-	}
+    public void setDisplayName(String displayName) {
+        this.displayName = displayName;
+    }
+
+    public String getQualifierName() {
+        return qualifierName;
+    }
+
+    public void setQualifierName(String qualifierName) {
+        this.qualifierName = qualifierName;
+    }
+
+    @Override
+    public String toString() {
+        StringBuffer sb = new StringBuffer();
+        sb.append("displayName:");
+        sb.append(displayName);
+        sb.append(",");
+        sb.append("qualifierName:");
+        sb.append(qualifierName);
+        sb.append(",");
+        sb.append("serDeser class:");
+        sb.append(serDeser.getClass().getName());
+        return sb.toString();
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+        out.writeUTF(displayName);
+        out.writeUTF(qualifierName);
+        out.writeUTF(serDeser.getClass().getName());
+    }
+
+    private static final Map<String, EntitySerDeser> _entitySerDeserCache = new HashMap<String, EntitySerDeser>();
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+        displayName = in.readUTF();
+        qualifierName = in.readUTF();
+        String serDeserClassName = in.readUTF();
+
+        EntitySerDeser _cached = _entitySerDeserCache.get(serDeserClassName);
+        if (_cached != null) {
+            this.serDeser = _cached;
+        } else {
+            try {
+                if (LOG.isDebugEnabled()) {
+                    LOG.debug("Creating new instance for " + serDeserClassName);
+                }
+                Class serDeserClass = Class.forName(serDeserClassName);
+                this.serDeser = (EntitySerDeser)serDeserClass.newInstance();
+                _entitySerDeserCache.put(serDeserClassName, this.serDeser);
+            } catch (Exception e) {
+                if (LOG.isDebugEnabled()) {
+                    LOG.warn("Class not found for " + serDeserClassName + ": " + e.getMessage(), e);
+                }
+            }
+        }
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java
index 22d70ed..f6e9700 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Service.java
@@ -21,8 +21,10 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Service {
-	String value() default "";
+    String value() default "";
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java
index 8c712d0..6dc15c5 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/ServicePath.java
@@ -23,9 +23,10 @@
 
 /**
  * This class is for service client for generic entity creation API (entities and metrics)
- *
  */
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface ServicePath {
 
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java
index 635065b..2e5fa8d 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringArraySerDeser.java
@@ -22,73 +22,73 @@
 
 /**
  * String array entity serializer and deserializer
- *
  */
 public class StringArraySerDeser implements EntitySerDeser<String[]> {
 
-	public static final int MAX_STRING_LENGTH = 65535;
-	public static final String UTF_8 = "UTF-8";
-	
-	@Override
-	public String[] deserialize(byte[] bytes) {
-		if(bytes == null || bytes.length < 4)
-			return null;
-		int offset = 0;
-		// get size of int array
-		final int size = ByteUtil.bytesToInt(bytes, offset);
-		offset += 4;
-		final String[] strings = new String[size];
-		try {
-			for(int i = 0; i < size; i++) {
-				final int len = ByteUtil.bytesToInt(bytes, offset);
-				offset += 4;
-				strings[i] = new String(bytes, offset, len, UTF_8);
-				offset += len;
-			}
-		} catch (UnsupportedEncodingException e) {
-			throw new IllegalArgumentException("Invalid byte array");
-		}
-		return strings;
-	}
-	
-	/**
-	 *  size + str1 length + str1 + str2 length + str2 + ...
-	 *   4B        4B         n1B        4B        n2B
-	 *  
-	 * @param obj
-	 * @return
-	 */
-	@Override
-	public byte[] serialize(String[] array) {
-		if(array == null)
-			return null;
-		final int size = array.length;
-		final byte[][] tmp = new byte[size][];
-		int total = 4 + 4 * size;
-		for (int i = 0; i < size; ++i) {
-			try {
-				tmp[i] = array[i].getBytes(UTF_8);
-			} catch (UnsupportedEncodingException e) {
-				throw new IllegalArgumentException("String doesn't support UTF-8 encoding: " + array[i]);
-			}
-			total += tmp[i].length;
-		}
-		final byte[] result = new byte[total];
-		int offset = 0;
-		ByteUtil.intToBytes(size, result, offset);
-		offset += 4;
-		for (int i = 0; i < size; ++i) {
-			ByteUtil.intToBytes(tmp[i].length, result, offset);
-			offset += 4;
-			System.arraycopy(tmp[i], 0, result, offset, tmp[i].length);
-			offset += tmp[i].length;
-		}
-		return result;
-	}
+    public static final int MAX_STRING_LENGTH = 65535;
+    public static final String UTF_8 = "UTF-8";
 
-	@Override
-	public Class<String[]> type() {
-		return String[].class;
-	}
+    @Override
+    public String[] deserialize(byte[] bytes) {
+        if (bytes == null || bytes.length < 4) {
+            return null;
+        }
+        int offset = 0;
+        // get size of int array
+        final int size = ByteUtil.bytesToInt(bytes, offset);
+        offset += 4;
+        final String[] strings = new String[size];
+        try {
+            for (int i = 0; i < size; i++) {
+                final int len = ByteUtil.bytesToInt(bytes, offset);
+                offset += 4;
+                strings[i] = new String(bytes, offset, len, UTF_8);
+                offset += len;
+            }
+        } catch (UnsupportedEncodingException e) {
+            throw new IllegalArgumentException("Invalid byte array");
+        }
+        return strings;
+    }
+
+    /**
+     * size (4B) + str1 length (4B) + str1 bytes (n1B) + str2 length (4B) + str2 bytes (n2B) + ...
+     *
+     * @param array the string array to serialize
+     * @return the serialized bytes, or null if array is null
+     */
+    @Override
+    public byte[] serialize(String[] array) {
+        if (array == null) {
+            return null;
+        }
+        final int size = array.length;
+        final byte[][] tmp = new byte[size][];
+        int total = 4 + 4 * size;
+        for (int i = 0; i < size; ++i) {
+            try {
+                tmp[i] = array[i].getBytes(UTF_8);
+            } catch (UnsupportedEncodingException e) {
+                throw new IllegalArgumentException("String doesn't support UTF-8 encoding: " + array[i]);
+            }
+            total += tmp[i].length;
+        }
+        final byte[] result = new byte[total];
+        int offset = 0;
+        ByteUtil.intToBytes(size, result, offset);
+        offset += 4;
+        for (int i = 0; i < size; ++i) {
+            ByteUtil.intToBytes(tmp[i].length, result, offset);
+            offset += 4;
+            System.arraycopy(tmp[i], 0, result, offset, tmp[i].length);
+            offset += tmp[i].length;
+        }
+        return result;
+    }
+
+    @Override
+    public Class<String[]> type() {
+        return String[].class;
+    }
 
 }
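
> The `String[]` layout used by `StringArraySerDeser` is a count followed by length-prefixed UTF-8 strings. A small decode-side sketch, again with `ByteBuffer` standing in for `ByteUtil`, so byte order is assumed rather than taken from the real serializer:

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

// Illustrative decode of the String[] layout:
// [4B count] then per string [4B byte length][UTF-8 bytes]
public class StringArrayDecodeSketch {
    public static void main(String[] args) {
        // hand-built payload for {"ab", "c"}
        ByteBuffer enc = ByteBuffer.allocate(4 + (4 + 2) + (4 + 1));
        enc.putInt(2);
        enc.putInt(2).put("ab".getBytes(StandardCharsets.UTF_8));
        enc.putInt(1).put("c".getBytes(StandardCharsets.UTF_8));

        ByteBuffer in = ByteBuffer.wrap(enc.array());
        String[] decoded = new String[in.getInt()];
        for (int i = 0; i < decoded.length; i++) {
            byte[] raw = new byte[in.getInt()];
            in.get(raw);
            decoded[i] = new String(raw, StandardCharsets.UTF_8);
        }
        System.out.println(String.join(",", decoded)); // ab,c
    }
}
```
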
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java
index eef6e4f..532e27a 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/StringSerDeser.java
@@ -18,22 +18,24 @@
 
 public class StringSerDeser implements EntitySerDeser<String> {
 
-	public StringSerDeser(){}
+    public StringSerDeser() {
+    }
 
-	@Override
-	public String deserialize(byte[] bytes){
-		return new String(bytes);
-	}
-	
-	@Override
-	public byte[] serialize(String obj){
-		if(obj == null)
-			return null;
-		return obj.getBytes();
-	}
+    @Override
+    public String deserialize(byte[] bytes) {
+        return new String(bytes);
+    }
 
-	@Override
-	public Class<String> type() {
-		return String.class;
-	}
+    @Override
+    public byte[] serialize(String obj) {
+        if (obj == null) {
+            return null;
+        }
+        return obj.getBytes();
+    }
+
+    @Override
+    public Class<String> type() {
+        return String.class;
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java
index ac722cd..d36487a 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Table.java
@@ -21,8 +21,10 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Table {
-	String value() default "";
+    String value() default "";
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java
index ac9b328..0f5c38d 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/Tags.java
@@ -27,5 +27,7 @@
 @Target(ElementType.TYPE)
 @Retention(RetentionPolicy.RUNTIME)
 public @interface Tags {
-	String[] value() default {""};
+    String[] value() default {
+                              ""
+    };
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java
index 01023bc..e708f14 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/meta/TimeSeries.java
@@ -21,8 +21,10 @@
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
-@Target({ElementType.TYPE})
+@Target({
+         ElementType.TYPE
+})
 @Retention(RetentionPolicy.RUNTIME)
 public @interface TimeSeries {
-	boolean value() default true;
+    boolean value() default true;
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java
index 43a7073..5744ee8 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericByRowkeyReader.java
@@ -30,49 +30,51 @@
 import org.apache.eagle.common.EagleBase64Wrapper;
 
 public class GenericByRowkeyReader {
-	private static final Logger LOG = LoggerFactory.getLogger(GenericByRowkeyReader.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GenericByRowkeyReader.class);
 
-	private TaggedLogObjectMapper mapper;
-	private String table;
-	private String columnFamily;
-	private boolean outputAll;
-	private List<String> outputColumns;
-	private GenericReader.EntityFactory entityFactory;
-	
-	public GenericByRowkeyReader(TaggedLogObjectMapper mapper, GenericReader.EntityFactory entityFactory, String table, String columnFamily, boolean outputAll, List<String> outputColumns){
-		this.mapper = mapper;
-		this.entityFactory = entityFactory;
-		this.table = table;
-		this.columnFamily = columnFamily;
-		this.outputAll = outputAll;
-		this.outputColumns = outputColumns;
-	}
-	
-	public List<TaggedLogAPIEntity> read(List<String> rowkeys) throws IOException{
-		HBaseLogByRowkeyReader reader = new HBaseLogByRowkeyReader(this.table, this.columnFamily, 
-				outputAll, outputColumns);
-		List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
-		try{
-			reader.open();
-			for(String rowkeyString : rowkeys){
-				byte[] rowkey = EagleBase64Wrapper.decode(rowkeyString);
-				InternalLog log = reader.get(rowkey);
-				TaggedLogAPIEntity entity = entityFactory.create();
-				entities.add(entity);
-				entity.setTags(log.getTags());
-				entity.setTimestamp(log.getTimestamp());
-				entity.setEncodedRowkey(log.getEncodedRowkey());
-				entity.setPrefix(log.getPrefix());
-				Map<String, byte[]> qualifierValues = log.getQualifierValues();
-				mapper.populateQualifierValues(entity, qualifierValues);
-			}
-		}catch(IOException ex){
-			LOG.error("Fail read by rowkey", ex);
-			throw ex;
-		}finally{
-			reader.close();
-		}
-		
-		return entities;
-	}
+    private TaggedLogObjectMapper mapper;
+    private String table;
+    private String columnFamily;
+    private boolean outputAll;
+    private List<String> outputColumns;
+    private GenericReader.EntityFactory entityFactory;
+
+    public GenericByRowkeyReader(TaggedLogObjectMapper mapper, GenericReader.EntityFactory entityFactory,
+                                 String table, String columnFamily, boolean outputAll,
+                                 List<String> outputColumns) {
+        this.mapper = mapper;
+        this.entityFactory = entityFactory;
+        this.table = table;
+        this.columnFamily = columnFamily;
+        this.outputAll = outputAll;
+        this.outputColumns = outputColumns;
+    }
+
+    public List<TaggedLogAPIEntity> read(List<String> rowkeys) throws IOException {
+        HBaseLogByRowkeyReader reader = new HBaseLogByRowkeyReader(this.table, this.columnFamily, outputAll,
+                                                                   outputColumns);
+        List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
+        try {
+            reader.open();
+            for (String rowkeyString : rowkeys) {
+                byte[] rowkey = EagleBase64Wrapper.decode(rowkeyString);
+                InternalLog log = reader.get(rowkey);
+                TaggedLogAPIEntity entity = entityFactory.create();
+                entities.add(entity);
+                entity.setTags(log.getTags());
+                entity.setTimestamp(log.getTimestamp());
+                entity.setEncodedRowkey(log.getEncodedRowkey());
+                entity.setPrefix(log.getPrefix());
+                Map<String, byte[]> qualifierValues = log.getQualifierValues();
+                mapper.populateQualifierValues(entity, qualifierValues);
+            }
+        } catch (IOException ex) {
+            LOG.error("Fail read by rowkey", ex);
+            throw ex;
+        } finally {
+            reader.close();
+        }
+
+        return entities;
+    }
 }
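
A hedged usage sketch of the reader reformatted above; the mapper and entity factory are assumed to exist for a concrete entity type, the table and column-family names are placeholders, and the import location of `TaggedLogObjectMapper` is assumed.

```java
import java.io.IOException;
import java.util.Arrays;
import java.util.List;

import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
// package of TaggedLogObjectMapper assumed to match the rest of this module
import org.apache.eagle.log.base.taggedlog.TaggedLogObjectMapper;
import org.apache.eagle.log.entity.old.GenericByRowkeyReader;
import org.apache.eagle.log.entity.old.GenericReader;

public class RowkeyReadSketch {
    public static List<TaggedLogAPIEntity> readByRowkeys(TaggedLogObjectMapper mapper,
                                                         GenericReader.EntityFactory factory)
        throws IOException {
        // "unittest" and "f" are placeholder table / column-family names
        GenericByRowkeyReader reader =
            new GenericByRowkeyReader(mapper, factory, "unittest", "f", true, null);
        // encoded rowkeys would normally come from an earlier write or query
        return reader.read(Arrays.asList("encodedRowkey1", "encodedRowkey2"));
    }
}
```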
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java
index e97b522..f2acd85 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericDeleter.java
@@ -32,20 +32,19 @@
 import org.apache.eagle.log.entity.meta.IndexDefinition;
 
 public class GenericDeleter {
-	private static final Logger LOG = LoggerFactory.getLogger(GenericDeleter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GenericDeleter.class);
 
-	private final HBaseLogDeleter deleter;
-	private final HBaseLogByRowkeyReader reader;
-	
-	
-	public GenericDeleter(EntityDefinition ed) {
-		this(ed.getTable(), ed.getColumnFamily());
-	}
-	
-	public GenericDeleter(String table, String columnFamily) {
-		this.deleter = new HBaseLogDeleter(table, columnFamily);
-		this.reader = new HBaseLogByRowkeyReader(table, columnFamily, true, null);
-	}
+    private final HBaseLogDeleter deleter;
+    private final HBaseLogByRowkeyReader reader;
+
+    public GenericDeleter(EntityDefinition ed) {
+        this(ed.getTable(), ed.getColumnFamily());
+    }
+
+    public GenericDeleter(String table, String columnFamily) {
+        this.deleter = new HBaseLogDeleter(table, columnFamily);
+        this.reader = new HBaseLogByRowkeyReader(table, columnFamily, true, null);
+    }
 
     public void deleteByRowkeys(List<byte[]> rowkeys) throws Exception {
         try {
@@ -66,70 +65,73 @@
             throw e;
         }
     }
-	
-	public List<String> delete(List<? extends TaggedLogAPIEntity> entities) throws Exception{
+
+    public List<String> delete(List<? extends TaggedLogAPIEntity> entities) throws Exception {
         List<String> encodedRowkey = new LinkedList<String>();
-		try{
-			deleter.open();
-			final Map<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> entityClassMap = classifyEntities(entities);
-			for (Map.Entry<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> entry : entityClassMap.entrySet()) {
-				final Class<? extends TaggedLogAPIEntity> clazz = entry.getKey();
-				final List<? extends TaggedLogAPIEntity> entityList = entry.getValue();
+        try {
+            deleter.open();
+            final Map<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> entityClassMap = classifyEntities(entities);
+            for (Map.Entry<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> entry : entityClassMap
+                .entrySet()) {
+                final Class<? extends TaggedLogAPIEntity> clazz = entry.getKey();
+                final List<? extends TaggedLogAPIEntity> entityList = entry.getValue();
 
-				final EntityDefinition entityDef = EntityDefinitionManager.getEntityDefinitionByEntityClass(clazz);
-				// TODO: we should fix this hardcoded prefix hack
-				fixPrefixAndTimestampIssue(entityList, entityDef);
+                final EntityDefinition entityDef = EntityDefinitionManager
+                    .getEntityDefinitionByEntityClass(clazz);
+                // TODO: we should fix this hardcoded prefix hack
+                fixPrefixAndTimestampIssue(entityList, entityDef);
 
-				final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByEntities(entityList, entityDef);
-				// Check index
-				final IndexDefinition[] indexes = entityDef.getIndexes();
-				if (indexes != null && indexes.length > 0) {
-					reader.open();
-					final List<InternalLog> logs = reader.get(rowkeys);
-					final List<TaggedLogAPIEntity> newEntities = HBaseInternalLogHelper.buildEntities(logs, entityDef);
-					for (TaggedLogAPIEntity entity : newEntities) {
-						// Add index rowkeys
-						for (IndexDefinition index : indexes) {
-							final byte[] indexRowkey = index.generateIndexRowkey(entity);
-							rowkeys.add(indexRowkey);
-						}
-					}
-				}
-                for(byte[] rowkey:rowkeys) {
+                final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByEntities(entityList, entityDef);
+                // Check index
+                final IndexDefinition[] indexes = entityDef.getIndexes();
+                if (indexes != null && indexes.length > 0) {
+                    reader.open();
+                    final List<InternalLog> logs = reader.get(rowkeys);
+                    final List<TaggedLogAPIEntity> newEntities = HBaseInternalLogHelper
+                        .buildEntities(logs, entityDef);
+                    for (TaggedLogAPIEntity entity : newEntities) {
+                        // Add index rowkeys
+                        for (IndexDefinition index : indexes) {
+                            final byte[] indexRowkey = index.generateIndexRowkey(entity);
+                            rowkeys.add(indexRowkey);
+                        }
+                    }
+                }
+                for (byte[] rowkey : rowkeys) {
                     encodedRowkey.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
                 }
-				deleter.deleteRowkeys(rowkeys);
-			}
-		}catch(IOException ioe){
-			LOG.error("Fail writing tagged log", ioe);
-			throw ioe;
-		}finally{
-			deleter.close();
-	 	}
+                deleter.deleteRowkeys(rowkeys);
+            }
+        } catch (IOException ioe) {
+            LOG.error("Fail writing tagged log", ioe);
+            throw ioe;
+        } finally {
+            deleter.close();
+        }
         return encodedRowkey;
-	}
+    }
 
-	private void fixPrefixAndTimestampIssue(List<? extends TaggedLogAPIEntity> entities, EntityDefinition entityDef) {
-		for (TaggedLogAPIEntity e : entities) {
-			e.setPrefix(entityDef.getPrefix());
-			if (!entityDef.isTimeSeries()) {
-				e.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX, then actually stored 0
-			}
-		}
-	}
+    private void fixPrefixAndTimestampIssue(List<? extends TaggedLogAPIEntity> entities,
+                                            EntityDefinition entityDef) {
+        for (TaggedLogAPIEntity e : entities) {
+            e.setPrefix(entityDef.getPrefix());
+            if (!entityDef.isTimeSeries()) {
+                e.setTimestamp(EntityConstants.FIXED_WRITE_TIMESTAMP); // set timestamp to MAX so that it is actually stored as 0
+            }
+        }
+    }
 
-	private Map<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> classifyEntities(List<? extends TaggedLogAPIEntity> entities) {
-		final Map<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> result = new 
-				HashMap<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>>();
-		for (TaggedLogAPIEntity entity : entities) {
-			final Class<? extends TaggedLogAPIEntity> clazz = entity.getClass();
-			List<TaggedLogAPIEntity> list = result.get(clazz);
-			if (list == null) {
-				list = new ArrayList<TaggedLogAPIEntity>();
-				result.put(clazz, list);
-			}
-			list.add(entity);
-		}
-		return result;
-	}
+    private Map<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> classifyEntities(List<? extends TaggedLogAPIEntity> entities) {
+        final Map<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>> result = new HashMap<Class<? extends TaggedLogAPIEntity>, List<TaggedLogAPIEntity>>();
+        for (TaggedLogAPIEntity entity : entities) {
+            final Class<? extends TaggedLogAPIEntity> clazz = entity.getClass();
+            List<TaggedLogAPIEntity> list = result.get(clazz);
+            if (list == null) {
+                list = new ArrayList<TaggedLogAPIEntity>();
+                result.put(clazz, list);
+            }
+            list.add(entity);
+        }
+        return result;
+    }
 }
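
A hedged sketch of deleting previously written entities with the reformatted GenericDeleter; the entity list is assumed to be populated elsewhere, and the table and column-family names are placeholders.

```java
import java.util.List;

import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.old.GenericDeleter;

public class DeleteSketch {
    // entities are assumed to have been read or written earlier
    public static List<String> deleteAll(List<? extends TaggedLogAPIEntity> entities) throws Exception {
        // "unittest" and "f" are placeholder table / column-family names
        GenericDeleter deleter = new GenericDeleter("unittest", "f");
        // returns the encoded rowkeys that were deleted (including index rows)
        return deleter.delete(entities);
    }
}
```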
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java
index 76e314b..09ff861 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericReader.java
@@ -32,84 +32,87 @@
 import org.apache.eagle.common.DateTimeUtil;
 
 public class GenericReader {
-	private static final Logger LOG = LoggerFactory.getLogger(GenericReader.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GenericReader.class);
 
-	public interface EntityFactory{
-		public TaggedLogAPIEntity create();
-	}
-	
-	private Schema schema;
-	private EntityFactory entityFactory;
-	private TaggedLogObjectMapper mapper;
-	
-	public GenericReader(TaggedLogObjectMapper mapper, Schema schema, EntityFactory factory){
-		this.mapper = mapper;
-		this.schema = schema;
-		this.entityFactory = factory;
-	}
-	
-	public List<TaggedLogAPIEntity> read(String startTime, 
-			String endTime, List<String> tagNameValues, List<String> outputTags, 
-			List<String> outputFields, String startRowkey, int pageSize) throws Exception{
-		Date start = DateTimeUtil.humanDateToDate(startTime);
-		Date end = DateTimeUtil.humanDateToDate(endTime);
-		
-		// decode the query parameters
-		// TODO should support one tag has multiple tag values
-		Map<String, List<String>> searchTags = new HashMap<String, List<String>>();
-		for(String tagNameValue : tagNameValues){
-			String[] tmp = tagNameValue.split("=");
-			if(tmp == null || tmp.length <=1){
-				continue; // silently ignore this parameter
-			}
-			List<String> tagValues = searchTags.get(tmp[0]);
-			if(tagValues == null){
-				tagValues = new ArrayList<String>();
-				searchTags.put(tmp[0], tagValues);
-			}
-			tagValues.add(tmp[1]);
-		}
-		
-		int numTags = outputTags.size();
-		int numFields = outputFields.size();
-		byte[][] outputQualifiers = new byte[numTags+numFields][];
-		int i = 0;
-		for(String tag : outputTags){
-			outputQualifiers[i++] = tag.getBytes();
-		}
-		for(String field : outputFields){
-			outputQualifiers[i++] = field.getBytes();
-		}
-		// shortcut to avoid read when pageSize=0
-		List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
-		if(pageSize <= 0){
-			return entities; // return empty entities
-		}
+    public interface EntityFactory {
+        public TaggedLogAPIEntity create();
+    }
 
-		HBaseLogReader reader = new HBaseLogReader(schema, start, end, searchTags, startRowkey, outputQualifiers);
-		try{
-			reader.open();
-			InternalLog log;
-			int count = 0;
-			while ((log = reader.read()) != null) {
-				TaggedLogAPIEntity entity = entityFactory.create();
-				entity.setTags(log.getTags());
-				entity.setTimestamp(log.getTimestamp());
-				entity.setEncodedRowkey(log.getEncodedRowkey());
-				entity.setPrefix(log.getPrefix());
-				entities.add(entity);
-				
-				Map<String, byte[]> qualifierValues = log.getQualifierValues();
-				mapper.populateQualifierValues(entity, qualifierValues);
-				if(++count == pageSize)
-					break;
-			}
-		}catch(IOException ioe){
-			LOG.error("Fail reading log", ioe);
-			throw ioe;
-		}finally{
-			reader.close();
-		}		
-		return entities;
-	}
+    private Schema schema;
+    private EntityFactory entityFactory;
+    private TaggedLogObjectMapper mapper;
+
+    public GenericReader(TaggedLogObjectMapper mapper, Schema schema, EntityFactory factory) {
+        this.mapper = mapper;
+        this.schema = schema;
+        this.entityFactory = factory;
+    }
+
+    public List<TaggedLogAPIEntity> read(String startTime, String endTime, List<String> tagNameValues,
+                                         List<String> outputTags, List<String> outputFields,
+                                         String startRowkey, int pageSize)
+        throws Exception {
+
+        // decode the query parameters
+        // TODO should support one tag having multiple tag values
+        Map<String, List<String>> searchTags = new HashMap<String, List<String>>();
+        for (String tagNameValue : tagNameValues) {
+            String[] tmp = tagNameValue.split("=");
+            if (tmp == null || tmp.length <= 1) {
+                continue; // silently ignore this parameter
+            }
+            List<String> tagValues = searchTags.get(tmp[0]);
+            if (tagValues == null) {
+                tagValues = new ArrayList<String>();
+                searchTags.put(tmp[0], tagValues);
+            }
+            tagValues.add(tmp[1]);
+        }
+
+        int numTags = outputTags.size();
+        int numFields = outputFields.size();
+        byte[][] outputQualifiers = new byte[numTags + numFields][];
+        int i = 0;
+        for (String tag : outputTags) {
+            outputQualifiers[i++] = tag.getBytes();
+        }
+        for (String field : outputFields) {
+            outputQualifiers[i++] = field.getBytes();
+        }
+        // shortcut to avoid read when pageSize=0
+        List<TaggedLogAPIEntity> entities = new ArrayList<TaggedLogAPIEntity>();
+        if (pageSize <= 0) {
+            return entities; // return empty entities
+        }
+
+        Date start = DateTimeUtil.humanDateToDate(startTime);
+        Date end = DateTimeUtil.humanDateToDate(endTime);
+        HBaseLogReader reader = new HBaseLogReader(schema, start, end, searchTags, startRowkey,
+                                                   outputQualifiers);
+        try {
+            reader.open();
+            InternalLog log;
+            int count = 0;
+            while ((log = reader.read()) != null) {
+                TaggedLogAPIEntity entity = entityFactory.create();
+                entity.setTags(log.getTags());
+                entity.setTimestamp(log.getTimestamp());
+                entity.setEncodedRowkey(log.getEncodedRowkey());
+                entity.setPrefix(log.getPrefix());
+                entities.add(entity);
+
+                Map<String, byte[]> qualifierValues = log.getQualifierValues();
+                mapper.populateQualifierValues(entity, qualifierValues);
+                if (++count == pageSize) {
+                    break;
+                }
+            }
+        } catch (IOException ioe) {
+            LOG.error("Fail reading log", ioe);
+            throw ioe;
+        } finally {
+            reader.close();
+        }
+        return entities;
+    }
 }
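
For context, EntityFactory is the hook both readers use to instantiate result entities; a minimal implementation returning the base entity type might look like the sketch below (a real factory would usually return the concrete TaggedLogAPIEntity subclass the caller expects back).

```java
import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.old.GenericReader;

public class EntityFactorySketch {
    // Minimal factory: returns the base entity type; this instance would be
    // passed into GenericReader or GenericByRowkeyReader.
    public static GenericReader.EntityFactory baseEntityFactory() {
        return new GenericReader.EntityFactory() {
            @Override
            public TaggedLogAPIEntity create() {
                return new TaggedLogAPIEntity();
            }
        };
    }
}
```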
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java
index 3d29237..9f3765a 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/GenericWriter.java
@@ -32,62 +32,62 @@
 import org.apache.eagle.common.EagleBase64Wrapper;
 
 public class GenericWriter {
-	private static final Logger LOG = LoggerFactory.getLogger(GenericWriter.class);
+    private static final Logger LOG = LoggerFactory.getLogger(GenericWriter.class);
 
-	private String table;
-	private String columnFamily;
-	private TaggedLogObjectMapper mapper;
-	
-	public GenericWriter(TaggedLogObjectMapper mapper, String table, String columnFamily){
-		this.mapper = mapper;
-		this.table = table;
-		this.columnFamily = columnFamily;
-	}
-	
-	public List<String> write(List<? extends TaggedLogAPIEntity> entities) throws IOException{
-		HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily);
-		List<String> rowkeys = new ArrayList<String>();
-		
-		try{
-			writer.open();
-			for(TaggedLogAPIEntity entity : entities){
-				InternalLog log = new InternalLog();
-				Map<String, String> inputTags = entity.getTags();
-				Map<String, String> tags = new TreeMap<String, String>();
-				for(Map.Entry<String, String> entry : inputTags.entrySet()){
-					tags.put(entry.getKey(), entry.getValue());
-				}
-				log.setTags(tags);
-				log.setTimestamp(entity.getTimestamp());
-				log.setPrefix(entity.getPrefix());
-				log.setQualifierValues(mapper.createQualifierValues(entity));
-				byte[] rowkey  = writer.write(log);
-				rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
-			}
-		}catch(IOException ioe){
-			LOG.error("Fail writing tagged log", ioe);
-			throw ioe;
-		}finally{
-			writer.close();
-	 	}
-		return rowkeys;
-	}
-	
-	public void updateByRowkey(List<? extends TaggedLogAPIEntity> entities) throws IOException{
-		HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily);
-		try{
-			writer.open();
-			for(TaggedLogAPIEntity entity : entities){
-				byte[] rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey());
-				InternalLog log = new InternalLog();
-				log.setQualifierValues(mapper.createQualifierValues(entity));
-				writer.updateByRowkey(rowkey, log);
-			}
-		}catch(IOException ioe){
-			LOG.error("Fail writing tagged log", ioe);
-			throw ioe;
-		}finally{
-			writer.close();
-	 	}
-	}
+    private String table;
+    private String columnFamily;
+    private TaggedLogObjectMapper mapper;
+
+    public GenericWriter(TaggedLogObjectMapper mapper, String table, String columnFamily) {
+        this.mapper = mapper;
+        this.table = table;
+        this.columnFamily = columnFamily;
+    }
+
+    public List<String> write(List<? extends TaggedLogAPIEntity> entities) throws IOException {
+        HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily);
+        List<String> rowkeys = new ArrayList<String>();
+
+        try {
+            writer.open();
+            for (TaggedLogAPIEntity entity : entities) {
+                InternalLog log = new InternalLog();
+                Map<String, String> inputTags = entity.getTags();
+                Map<String, String> tags = new TreeMap<String, String>();
+                for (Map.Entry<String, String> entry : inputTags.entrySet()) {
+                    tags.put(entry.getKey(), entry.getValue());
+                }
+                log.setTags(tags);
+                log.setTimestamp(entity.getTimestamp());
+                log.setPrefix(entity.getPrefix());
+                log.setQualifierValues(mapper.createQualifierValues(entity));
+                byte[] rowkey = writer.write(log);
+                rowkeys.add(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
+            }
+        } catch (IOException ioe) {
+            LOG.error("Fail writing tagged log", ioe);
+            throw ioe;
+        } finally {
+            writer.close();
+        }
+        return rowkeys;
+    }
+
+    public void updateByRowkey(List<? extends TaggedLogAPIEntity> entities) throws IOException {
+        HBaseLogWriter writer = new HBaseLogWriter(table, columnFamily);
+        try {
+            writer.open();
+            for (TaggedLogAPIEntity entity : entities) {
+                byte[] rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey());
+                InternalLog log = new InternalLog();
+                log.setQualifierValues(mapper.createQualifierValues(entity));
+                writer.updateByRowkey(rowkey, log);
+            }
+        } catch (IOException ioe) {
+            LOG.error("Fail writing tagged log", ioe);
+            throw ioe;
+        } finally {
+            writer.close();
+        }
+    }
 }
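
A hedged sketch of writing entities with the reformatted GenericWriter; the mapper is assumed to exist for the concrete entity type (its import location is assumed), and the table and column-family names are placeholders.

```java
import java.io.IOException;
import java.util.List;

import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
// package of TaggedLogObjectMapper assumed to match the rest of this module
import org.apache.eagle.log.base.taggedlog.TaggedLogObjectMapper;
import org.apache.eagle.log.entity.old.GenericWriter;

public class WriteSketch {
    public static List<String> writeAll(TaggedLogObjectMapper mapper,
                                        List<? extends TaggedLogAPIEntity> entities) throws IOException {
        // "unittest" and "f" are placeholder table / column-family names
        GenericWriter writer = new GenericWriter(mapper, "unittest", "f");
        // returns the encoded rowkeys of the rows just written
        return writer.write(entities);
    }
}
```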
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java
index 37e55ac..6b49cf7 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogByRowkeyReader.java
@@ -36,126 +36,129 @@
 import org.apache.eagle.common.EagleBase64Wrapper;
 
 /**
- * Get details of rowkey and qualifiers given a raw rowkey. This function mostly is used for inspecting one row's content 
- * This only supports single column family, which is mostly used in log application 
+ * Get details of rowkey and qualifiers given a raw rowkey. This function is mostly used for inspecting one
+ * row's content. It only supports a single column family, which is the common case for log applications.
  */
-public class HBaseLogByRowkeyReader implements Closeable{
-	private String table;
-	private String columnFamily;
-	private byte[][] outputQualifiers;
-	private boolean includingAllQualifiers;
-	private HTableInterface tbl;
-	private boolean isOpen;
-	
-	/**
-	 * if includingAllQualifiers is true, then the fourth argument outputQualifiers is ignored
-	 * if includingAllQualifiers is false, then need calculate based on the fourth argument outputQualifiers
-	 */
-	public HBaseLogByRowkeyReader(String table, String columnFamily, boolean includingAllQualifiers, List<String> qualifiers){
-		this.table = table;
-		this.columnFamily = columnFamily;
-		if(qualifiers != null){
-			this.outputQualifiers = new byte[qualifiers.size()][];
-			int i = 0;
-			for(String qualifier : qualifiers){
-				this.outputQualifiers[i++] = qualifier.getBytes();
-			}
-		}
-		this.includingAllQualifiers = includingAllQualifiers;
-	}
-	
-	
-	public void open() throws IOException {
-		if (isOpen)
-			return; // silently return
-		try {
-			tbl = EagleConfigFactory.load().getHTable(this.table);
-		} catch (RuntimeException ex) {
-			throw new IOException(ex);
-		}
-		
-		isOpen = true;
-	}
+public class HBaseLogByRowkeyReader implements Closeable {
+    private String table;
+    private String columnFamily;
+    private byte[][] outputQualifiers;
+    private boolean includingAllQualifiers;
+    private HTableInterface tbl;
+    private boolean isOpen;
 
-	/**
-	 * Here all qualifiers' values goes into qualifierValues of InternalLog as given a row, we can't differentiate it's a tag or a field
-	 * @param rowkeys
-	 * @return
-	 * @throws IOException
-	 */
-	public List<InternalLog> get(List<byte[]> rowkeys) throws IOException, NoSuchRowException {
-		final List<Get> gets = createGets(rowkeys);
-		final Result[] results = tbl.get(gets);
-		final List<InternalLog> logs = new ArrayList<InternalLog>();
-		for (Result result : results) {
-			final InternalLog log = buildLog(result);
-			logs.add(log);
-		}
-		return logs;
-	}
-	
-	private List<Get> createGets(List<byte[]> rowkeys) {
-		final List<Get> gets = new ArrayList<Get>();
-		for (byte[] rowkey : rowkeys) {
-			final Get get = createGet(rowkey);
-			gets.add(get);
-		}
-		return gets;
-	}
+    /**
+     * If includingAllQualifiers is true, the fourth argument outputQualifiers is ignored; if it is false,
+     * the output qualifiers are calculated from that fourth argument.
+     */
+    public HBaseLogByRowkeyReader(String table, String columnFamily, boolean includingAllQualifiers,
+                                  List<String> qualifiers) {
+        this.table = table;
+        this.columnFamily = columnFamily;
+        if (qualifiers != null) {
+            this.outputQualifiers = new byte[qualifiers.size()][];
+            int i = 0;
+            for (String qualifier : qualifiers) {
+                this.outputQualifiers[i++] = qualifier.getBytes();
+            }
+        }
+        this.includingAllQualifiers = includingAllQualifiers;
+    }
 
+    public void open() throws IOException {
+        if (isOpen) {
+            return; // silently return
+        }
+        try {
+            tbl = EagleConfigFactory.load().getHTable(this.table);
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
 
-	private Get createGet(byte[] rowkey) {
-		final Get get = new Get(rowkey);
-		byte[] cf = this.columnFamily.getBytes();
-		if(includingAllQualifiers){
-			get.addFamily(cf);
-		}else{
-			for(byte[] outputQualifier : outputQualifiers){
-				get.addColumn(cf, outputQualifier);
-			}
-		}
-		return get;
-	}
+        isOpen = true;
+    }
 
+    /**
+     * Here all qualifiers' values go into qualifierValues of InternalLog because, given a row, we can't
+     * tell whether a qualifier is a tag or a field.
+     *
+     * @param rowkeys
+     * @return
+     * @throws IOException
+     */
+    public List<InternalLog> get(List<byte[]> rowkeys) throws IOException, NoSuchRowException {
+        final List<Get> gets = createGets(rowkeys);
+        final Result[] results = tbl.get(gets);
+        final List<InternalLog> logs = new ArrayList<InternalLog>();
+        for (Result result : results) {
+            final InternalLog log = buildLog(result);
+            logs.add(log);
+        }
+        return logs;
+    }
 
-	/**
-	 * Here all qualifiers' values goes into qualifierValues of InternalLog as given a row, we can't differentiate it's a tag or a field
-	 * @param rowkey
-	 * @return
-	 * @throws IOException
-	 */
-	public InternalLog get(byte[] rowkey) throws IOException, NoSuchRowException{
-		final Get get = createGet(rowkey);
-		final Result result = tbl.get(get);
-		final InternalLog log = buildLog(result);
-		return log;
-	}
-	
-	private InternalLog buildLog(Result result) {
-		final InternalLog log = new InternalLog();
-		final byte[] rowkey = result.getRow();
-		log.setEncodedRowkey(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
-		long timestamp = ByteUtil.bytesToLong(rowkey, 4);
-		timestamp = Long.MAX_VALUE - timestamp;
-		log.setTimestamp(timestamp);
-		Map<String, byte[]> qualifierValues = new HashMap<String, byte[]>();
-		log.setQualifierValues(qualifierValues);
-		NavigableMap<byte[], byte[]> map = result.getFamilyMap(this.columnFamily.getBytes());
-		if(map == null){
-			throw new NoSuchRowException(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
-		}
-		for(Map.Entry<byte[], byte[]> entry : map.entrySet()){
-			byte[] qualifier = entry.getKey();
-			byte[] value = entry.getValue();
-			qualifierValues.put(new String(qualifier), value);
-		}
-		return log;
-	}
+    /**
+     * Here all qualifiers' values go into qualifierValues of InternalLog because, given a row, we can't
+     * tell whether a qualifier is a tag or a field.
+     *
+     * @param rowkey
+     * @return
+     * @throws IOException
+     */
+    public InternalLog get(byte[] rowkey) throws IOException, NoSuchRowException {
+        final Get get = createGet(rowkey);
+        final Result result = tbl.get(get);
+        final InternalLog log = buildLog(result);
+        return log;
+    }
 
+    private List<Get> createGets(List<byte[]> rowkeys) {
+        final List<Get> gets = new ArrayList<Get>();
+        for (byte[] rowkey : rowkeys) {
+            final Get get = createGet(rowkey);
+            gets.add(get);
+        }
+        return gets;
+    }
 
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-	}
+    private Get createGet(byte[] rowkey) {
+        final Get get = new Get(rowkey);
+        byte[] cf = this.columnFamily.getBytes();
+        if (includingAllQualifiers) {
+            get.addFamily(cf);
+        } else {
+            for (byte[] outputQualifier : outputQualifiers) {
+                get.addColumn(cf, outputQualifier);
+            }
+        }
+        return get;
+    }
+
+    private InternalLog buildLog(Result result) {
+        final InternalLog log = new InternalLog();
+        final byte[] rowkey = result.getRow();
+        log.setEncodedRowkey(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
+        long timestamp = ByteUtil.bytesToLong(rowkey, 4);
+        timestamp = Long.MAX_VALUE - timestamp;
+        log.setTimestamp(timestamp);
+        Map<String, byte[]> qualifierValues = new HashMap<String, byte[]>();
+        log.setQualifierValues(qualifierValues);
+        NavigableMap<byte[], byte[]> map = result.getFamilyMap(this.columnFamily.getBytes());
+        if (map == null) {
+            throw new NoSuchRowException(EagleBase64Wrapper.encodeByteArray2URLSafeString(rowkey));
+        }
+        for (Map.Entry<byte[], byte[]> entry : map.entrySet()) {
+            byte[] qualifier = entry.getKey();
+            byte[] value = entry.getValue();
+            qualifierValues.put(new String(qualifier), value);
+        }
+        return log;
+    }
+
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+    }
 }
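
The rowkey parsing above relies on the reversed-timestamp layout (a 4-byte prefix hash followed by Long.MAX_VALUE minus the timestamp). A small self-contained demonstration of that encoding, using plain java.nio instead of Eagle's ByteUtil:

```java
import java.nio.ByteBuffer;

public class ReversedTimestampDemo {
    public static void main(String[] args) {
        String prefix = "sample_metric";     // placeholder prefix
        long timestamp = 1_700_000_000_000L; // placeholder epoch millis

        // encode: 4-byte prefix hash + 8-byte reversed timestamp
        ByteBuffer buf = ByteBuffer.allocate(12);
        buf.putInt(prefix.hashCode());
        buf.putLong(Long.MAX_VALUE - timestamp);
        byte[] rowkey = buf.array();

        // decode: skip the 4-byte prefix, read the long, reverse it back
        long stored = ByteBuffer.wrap(rowkey, 4, 8).getLong();
        long restored = Long.MAX_VALUE - stored;

        System.out.println(restored == timestamp); // true
        // newer timestamps produce smaller stored values, so scans return newest rows first
    }
}
```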
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java
index 3460949..ae69321 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogDeleter.java
@@ -30,102 +30,105 @@
 import org.apache.eagle.log.entity.meta.EntityDefinition;
 import org.apache.eagle.common.EagleBase64Wrapper;
 
-public class HBaseLogDeleter implements LogDeleter{
-	private HTableInterface tbl;
-	private String table;
-	private String columnFamily;
-	
-	public HBaseLogDeleter(String table, String columnFamily) {
-		this.table = table;
-		this.columnFamily = columnFamily;
-	}
-	
-	@Override
-	public void open() throws IOException {
-		try{
-			tbl = EagleConfigFactory.load().getHTable(this.table);
-		}catch(RuntimeException ex){
-			throw new IOException(ex);
-		}
-	}
+public class HBaseLogDeleter implements LogDeleter {
+    private HTableInterface tbl;
+    private String table;
+    private String columnFamily;
 
-	@Override
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-	}
-	
-	@Override
-	public void flush() throws IOException{
-		throw new IllegalArgumentException("Not supported flush for hbase delete");
-	}
-	
-	/**
-	 * support delete by constructing a rowkey or by encoded rowkey passed from client
-	 */
-	@Override
-	public void delete(InternalLog log) throws IOException{
-		final byte[] rowkey = RowkeyHelper.getRowkey(log);
-		final Delete delete = createDelete(rowkey);
-		tbl.delete(delete);
-	}
-	
-	public void delete(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception {
-		final byte[] rowkey = RowkeyHelper.getRowkey(entity, entityDef);
-		final Delete delete = createDelete(rowkey);
-		tbl.delete(delete);
-	}
-	
-	/**
-	 * Batch delete
-	 * @param logs
-	 * @throws IOException
-	 */
-	public void delete(List<InternalLog> logs) throws IOException{
-		final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByLogs(logs);
-		deleteRowkeys(rowkeys);
-	}
+    public HBaseLogDeleter(String table, String columnFamily) {
+        this.table = table;
+        this.columnFamily = columnFamily;
+    }
 
+    @Override
+    public void open() throws IOException {
+        try {
+            tbl = EagleConfigFactory.load().getHTable(this.table);
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
+    }
 
-	/**
-	 * Batch delete
-	 * @throws Exception
-	 */
-	public void deleteEntities(List<? extends TaggedLogAPIEntity> entities, EntityDefinition entityDef) throws Exception{
-		final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByEntities(entities, entityDef);
-		deleteRowkeys(rowkeys);
-	}
-	
-	/**
-	 * Batch delete
-	 * @throws IOException
-	 */
-	public void deleteRowkeys(List<byte[]> rowkeys) throws IOException {
-		final List<Delete> deletes = new ArrayList<Delete>(rowkeys.size());
-		for (byte[] rowkey : rowkeys) {
-			final Delete delete = createDelete(rowkey);
-			deletes.add(delete);
-		}
-		tbl.delete(deletes);
-	}
-	
-	@Override
-	public void deleteRowByRowkey(String encodedRowkey) throws IOException{
-		byte[] row = EagleBase64Wrapper.decode(encodedRowkey);
-		final Delete delete = createDelete(row);
-		tbl.delete(delete);
-	}
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+    }
 
-	public void deleteRowByRowkey(List<String> encodedRowkeys) throws IOException {
-		final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByEncodedRowkeys(encodedRowkeys);
-		deleteRowkeys(rowkeys);
-	}
-	
-	private Delete createDelete(byte[] row) throws IOException{
-		Delete delete = new Delete(row);
-		delete.deleteFamily(columnFamily.getBytes());
-		return delete;
-	}
+    @Override
+    public void flush() throws IOException {
+        throw new IllegalArgumentException("Not supported flush for hbase delete");
+    }
+
+    /**
+     * support delete by constructing a rowkey or by encoded rowkey passed from client.
+     */
+    @Override
+    public void delete(InternalLog log) throws IOException {
+        final byte[] rowkey = RowkeyHelper.getRowkey(log);
+        final Delete delete = createDelete(rowkey);
+        tbl.delete(delete);
+    }
+
+    public void delete(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception {
+        final byte[] rowkey = RowkeyHelper.getRowkey(entity, entityDef);
+        final Delete delete = createDelete(rowkey);
+        tbl.delete(delete);
+    }
+
+    /**
+     * Batch delete.
+     *
+     * @param logs
+     * @throws IOException
+     */
+    public void delete(List<InternalLog> logs) throws IOException {
+        final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByLogs(logs);
+        deleteRowkeys(rowkeys);
+    }
+
+    /**
+     * Batch delete.
+     *
+     * @throws Exception
+     */
+    public void deleteEntities(List<? extends TaggedLogAPIEntity> entities, EntityDefinition entityDef)
+        throws Exception {
+        final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByEntities(entities, entityDef);
+        deleteRowkeys(rowkeys);
+    }
+
+    /**
+     * Batch delete.
+     *
+     * @throws IOException
+     */
+    public void deleteRowkeys(List<byte[]> rowkeys) throws IOException {
+        final List<Delete> deletes = new ArrayList<Delete>(rowkeys.size());
+        for (byte[] rowkey : rowkeys) {
+            final Delete delete = createDelete(rowkey);
+            deletes.add(delete);
+        }
+        tbl.delete(deletes);
+    }
+
+    @Override
+    public void deleteRowByRowkey(String encodedRowkey) throws IOException {
+        byte[] row = EagleBase64Wrapper.decode(encodedRowkey);
+        final Delete delete = createDelete(row);
+        tbl.delete(delete);
+    }
+
+    public void deleteRowByRowkey(List<String> encodedRowkeys) throws IOException {
+        final List<byte[]> rowkeys = RowkeyHelper.getRowkeysByEncodedRowkeys(encodedRowkeys);
+        deleteRowkeys(rowkeys);
+    }
+
+    private Delete createDelete(byte[] row) throws IOException {
+        Delete delete = new Delete(row);
+        delete.deleteFamily(columnFamily.getBytes());
+        return delete;
+    }
 
 }
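
A hedged sketch of the open/delete/close life cycle the reformatted deleter expects; the encoded rowkey, table and column-family names are placeholders, and a live HBase connection is required for this to actually run.

```java
import java.io.IOException;

import org.apache.eagle.log.entity.old.HBaseLogDeleter;

public class DeleteByRowkeySketch {
    public static void deleteRow(String encodedRowkey) throws IOException {
        // "unittest" and "f" are placeholder table / column-family names
        HBaseLogDeleter deleter = new HBaseLogDeleter("unittest", "f");
        try {
            deleter.open();
            deleter.deleteRowByRowkey(encodedRowkey);
        } finally {
            deleter.close();
        }
    }
}
```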
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java
index c75b94f..b8cbd20 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/HBaseLogReader.java
@@ -45,203 +45,207 @@
 import org.apache.eagle.common.EagleBase64Wrapper;
 
 public class HBaseLogReader implements LogReader {
-	private static Logger LOG = LoggerFactory.getLogger(HBaseLogReader.class);
+    private static Logger LOG = LoggerFactory.getLogger(HBaseLogReader.class);
 
-	protected byte[][] qualifiers;
-	private HTableInterface tbl;
-	private byte[] startKey;
-	private byte[] stopKey;
-	protected Map<String, List<String>> searchTags;
+    protected byte[][] qualifiers;
+    private HTableInterface tbl;
+    private byte[] startKey;
+    private byte[] stopKey;
+    protected Map<String, List<String>> searchTags;
 
-	private ResultScanner rs;
-	private boolean isOpen = false;
-	
-	private Schema schema;
+    private ResultScanner rs;
+    private boolean isOpen = false;
 
-	public HBaseLogReader(Schema schema, Date startTime, Date endTime, 
-			Map<String, List<String>> searchTags, String lastScanKey,
-			byte[][] outputQualifier){
-		this.schema = schema;
-		this.qualifiers = outputQualifier;
-		this.startKey = buildRowKey(schema.getPrefix(), startTime);
-		if (lastScanKey == null) {
-			this.stopKey = buildRowKey(schema.getPrefix(), endTime);
-		} else {
-			// build stop key
-			this.stopKey = EagleBase64Wrapper.decode(lastScanKey);
-			// concat byte 0 to exclude this stopKey
-			this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { 0 });
-		}
-		this.searchTags = searchTags;
-	}
-	
-	/**
-	 * TODO If the required field is null for a row, then this row will not be fetched. That could be a problem for counting
-	 * Need another version of read to strictly get the number of rows which will return all the columns for a column family
-	 */
-	public void open() throws IOException {
-		if (isOpen)
-			return; // silently return
-		try {
-			tbl = EagleConfigFactory.load().getHTable(schema.getTable());
-		} catch (RuntimeException ex) {
-			throw new IOException(ex);
-		}
+    private Schema schema;
 
-		String rowkeyRegex = buildRegex2(searchTags);
-		RegexStringComparator regexStringComparator = new RegexStringComparator(
-				rowkeyRegex);
-		regexStringComparator.setCharset(Charset.forName("ISO-8859-1"));
-		RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator);
-		FilterList filterList = new FilterList();
-		filterList.addFilter(filter);
-		Scan s1 = new Scan();
-		// reverse timestamp, startRow is stopKey, and stopRow is startKey
-		s1.setStartRow(stopKey);
-		s1.setStopRow(startKey);
-		s1.setFilter(filterList);
-		// TODO the # of cached rows should be minimum of (pagesize and 100)
-		s1.setCaching(100);
-		// TODO not optimized for all applications
-		s1.setCacheBlocks(true);
-		// scan specified columnfamily and qualifiers
-		for(byte[] qualifier : qualifiers){
-			s1.addColumn(schema.getColumnFamily().getBytes(), qualifier);
-		}
-		rs = tbl.getScanner(s1);
-		isOpen = true;
-	}
+    public HBaseLogReader(Schema schema, Date startTime, Date endTime, Map<String, List<String>> searchTags,
+                          String lastScanKey, byte[][] outputQualifier) {
+        this.schema = schema;
+        this.qualifiers = outputQualifier;
+        this.startKey = buildRowKey(schema.getPrefix(), startTime);
+        if (lastScanKey == null) {
+            this.stopKey = buildRowKey(schema.getPrefix(), endTime);
+        } else {
+            // build stop key
+            this.stopKey = EagleBase64Wrapper.decode(lastScanKey);
+            // concat byte 0 to exclude this stopKey
+            this.stopKey = ByteUtil.concat(this.stopKey, new byte[] { 0 });
+        }
+        this.searchTags = searchTags;
+    }
 
-	public void close() throws IOException {
-		if(tbl != null){
-			new HTableFactory().releaseHTableInterface(tbl);
-		}
-		if(rs != null){
-			rs.close();
-		}
-	}
+    /**
+     * TODO If the required field is null for a row, then this row will not be fetched. That could be a
+     * problem for counting; we need another version of read that strictly gets the number of rows by
+     * returning all the columns of a column family.
+     */
+    @Override
+    public void open() throws IOException {
+        if (isOpen) {
+            return; // silently return
+        }
+        try {
+            tbl = EagleConfigFactory.load().getHTable(schema.getTable());
+        } catch (RuntimeException ex) {
+            throw new IOException(ex);
+        }
 
-	public void flush() throws IOException {
-		tbl.flushCommits();
-	}
+        String rowkeyRegex = buildRegex2(searchTags);
+        RegexStringComparator regexStringComparator = new RegexStringComparator(rowkeyRegex);
+        regexStringComparator.setCharset(Charset.forName("ISO-8859-1"));
+        RowFilter filter = new RowFilter(CompareOp.EQUAL, regexStringComparator);
+        FilterList filterList = new FilterList();
+        filterList.addFilter(filter);
+        Scan s1 = new Scan();
+        // reverse timestamp, startRow is stopKey, and stopRow is startKey
+        s1.setStartRow(stopKey);
+        s1.setStopRow(startKey);
+        s1.setFilter(filterList);
+        // TODO the # of cached rows should be minimum of (pagesize and 100)
+        s1.setCaching(100);
+        // TODO not optimized for all applications
+        s1.setCacheBlocks(true);
+        // scan specified columnfamily and qualifiers
+        for (byte[] qualifier : qualifiers) {
+            s1.addColumn(schema.getColumnFamily().getBytes(), qualifier);
+        }
+        rs = tbl.getScanner(s1);
+        isOpen = true;
+    }
 
-	private byte[] buildRowKey(String appName, Date t){
-		byte[] key = new byte[4 + 8];
-		byte[] b = ByteUtil.intToBytes(appName.hashCode());
-		System.arraycopy(b, 0, key, 0, 4);
-		// reverse timestamp
-		long ts = Long.MAX_VALUE - t.getTime();
-		System.arraycopy(ByteUtil.longToBytes(ts), 0, key, 4, 8);
-		return key;
-	}
+    @Override
+    public void close() throws IOException {
+        if (tbl != null) {
+            new HTableFactory().releaseHTableInterface(tbl);
+        }
+        if (rs != null) {
+            rs.close();
+        }
+    }
 
-	/**
-	 * one search tag may have multiple values which have OR relationship, and relationship between
-	 * different search tags is AND
-	 * the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2"
-	 * @param tags
-	 * @return
-	 */
-	protected String buildRegex2(Map<String, List<String>> tags){
-		// TODO need consider that \E could be part of tag, refer to https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java
-		SortedMap<Integer, List<Integer>> tagHash = new TreeMap<Integer, List<Integer>>();
+    public void flush() throws IOException {
+        tbl.flushCommits();
+    }
 
-		for(Map.Entry<String, List<String>> entry : tags.entrySet()){
-			String tagName = entry.getKey();
-			List<String> stringValues = entry.getValue();
-			List<Integer> hashValues = new ArrayList<Integer>(1);
-			for(String value : stringValues){
-				hashValues.add(value.hashCode());
-			}
-			tagHash.put(tagName.hashCode(), hashValues);
-		}
-		// <tag1:3><value1:3> ... <tagn:3><valuen:3>
-		StringBuilder sb = new StringBuilder();
-		sb.append("(?s)");
-		sb.append("^(?:.{12})");
-		sb.append("(?:.{").append(8).append("})*"); // for any number of tags
-		for (Map.Entry<Integer, List<Integer>> entry : tagHash.entrySet()) {
-			try {
-				sb.append("\\Q");
-				sb.append(new String(ByteUtil.intToBytes(entry.getKey()), "ISO-8859-1")).append("\\E");
-				List<Integer> hashValues = entry.getValue();
-				sb.append("(?:");
-				boolean first = true;
-				for(Integer value : hashValues){
-					if(!first){
-						sb.append('|');
-					}
-					sb.append("\\Q");
-					sb.append(new String(ByteUtil.intToBytes(value), "ISO-8859-1"));
-					sb.append("\\E");
-					first = false;
-				}
-				sb.append(")");
-				sb.append("(?:.{").append(8).append("})*"); // for any number of tags
-					} catch (Exception ex) {
-						LOG.error("Constructing regex error", ex);
-					}
-				}
-				sb.append("$");
-				if (LOG.isDebugEnabled()) {
-					LOG.debug("Pattern is " + sb.toString());
-				}
-				return sb.toString();
-	}
-	
-	public InternalLog read() throws IOException {
-		if (rs == null)
-			throw new IllegalArgumentException(
-					"ResultScanner must be initialized before reading");
+    private byte[] buildRowKey(String appName, Date t) {
+        byte[] key = new byte[4 + 8];
+        byte[] b = ByteUtil.intToBytes(appName.hashCode());
+        System.arraycopy(b, 0, key, 0, 4);
+        // reverse timestamp
+        long ts = Long.MAX_VALUE - t.getTime();
+        System.arraycopy(ByteUtil.longToBytes(ts), 0, key, 4, 8);
+        return key;
+    }
 
-		InternalLog t = null;
+    /**
+     * One search tag may have multiple values combined with OR, while different search tags are combined
+     * with AND; the query is like "(TAG1=value11 OR TAG1=value12) AND TAG2=value2".
+     *
+     * @param tags
+     * @return
+     */
+    protected String buildRegex2(Map<String, List<String>> tags) {
+        // TODO need consider that \E could be part of tag, refer to
+        // https://github.com/OpenTSDB/opentsdb/blob/master/src/core/TsdbQuery.java
+        SortedMap<Integer, List<Integer>> tagHash = new TreeMap<Integer, List<Integer>>();
 
-		Result r = rs.next();
-		if (r != null) {
-			byte[] row = r.getRow();
-			// skip the first 4 bytes : prefix
-			long timestamp = ByteUtil.bytesToLong(row, 4);
-			// reverse timestamp
-			timestamp = Long.MAX_VALUE - timestamp;
-			int count = 0; 
-			if(qualifiers != null){
-				count = qualifiers.length;
-			}
-			byte[][] values = new byte[count][];
-			Map<String, byte[]> allQualifierValues = new HashMap<String, byte[]>();
-			for (int i = 0; i < count; i++) {
-				// TODO if returned value is null, it means no this column for this row, so why set null to the object?
-				values[i] = r.getValue(schema.getColumnFamily().getBytes(), qualifiers[i]);
-				allQualifierValues.put(new String(qualifiers[i]), values[i]);
-			}
-			t = buildObject(row, timestamp, allQualifierValues);
-		}
+        for (Map.Entry<String, List<String>> entry : tags.entrySet()) {
+            String tagName = entry.getKey();
+            List<String> stringValues = entry.getValue();
+            List<Integer> hashValues = new ArrayList<Integer>(1);
+            for (String value : stringValues) {
+                hashValues.add(value.hashCode());
+            }
+            tagHash.put(tagName.hashCode(), hashValues);
+        }
+        // <tag1:3><value1:3> ... <tagn:3><valuen:3>
+        StringBuilder sb = new StringBuilder();
+        sb.append("(?s)");
+        sb.append("^(?:.{12})");
+        sb.append("(?:.{").append(8).append("})*"); // for any number of tags
+        for (Map.Entry<Integer, List<Integer>> entry : tagHash.entrySet()) {
+            try {
+                sb.append("\\Q");
+                sb.append(new String(ByteUtil.intToBytes(entry.getKey()), "ISO-8859-1")).append("\\E");
+                List<Integer> hashValues = entry.getValue();
+                sb.append("(?:");
+                boolean first = true;
+                for (Integer value : hashValues) {
+                    if (!first) {
+                        sb.append('|');
+                    }
+                    sb.append("\\Q");
+                    sb.append(new String(ByteUtil.intToBytes(value), "ISO-8859-1"));
+                    sb.append("\\E");
+                    first = false;
+                }
+                sb.append(")");
+                sb.append("(?:.{").append(8).append("})*"); // for any number of tags
+            } catch (Exception ex) {
+                LOG.error("Constructing regex error", ex);
+            }
+        }
+        sb.append("$");
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("Pattern is " + sb.toString());
+        }
+        return sb.toString();
+    }
 
-		return t;
-	}
+    @Override
+    public InternalLog read() throws IOException {
+        if (rs == null) {
+            throw new IllegalArgumentException("ResultScanner must be initialized before reading");
+        }
 
-	public InternalLog buildObject(byte[] row, long timestamp,
-			Map<String, byte[]> allQualifierValues) {
-		InternalLog alertDetail = new InternalLog();
-		String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row);
-		alertDetail.setEncodedRowkey(myRow);
-		alertDetail.setPrefix(schema.getPrefix());
-		alertDetail.setSearchTags(searchTags);
-		alertDetail.setTimestamp(timestamp);
+        InternalLog t = null;
 
-		Map<String, byte[]> logQualifierValues = new HashMap<String, byte[]>();
-		Map<String, String> logTags = new HashMap<String, String>();
-		for (Map.Entry<String, byte[]> entry : allQualifierValues.entrySet()) {
-			if (schema.isTag(entry.getKey())) {
-				if (entry.getValue() != null) {
-					logTags.put(entry.getKey(), new String(entry.getValue()));
-				}
-			} else {
-				logQualifierValues.put(entry.getKey(),entry.getValue());
-			}
-		}
-		alertDetail.setQualifierValues(logQualifierValues);
-		alertDetail.setTags(logTags);
-		return alertDetail;
-	}
+        Result r = rs.next();
+        if (r != null) {
+            byte[] row = r.getRow();
+            // skip the first 4 bytes : prefix
+            long timestamp = ByteUtil.bytesToLong(row, 4);
+            // reverse timestamp
+            timestamp = Long.MAX_VALUE - timestamp;
+            int count = 0;
+            if (qualifiers != null) {
+                count = qualifiers.length;
+            }
+            byte[][] values = new byte[count][];
+            Map<String, byte[]> allQualifierValues = new HashMap<String, byte[]>();
+            for (int i = 0; i < count; i++) {
+                // TODO if the returned value is null, this row has no such column, so why set null on
+                // the object?
+                values[i] = r.getValue(schema.getColumnFamily().getBytes(), qualifiers[i]);
+                allQualifierValues.put(new String(qualifiers[i]), values[i]);
+            }
+            t = buildObject(row, timestamp, allQualifierValues);
+        }
+
+        return t;
+    }
+
+    public InternalLog buildObject(byte[] row, long timestamp, Map<String, byte[]> allQualifierValues) {
+        InternalLog alertDetail = new InternalLog();
+        String myRow = EagleBase64Wrapper.encodeByteArray2URLSafeString(row);
+        alertDetail.setEncodedRowkey(myRow);
+        alertDetail.setPrefix(schema.getPrefix());
+        alertDetail.setSearchTags(searchTags);
+        alertDetail.setTimestamp(timestamp);
+
+        Map<String, byte[]> logQualifierValues = new HashMap<String, byte[]>();
+        Map<String, String> logTags = new HashMap<String, String>();
+        for (Map.Entry<String, byte[]> entry : allQualifierValues.entrySet()) {
+            if (schema.isTag(entry.getKey())) {
+                if (entry.getValue() != null) {
+                    logTags.put(entry.getKey(), new String(entry.getValue()));
+                }
+            } else {
+                logQualifierValues.put(entry.getKey(), entry.getValue());
+            }
+        }
+        alertDetail.setQualifierValues(logQualifierValues);
+        alertDetail.setTags(logTags);
+        return alertDetail;
+    }
 }
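
A self-contained illustration of the pattern shape buildRegex2 produces: tag and value hashes are rendered as ISO-8859-1 strings and quoted with \Q...\E, multiple values of one tag become an OR group, and (?:.{8})* skips any other tag/value pairs. The tag and value names here are arbitrary.

```java
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;

public class RowkeyRegexDemo {
    // 4-byte int rendered as an ISO-8859-1 string, mirroring the reader's byte-to-regex trick
    private static String hashString(int hash) {
        return new String(ByteBuffer.allocate(4).putInt(hash).array(), StandardCharsets.ISO_8859_1);
    }

    public static void main(String[] args) {
        // build a fake rowkey: 12-byte header + one (tag hash, value hash) pair
        String tag = "site";
        String value = "sandbox";
        ByteBuffer key = ByteBuffer.allocate(12 + 8);
        key.put(new byte[12]); // prefix hash + reversed timestamp (content irrelevant here)
        key.putInt(tag.hashCode());
        key.putInt(value.hashCode());
        String rowkeyAsString = new String(key.array(), StandardCharsets.ISO_8859_1);

        // same shape as buildRegex2: header, any tag pairs, then "\Qtag\E(\Qv1\E|\Qv2\E)", any tag pairs
        // note: as the TODO in HBaseLogReader says, a literal \E inside the hash bytes would break the quoting
        String regex = "(?s)^(?:.{12})(?:.{8})*"
            + "\\Q" + hashString(tag.hashCode()) + "\\E"
            + "(?:" + "\\Q" + hashString(value.hashCode()) + "\\E"
            + "|" + "\\Q" + hashString("other".hashCode()) + "\\E" + ")"
            + "(?:.{8})*$";

        System.out.println(Pattern.compile(regex).matcher(rowkeyAsString).matches()); // true
    }
}
```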
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java
index c5087e7..022dfc0 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/LogDeleter.java
@@ -21,14 +21,15 @@
 
 import org.apache.eagle.log.entity.InternalLog;
 
-public interface LogDeleter extends Closeable{
-	public void flush() throws IOException;
+public interface LogDeleter extends Closeable {
+    public void flush() throws IOException;
 
-	public void open() throws IOException;
+    public void open() throws IOException;
 
-	public void close() throws IOException;
+    @Override
+    public void close() throws IOException;
 
-	public void delete(InternalLog log) throws IOException;
-	
-	public void deleteRowByRowkey(String encodedRowkey) throws IOException;
+    public void delete(InternalLog log) throws IOException;
+
+    public void deleteRowByRowkey(String encodedRowkey) throws IOException;
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java
index f2bdd15..9d08cc1 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/RowkeyHelper.java
@@ -28,58 +28,59 @@
 
 public final class RowkeyHelper {
 
-	public static byte[] getRowkey(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception {
-		byte[] rowkey = null;
-		if(entity.getEncodedRowkey() != null && !(entity.getEncodedRowkey().isEmpty())){
-			rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey());
-		}else{
-			InternalLog log = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef);
-			rowkey = RowkeyBuilder.buildRowkey(log);
-		}
-		return rowkey;
-	}
+    public static byte[] getRowkey(TaggedLogAPIEntity entity, EntityDefinition entityDef) throws Exception {
+        byte[] rowkey = null;
+        if (entity.getEncodedRowkey() != null && !(entity.getEncodedRowkey().isEmpty())) {
+            rowkey = EagleBase64Wrapper.decode(entity.getEncodedRowkey());
+        } else {
+            InternalLog log = HBaseInternalLogHelper.convertToInternalLog(entity, entityDef);
+            rowkey = RowkeyBuilder.buildRowkey(log);
+        }
+        return rowkey;
+    }
 
-	public static List<byte[]> getRowkeysByEntities(List<? extends TaggedLogAPIEntity> entities, EntityDefinition entityDef) throws Exception {
-		final List<byte[]> result = new ArrayList<byte[]>(entities.size());
-		for (TaggedLogAPIEntity entity : entities) {
-			final byte[] rowkey = getRowkey(entity, entityDef);
-			result.add(rowkey);
-		}
-		return result;
-	}
-	
+    public static byte[] getRowkey(InternalLog log) {
+        byte[] rowkey = null;
+        if (log.getEncodedRowkey() != null && !(log.getEncodedRowkey().isEmpty())) {
+            rowkey = EagleBase64Wrapper.decode(log.getEncodedRowkey());
+        } else {
+            rowkey = RowkeyBuilder.buildRowkey(log);
+        }
+        return rowkey;
+    }
 
-	public static byte[] getRowkey(InternalLog log) {
-		byte[] rowkey = null;
-		if(log.getEncodedRowkey() != null && !(log.getEncodedRowkey().isEmpty())){
-			rowkey = EagleBase64Wrapper.decode(log.getEncodedRowkey());
-		}else{
-			rowkey = RowkeyBuilder.buildRowkey(log);
-		}
-		return rowkey;
-	}
+    public static byte[] getRowkey(String encodedRowkey) {
+        byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey);
+        return rowkey;
+    }
 
-	public static List<byte[]> getRowkeysByLogs(List<InternalLog> logs) {
-		final List<byte[]> result = new ArrayList<byte[]>(logs.size());
-		for (InternalLog log : logs) {
-			final byte[] rowkey = getRowkey(log);
-			result.add(rowkey);
-		}
-		return result;
-	}
+    public static List<byte[]> getRowkeysByEntities(List<? extends TaggedLogAPIEntity> entities,
+                                                    EntityDefinition entityDef)
+        throws Exception {
+        final List<byte[]> result = new ArrayList<byte[]>(entities.size());
+        for (TaggedLogAPIEntity entity : entities) {
+            final byte[] rowkey = getRowkey(entity, entityDef);
+            result.add(rowkey);
+        }
+        return result;
+    }
 
-	public static byte[] getRowkey(String encodedRowkey) {
-		byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey);
-		return rowkey;
-	}
+    public static List<byte[]> getRowkeysByLogs(List<InternalLog> logs) {
+        final List<byte[]> result = new ArrayList<byte[]>(logs.size());
+        for (InternalLog log : logs) {
+            final byte[] rowkey = getRowkey(log);
+            result.add(rowkey);
+        }
+        return result;
+    }
 
-	public static List<byte[]> getRowkeysByEncodedRowkeys(List<String> encodedRowkeys) {
-		final List<byte[]> result = new ArrayList<byte[]>(encodedRowkeys.size());
-		for (String encodedRowkey : encodedRowkeys) {
-			byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey);
-			result.add(rowkey);
-		}
-		return result;
-	}
+    public static List<byte[]> getRowkeysByEncodedRowkeys(List<String> encodedRowkeys) {
+        final List<byte[]> result = new ArrayList<byte[]>(encodedRowkeys.size());
+        for (String encodedRowkey : encodedRowkeys) {
+            byte[] rowkey = EagleBase64Wrapper.decode(encodedRowkey);
+            result.add(rowkey);
+        }
+        return result;
+    }
 
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java
index 55556bd..2ab0773 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/old/Schema.java
@@ -17,8 +17,11 @@
 package org.apache.eagle.log.entity.old;
 
 public interface Schema {
-	public boolean isTag(String qualifier);
-	public String getTable();
-	public String getColumnFamily();
-	public String getPrefix();
+    public boolean isTag(String qualifier);
+
+    public String getTable();
+
+    public String getColumnFamily();
+
+    public String getPrefix();
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java
index 1d0d398..26ec328 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/DefaultEntityRepository.java
@@ -22,13 +22,11 @@
 
 /**
  * Default Eagle framework entity repository class.
- * 
- *
  */
 public class DefaultEntityRepository extends EntityRepository {
-	public DefaultEntityRepository() {
-		entitySet.add(MetricMetadataEntity.class);
-		entitySet.add(TestLogAPIEntity.class);
-		entitySet.add(TestTimeSeriesAPIEntity.class);
-	}
+    public DefaultEntityRepository() {
+        entitySet.add(MetricMetadataEntity.class);
+        entitySet.add(TestLogAPIEntity.class);
+        entitySet.add(TestTimeSeriesAPIEntity.class);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java
index 0c24d30..ab50742 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepository.java
@@ -27,30 +27,29 @@
 import org.apache.eagle.log.entity.meta.EntitySerDeser;
 
 /**
- * Entity repository is used to store entity definition class. Each domain should define its own entities. Eagle entity
- * framework will scan all derived class implementations to get all entity definitions, then register them to EntityDefinitionManager.
- * 
- *
+ * Entity repository is used to store entity definition class. Each domain should define its own entities.
+ * Eagle entity framework will scan all derived class implementations to get all entity definitions, then
+ * register them to EntityDefinitionManager.
  */
 public abstract class EntityRepository {
 
-	protected Set<Class<? extends TaggedLogAPIEntity>> entitySet = new HashSet<Class<? extends TaggedLogAPIEntity>>();
-	protected Map<Class<?>, EntitySerDeser<?>> serDeserMap = new HashMap<Class<?>, EntitySerDeser<?>>();
+    protected Set<Class<? extends TaggedLogAPIEntity>> entitySet = new HashSet<Class<? extends TaggedLogAPIEntity>>();
+    protected Map<Class<?>, EntitySerDeser<?>> serDeserMap = new HashMap<Class<?>, EntitySerDeser<?>>();
 
-	public synchronized Collection<Class<? extends TaggedLogAPIEntity>> getEntitySet() {
-		return new ArrayList<Class<? extends TaggedLogAPIEntity>>(entitySet);
-	}
-	
-	public synchronized Map<Class<?>, EntitySerDeser<?>> getSerDeserMap() {
-		return new HashMap<Class<?>, EntitySerDeser<?>>(serDeserMap);
-	}
-	
-	public synchronized void registerEntity(Class<? extends TaggedLogAPIEntity> clazz) {
-		entitySet.add(clazz);
-	}
+    public synchronized Collection<Class<? extends TaggedLogAPIEntity>> getEntitySet() {
+        return new ArrayList<Class<? extends TaggedLogAPIEntity>>(entitySet);
+    }
 
-	public synchronized void registerSerDeser(Class<?> clazz, EntitySerDeser<?> serDeser) {
-		serDeserMap.put(clazz, serDeser);
-	}
-	
+    public synchronized Map<Class<?>, EntitySerDeser<?>> getSerDeserMap() {
+        return new HashMap<Class<?>, EntitySerDeser<?>>(serDeserMap);
+    }
+
+    public synchronized void registerEntity(Class<? extends TaggedLogAPIEntity> clazz) {
+        entitySet.add(clazz);
+    }
+
+    public synchronized void registerSerDeser(Class<?> clazz, EntitySerDeser<?> serDeser) {
+        serDeserMap.put(clazz, serDeser);
+    }
+
 }
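
For reviewers unfamiliar with this module, here is a minimal usage sketch (illustrative only, not part of this patch) of the repository/scanner contract described in the `EntityRepository` Javadoc above. It assumes, as that comment states, that `EntityRepositoryScanner` discovers `EntityRepository` subclasses and registers their entities with `EntityDefinitionManager`; package locations are taken from the imports visible elsewhere in this diff.

```java
import org.apache.eagle.log.entity.meta.EntityDefinition;
import org.apache.eagle.log.entity.meta.EntityDefinitionManager;
import org.apache.eagle.log.entity.repo.EntityRepositoryScanner;
import org.apache.eagle.log.entity.test.TestLogAPIEntity;

public class RepositoryScanExample {
    public static void main(String[] args) throws Exception {
        // Scan the classpath for EntityRepository subclasses (e.g. DefaultEntityRepository)
        // and register the entity classes they added in their constructors.
        EntityRepositoryScanner.scan();

        // After scanning, entity metadata should be resolvable by entity class.
        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
        System.out.println(ed.getTable() + "/" + ed.getColumnFamily());
    }
}
```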
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java
index 8ccee87..5c2a107 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/repo/EntityRepositoryScanner.java
@@ -31,26 +31,28 @@
 
     private static final Logger LOG = LoggerFactory.getLogger(EntityRepositoryScanner.class);
 
-    //    public static void scan() throws InstantiationException, IllegalAccessException {
-    //        // TODO currently extcos 0.3b doesn't support to search packages like "com.*.eagle.*", "org.*.eagle.*". However 0.4b depends on asm-all version 4.0, which is
-    //        // conflicted with jersey server 1.8. We should fix it later
-    //        LOG.info("Scanning all entity repositories with pattern \"org.apache.eagle.*\"");
-    //        final ComponentScanner scanner = new ComponentScanner();
-    //        final Set<Class<?>> classes = scanner.getClasses(new EntityRepoScanQuery() );
-    //        for (Class<?> entityClass : classes) {
-    //            LOG.info("Processing entity repository: " + entityClass.getName());
-    //            if (EntityRepository.class.isAssignableFrom(entityClass)) {
-    //                EntityRepository repo = (EntityRepository)entityClass.newInstance();
-    //                addRepo(repo);
-    //            }
-    //        }
-    //    }
+    // public static void scan() throws InstantiationException, IllegalAccessException {
+    // // TODO currently extcos 0.3b doesn't support to search packages like "com.*.eagle.*", "org.*.eagle.*".
+    // However 0.4b depends on asm-all version 4.0, which is
+    // // conflicted with jersey server 1.8. We should fix it later
+    // LOG.info("Scanning all entity repositories with pattern \"org.apache.eagle.*\"");
+    // final ComponentScanner scanner = new ComponentScanner();
+    // final Set<Class<?>> classes = scanner.getClasses(new EntityRepoScanQuery() );
+    // for (Class<?> entityClass : classes) {
+    // LOG.info("Processing entity repository: " + entityClass.getName());
+    // if (EntityRepository.class.isAssignableFrom(entityClass)) {
+    // EntityRepository repo = (EntityRepository)entityClass.newInstance();
+    // addRepo(repo);
+    // }
+    // }
+    // }
 
     public static void scan() throws IllegalAccessException, InstantiationException {
         LOG.info("Scanning all entity repositories");
         StopWatch stopWatch = new StopWatch();
         stopWatch.start();
-        for (Class<? extends EntityRepository> entityRepoClass : ReflectionsHelper.getInstance().getSubTypesOf(EntityRepository.class)) {
+        for (Class<? extends EntityRepository> entityRepoClass : ReflectionsHelper.getInstance()
+            .getSubTypesOf(EntityRepository.class)) {
             if (EntityRepository.class.isAssignableFrom(entityRepoClass)) {
                 EntityRepository repo = entityRepoClass.newInstance();
                 addRepo(repo);
@@ -71,12 +73,12 @@
         }
     }
 
-    //    public static class EntityRepoScanQuery extends ComponentQuery {
+    // public static class EntityRepoScanQuery extends ComponentQuery {
     //
-    //        @Override
-    //        protected void query() {
-    //            select().from("org.apache.eagle").returning(
-    //            allExtending(EntityRepository.class));
-    //        }
-    //   }
-}
\ No newline at end of file
+    // @Override
+    // protected void query() {
+    // select().from("org.apache.eagle").returning(
+    // allExtending(EntityRepository.class));
+    // }
+    // }
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java
index 7aeffa8..f606a2a 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestEntity.java
@@ -26,82 +26,81 @@
 import org.apache.eagle.log.entity.meta.Table;
 
 /**
- * this class is written by customer, but it has some contracts
- * 0. This class should conform to java bean conventions
- * 1. Annotate this class with hbase table name
- * 2. Annotate this class with hbase column family name
- * 3. Annotate those qualifier fields with column name
- * 4. Fire property change event for all fields' setter method, where field name is mandatory parameter
+ * this class is written by customer, but it has some contracts 0. This class should conform to java bean
+ * conventions 1. Annotate this class with hbase table name 2. Annotate this class with hbase column family
+ * name 3. Annotate those qualifier fields with column name 4. Fire property change event for all fields'
+ * setter method, where field name is mandatory parameter
  */
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @Table("alertdetail")
 @ColumnFamily("f")
 @Prefix("hadoop")
 @TimeSeries(true)
 public class TestEntity extends TaggedLogAPIEntity {
-	@Column("remediationID")
-	private String remediationID;
-	@Column("remediationStatus")
-	private String remediationStatus;
-	@Column("c")
-	private long count;
-	@Column("d")
-	private int numHosts;
-	@Column("e")
-	private Long numClusters;
+    @Column("remediationID")
+    private String remediationID;
+    @Column("remediationStatus")
+    private String remediationStatus;
+    @Column("c")
+    private long count;
+    @Column("d")
+    private int numHosts;
+    @Column("e")
+    private Long numClusters;
 
-	public Long getNumClusters() {
-		return numClusters;
-	}
+    public Long getNumClusters() {
+        return numClusters;
+    }
 
-	public void setNumClusters(Long numClusters) {
-		this.numClusters = numClusters;
-		pcs.firePropertyChange("numClusters", null, null);
-	}
+    public void setNumClusters(Long numClusters) {
+        this.numClusters = numClusters;
+        pcs.firePropertyChange("numClusters", null, null);
+    }
 
-	public int getNumHosts() {
-		return numHosts;
-	}
+    public int getNumHosts() {
+        return numHosts;
+    }
 
-	public void setNumHosts(int numHosts) {
-		this.numHosts = numHosts;
-		pcs.firePropertyChange("numHosts", null, null);
-	}
+    public void setNumHosts(int numHosts) {
+        this.numHosts = numHosts;
+        pcs.firePropertyChange("numHosts", null, null);
+    }
 
-	public long getCount() {
-		return count;
-	}
+    public long getCount() {
+        return count;
+    }
 
-	public void setCount(long count) {
-		this.count = count;
-		pcs.firePropertyChange("count", null, null);
-	}
+    public void setCount(long count) {
+        this.count = count;
+        pcs.firePropertyChange("count", null, null);
+    }
 
-	public String getRemediationID() {
-		return remediationID;
-	}
+    public String getRemediationID() {
+        return remediationID;
+    }
 
-	public void setRemediationID(String remediationID) {
-		this.remediationID = remediationID;
-		pcs.firePropertyChange("remediationID", null, null);
-	}
+    public void setRemediationID(String remediationID) {
+        this.remediationID = remediationID;
+        pcs.firePropertyChange("remediationID", null, null);
+    }
 
-	public String getRemediationStatus() {
-		return remediationStatus;
-	}
+    public String getRemediationStatus() {
+        return remediationStatus;
+    }
 
-	public void setRemediationStatus(String remediationStatus) {
-		this.remediationStatus = remediationStatus;
-		pcs.firePropertyChange("remediationStatus", null, null);
-	}
-	
-	public String toString(){
-		StringBuffer sb = new StringBuffer();
-		sb.append(super.toString());
-		sb.append(", remediationID:");
-		sb.append(remediationID);
-		sb.append(", remediationStatus:");
-		sb.append(remediationStatus);
-		return sb.toString();
-	}
-}
\ No newline at end of file
+    public void setRemediationStatus(String remediationStatus) {
+        this.remediationStatus = remediationStatus;
+        pcs.firePropertyChange("remediationStatus", null, null);
+    }
+
+    @Override
+    public String toString() {
+        StringBuffer sb = new StringBuffer();
+        sb.append(super.toString());
+        sb.append(", remediationID:");
+        sb.append(remediationID);
+        sb.append(", remediationStatus:");
+        sb.append(remediationStatus);
+        return sb.toString();
+    }
+}
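
As a side note on the entity contract restated in the Javadoc above (bean conventions, table/column-family annotations, column annotations on qualifier fields, property change events in setters), a minimal hypothetical entity that satisfies all four points; the class, table, and field names here are illustrative only and not part of this patch:

```java
import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
import org.apache.eagle.log.entity.meta.Column;
import org.apache.eagle.log.entity.meta.ColumnFamily;
import org.apache.eagle.log.entity.meta.Prefix;
import org.apache.eagle.log.entity.meta.Table;
import org.apache.eagle.log.entity.meta.TimeSeries;

@Table("exampletable")      // 1. HBase table name
@ColumnFamily("f")          // 2. HBase column family name
@Prefix("example")
@TimeSeries(false)
public class ExampleEntity extends TaggedLogAPIEntity {   // 0. plain Java bean
    @Column("a")            // 3. qualifier field annotated with its column name
    private String status;

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
        // 4. fire a property change event; the field name is the mandatory parameter
        pcs.firePropertyChange("status", null, null);
    }
}
```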
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java
index 99d6620..5d213b1 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestLogAPIEntity.java
@@ -20,80 +20,96 @@
 import org.apache.eagle.log.entity.meta.*;
 import com.fasterxml.jackson.databind.annotation.JsonSerialize;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @Table("unittest")
 @ColumnFamily("f")
 @Prefix("entityut")
 @Service("TestLogAPIEntity")
 @TimeSeries(false)
 @Indexes({
-	@Index(name="jobIdIndex1", columns = { "jobID" }, unique = true),
-	@Index(name="jobIdNonIndex2", columns = { "hostname" }, unique = false)
-	})
+          @Index(name = "jobIdIndex1", columns = {
+                                                  "jobID"
+          }, unique = true), @Index(name = "jobIdNonIndex2", columns = {
+                                                                        "hostname"
+          }, unique = false)
+    })
 public class TestLogAPIEntity extends TaggedLogAPIEntity {
 
-	@Column("a")
-	private int field1;
-	@Column("b")
-	private Integer field2;
-	@Column("c")
-	private long field3;
-	@Column("d")
-	private Long field4;
-	@Column("e")
-	private double field5;
-	@Column("f")
-	private Double field6;
-	@Column("g")
-	private String field7;
-	
-	public int getField1() {
-		return field1;
-	}
-	public void setField1(int field1) {
-		this.field1 = field1;
-		pcs.firePropertyChange("field1", null, null);
-	}
-	public Integer getField2() {
-		return field2;
-	}
-	public void setField2(Integer field2) {
-		this.field2 = field2;
-		pcs.firePropertyChange("field2", null, null);
-	}
-	public long getField3() {
-		return field3;
-	}
-	public void setField3(long field3) {
-		this.field3 = field3;
-		pcs.firePropertyChange("field3", null, null);
-	}
-	public Long getField4() {
-		return field4;
-	}
-	public void setField4(Long field4) {
-		this.field4 = field4;
-		pcs.firePropertyChange("field4", null, null);
-	}
-	public double getField5() {
-		return field5;
-	}
-	public void setField5(double field5) {
-		this.field5 = field5;
-		pcs.firePropertyChange("field5", null, null);
-	}
-	public Double getField6() {
-		return field6;
-	}
-	public void setField6(Double field6) {
-		this.field6 = field6;
-		pcs.firePropertyChange("field6", null, null);
-	}
-	public String getField7() {
-		return field7;
-	}
-	public void setField7(String field7) {
-		this.field7 = field7;
-		pcs.firePropertyChange("field7", null, null);
-	}
+    @Column("a")
+    private int field1;
+    @Column("b")
+    private Integer field2;
+    @Column("c")
+    private long field3;
+    @Column("d")
+    private Long field4;
+    @Column("e")
+    private double field5;
+    @Column("f")
+    private Double field6;
+    @Column("g")
+    private String field7;
+
+    public int getField1() {
+        return field1;
+    }
+
+    public void setField1(int field1) {
+        this.field1 = field1;
+        pcs.firePropertyChange("field1", null, null);
+    }
+
+    public Integer getField2() {
+        return field2;
+    }
+
+    public void setField2(Integer field2) {
+        this.field2 = field2;
+        pcs.firePropertyChange("field2", null, null);
+    }
+
+    public long getField3() {
+        return field3;
+    }
+
+    public void setField3(long field3) {
+        this.field3 = field3;
+        pcs.firePropertyChange("field3", null, null);
+    }
+
+    public Long getField4() {
+        return field4;
+    }
+
+    public void setField4(Long field4) {
+        this.field4 = field4;
+        pcs.firePropertyChange("field4", null, null);
+    }
+
+    public double getField5() {
+        return field5;
+    }
+
+    public void setField5(double field5) {
+        this.field5 = field5;
+        pcs.firePropertyChange("field5", null, null);
+    }
+
+    public Double getField6() {
+        return field6;
+    }
+
+    public void setField6(Double field6) {
+        this.field6 = field6;
+        pcs.firePropertyChange("field6", null, null);
+    }
+
+    public String getField7() {
+        return field7;
+    }
+
+    public void setField7(String field7) {
+        this.field7 = field7;
+        pcs.firePropertyChange("field7", null, null);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java
index 72cf5a2..bca195f 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/entity/test/TestTimeSeriesAPIEntity.java
@@ -21,77 +21,92 @@
 
 import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;
 
-@JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)
+@JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)
 @Table("unittest")
 @ColumnFamily("f")
 @Prefix("testTSEntity")
 @Service("TestTimeSeriesAPIEntity")
 @TimeSeries(true)
-@Tags({"cluster","datacenter","random"})
+@Tags({
+       "cluster", "datacenter", "random"
+    })
 public class TestTimeSeriesAPIEntity extends TaggedLogAPIEntity {
 
-	@Column("a")
-	private int field1;
-	@Column("b")
-	private Integer field2;
-	@Column("c")
-	private long field3;
-	@Column("d")
-	private Long field4;
-	@Column("e")
-	private double field5;
-	@Column("f")
-	private Double field6;
-	@Column("g")
-	private String field7;
-	
-	public int getField1() {
-		return field1;
-	}
-	public void setField1(int field1) {
-		this.field1 = field1;
-		pcs.firePropertyChange("field1", null, null);
-	}
-	public Integer getField2() {
-		return field2;
-	}
-	public void setField2(Integer field2) {
-		this.field2 = field2;
-		pcs.firePropertyChange("field2", null, null);
-	}
-	public long getField3() {
-		return field3;
-	}
-	public void setField3(long field3) {
-		this.field3 = field3;
-		pcs.firePropertyChange("field3", null, null);
-	}
-	public Long getField4() {
-		return field4;
-	}
-	public void setField4(Long field4) {
-		this.field4 = field4;
-		pcs.firePropertyChange("field4", null, null);
-	}
-	public double getField5() {
-		return field5;
-	}
-	public void setField5(double field5) {
-		this.field5 = field5;
-		pcs.firePropertyChange("field5", null, null);
-	}
-	public Double getField6() {
-		return field6;
-	}
-	public void setField6(Double field6) {
-		this.field6 = field6;
-		pcs.firePropertyChange("field6", null, null);
-	}
-	public String getField7() {
-		return field7;
-	}
-	public void setField7(String field7) {
-		this.field7 = field7;
-		pcs.firePropertyChange("field7", null, null);
-	}
+    @Column("a")
+    private int field1;
+    @Column("b")
+    private Integer field2;
+    @Column("c")
+    private long field3;
+    @Column("d")
+    private Long field4;
+    @Column("e")
+    private double field5;
+    @Column("f")
+    private Double field6;
+    @Column("g")
+    private String field7;
+
+    public int getField1() {
+        return field1;
+    }
+
+    public void setField1(int field1) {
+        this.field1 = field1;
+        pcs.firePropertyChange("field1", null, null);
+    }
+
+    public Integer getField2() {
+        return field2;
+    }
+
+    public void setField2(Integer field2) {
+        this.field2 = field2;
+        pcs.firePropertyChange("field2", null, null);
+    }
+
+    public long getField3() {
+        return field3;
+    }
+
+    public void setField3(long field3) {
+        this.field3 = field3;
+        pcs.firePropertyChange("field3", null, null);
+    }
+
+    public Long getField4() {
+        return field4;
+    }
+
+    public void setField4(Long field4) {
+        this.field4 = field4;
+        pcs.firePropertyChange("field4", null, null);
+    }
+
+    public double getField5() {
+        return field5;
+    }
+
+    public void setField5(double field5) {
+        this.field5 = field5;
+        pcs.firePropertyChange("field5", null, null);
+    }
+
+    public Double getField6() {
+        return field6;
+    }
+
+    public void setField6(Double field6) {
+        this.field6 = field6;
+        pcs.firePropertyChange("field6", null, null);
+    }
+
+    public String getField7() {
+        return field7;
+    }
+
+    public void setField7(String field7) {
+        this.field7 = field7;
+        pcs.firePropertyChange("field7", null, null);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java
index 4f74715..39dd34b 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ExpressionParser.java
@@ -15,7 +15,7 @@
  * limitations under the License.
  */
 /**
- * 
+ *
  */
 package org.apache.eagle.log.expression;
 
@@ -37,171 +37,169 @@
 import java.util.Map.Entry;
 
 /**
- * <h1>Expression Evaluation</h1>
- *
- * Given expression in string and set context variables, return value in double
- *
+ * <h1>Expression Evaluation</h1> Given expression in string and set context variables, return value in double
  * <br/>
  * <br/>
- * For example:
- * <code>EXP{(max(a, b)* min(a, b)) / abs(a-b+c-d)} => 600.0</code>
- *
+ * For example: <code>EXP{(max(a, b)* min(a, b)) / abs(a-b+c-d)} => 600.0</code> <br/>
  * <br/>
- * <br/>
- * <b>NOTE:</b>  Expression variable <b>must</b> be in format: <code>fieldName</code> instead of <code>@fieldName</code>
- *
- * <br/>
+ * <b>NOTE:</b> Expression variable <b>must</b> be in format: <code>fieldName</code> instead of
+ * <code>@fieldName</code> <br/>
  * <br/>
  * <h2>Dependencies:</h2>
  * <ul>
- *     <li>
- *         <a href="https://github.com/scireum/parsii">scireum/parsii</a>
- *         <i>Super fast and simple evaluator for mathematical expressions written in Java</i>
- *     </li>
+ * <li><a href="https://github.com/scireum/parsii">scireum/parsii</a> <i>Super fast and simple evaluator for
+ * mathematical expressions written in Java</i></li>
  * </ul>
- *
  */
-public class ExpressionParser{
-	private final static Logger LOG = LoggerFactory.getLogger(ExpressionParser.class);
+public class ExpressionParser {
+    private static final Logger LOG = LoggerFactory.getLogger(ExpressionParser.class);
 
-	private String exprStr;
-	private Expression expression;
-	private Scope scope;
+    private String exprStr;
+    private Expression expression;
+    private Scope scope;
 
-	@SuppressWarnings("unused")
-	public Scope getScope() {
-		return scope;
-	}
+    private List<String> dependentFields;
 
-	private List<String> dependentFields;
+    /**
+     * @param exprStr expression string in format like: <code>(max(a, b)* min(a, b)) / abs(a-b+c-d)</code>
+     * @throws ParseException
+     * @throws ParsiiInvalidException
+     */
+    public ExpressionParser(String exprStr) throws ParseException, ParsiiInvalidException {
+        this.exprStr = exprStr;
+        scope = Scope.create();
+        expression = Parser.parse(this.exprStr, scope);
+    }
 
-	/**
-	 * @param exprStr expression string in format like: <code>(max(a, b)* min(a, b)) / abs(a-b+c-d)</code>
-	 *
-	 * @throws ParseException
-	 * @throws ParsiiInvalidException
-	 */
-	public ExpressionParser(String exprStr) throws ParseException, ParsiiInvalidException{
-		this.exprStr = exprStr;
-		scope = Scope.create();
-		expression = Parser.parse(this.exprStr,scope);
-	}
+    @SuppressWarnings("unused")
+    public ExpressionParser(String exprStr, Map<String, Double> context)
+        throws ParsiiInvalidException, ParseException, ParsiiUnknowVariableException {
+        this(exprStr);
+        setVariables(context);
+    }
 
-	@SuppressWarnings("unused")
-	public ExpressionParser(String exprStr, Map<String, Double> context) throws ParsiiInvalidException, ParseException, ParsiiUnknowVariableException {
-		this(exprStr);
-		setVariables(context);
-	}
-	
-	public ExpressionParser setVariables(Map<String, Double> tuple) throws ParsiiUnknowVariableException{
-//		for(String valName : tuple.keySet()) {
-//			Double value = tuple.get(valName);
-		for(Map.Entry<String,Double> entry : tuple.entrySet()) {
+    public ExpressionParser setVariables(Map<String, Double> tuple) throws ParsiiUnknowVariableException {
+        // for(String valName : tuple.keySet()) {
+        // Double value = tuple.get(valName);
+        for (Map.Entry<String, Double> entry : tuple.entrySet()) {
             String valName = entry.getKey();
             Double value = entry.getValue();
-			Variable variable = scope.getVariable(valName);
-			if(variable!=null && value !=null) {
-				variable.setValue(value);
-			}else{
-				if(LOG.isDebugEnabled()) LOG.warn("Variable for "+valName+" is null in scope of expression: "+this.exprStr);
-			}
-		}
-		return this;
-	}
+            Variable variable = scope.getVariable(valName);
+            if (variable != null && value != null) {
+                variable.setValue(value);
+            } else {
+                if (LOG.isDebugEnabled()) {
+                    LOG.warn("Variable for " + valName + " is null in scope of expression: " + this.exprStr);
+                }
+            }
+        }
+        return this;
+    }
 
-	@SuppressWarnings("unused")
-	public ExpressionParser setVariable(Entry<String, Double> tuple) throws ParsiiUnknowVariableException{
-		if (getDependentFields().contains(tuple.getKey())) {
-			scope.getVariable(tuple.getKey()).setValue(tuple.getValue());
-		}
-		else {
-			throw new ParsiiUnknowVariableException("unknown variable: " + tuple.getKey());
-		}
-		return this;
-	}
-	
-	public ExpressionParser setVariable(String key, Double value) throws ParsiiUnknowVariableException{
-		scope.getVariable(key).setValue(value);
-		return this;
-	}
+    @SuppressWarnings("unused")
+    public ExpressionParser setVariable(Entry<String, Double> tuple) throws ParsiiUnknowVariableException {
+        if (getDependentFields().contains(tuple.getKey())) {
+            scope.getVariable(tuple.getKey()).setValue(tuple.getValue());
+        } else {
+            throw new ParsiiUnknowVariableException("unknown variable: " + tuple.getKey());
+        }
+        return this;
+    }
 
-	public double eval() throws Exception{
-		return expression.evaluate();
-	}
+    public ExpressionParser setVariable(String key, Double value) throws ParsiiUnknowVariableException {
+        scope.getVariable(key).setValue(value);
+        return this;
+    }
 
-	/**
-	 * Thread safe
-	 *
-	 * @param tuple
-	 * @return
-	 * @throws ParsiiUnknowVariableException
-	 */
-	public double eval(Map<String, Double> tuple) throws Exception {
-		synchronized (this){
-			this.setVariables(tuple);
-			return this.eval();
-		}
-	}
+    public double eval() throws Exception {
+        return expression.evaluate();
+    }
 
-	public List<String> getDependentFields() {
-		if (dependentFields == null) {
-			dependentFields = new ArrayList<String>();
-			for (String variable : scope.getNames()) {
-				if (!variable.equals("pi") && !variable.equals("E") && !variable.equals("euler"))
-					dependentFields.add(variable);
-			}
-		}
-		return dependentFields; 
-	}
+    public static double eval(String expression, TaggedLogAPIEntity entity) throws Exception {
+        ExpressionParser parser = parse(expression);
+        List<String> dependencies = parser.getDependentFields();
+        Map<String, Double> context = new HashMap<String, Double>();
+        for (String field : dependencies) {
+            String methodName = "get" + field.substring(0, 1).toUpperCase() + field.substring(1);
+            String methodUID = entity.getClass().getName() + "." + methodName;
 
-	private final static Map<String, ExpressionParser> _exprParserCache = new HashMap<String, ExpressionParser>();
+            Method m;
+            synchronized (_entityMethodCache) {
+                m = _entityMethodCache.get(methodUID);
+                if (m == null) {
+                    m = entity.getClass().getMethod(methodName);
+                    _entityMethodCache.put(methodUID, m);
+                }
+            }
+            Object obj = m.invoke(entity);
+            Double doubleValue = EntityQualifierUtils.convertObjToDouble(obj);
+            // if(doubleValue == Double.NaN) throw new IllegalArgumentException("Field "+field+": "+obj+" in
+            // expression "+expression+" is not number");
+            context.put(field, doubleValue);
+        }
+        return parser.eval(context);
+    }
 
-	/**
-	 * Thread safe
-	 *
-	 * @param expr
-	 * @return
-	 * @throws ParsiiInvalidException
-	 * @throws ParseException
-	 */
-	public static ExpressionParser parse(String expr) throws ParsiiInvalidException, ParseException {
-		if(expr == null) throw new IllegalStateException("Expression to parse is null");
-		synchronized (_exprParserCache) {
-			ExpressionParser parser = _exprParserCache.get(expr);
-			if (parser == null) {
-				parser = new ExpressionParser(expr);
-				_exprParserCache.put(expr, parser);
-			}
-			return parser;
-		}
-	}
-	public static double eval(String expression,Map<String,Double> context) throws Exception {
-		ExpressionParser parser = parse(expression);
-		return parser.eval(context);
-	}
+    /**
+     * Thread safe
+     *
+     * @param tuple
+     * @return
+     * @throws ParsiiUnknowVariableException
+     */
+    public double eval(Map<String, Double> tuple) throws Exception {
+        synchronized (this) {
+            this.setVariables(tuple);
+            return this.eval();
+        }
+    }
 
-	private static final Map<String,Method> _entityMethodCache = new HashMap<String, Method>();
-	public static double eval(String expression,TaggedLogAPIEntity entity) throws Exception {
-		ExpressionParser parser = parse(expression);
-		List<String> dependencies = parser.getDependentFields();
-		Map<String,Double> context = new HashMap<String,Double>();
-		for(String field:dependencies){
-			String methodName = "get"+field.substring(0, 1).toUpperCase() + field.substring(1);
-			String methodUID = entity.getClass().getName()+"."+methodName;
+    public static double eval(String expression, Map<String, Double> context) throws Exception {
+        ExpressionParser parser = parse(expression);
+        return parser.eval(context);
+    }
 
-			Method m;
-			synchronized (_entityMethodCache) {
-				m = _entityMethodCache.get(methodUID);
-				if (m == null) {
-					m = entity.getClass().getMethod(methodName);
-					_entityMethodCache.put(methodUID, m);
-				}
-			}
-			Object obj = m.invoke(entity);
-			Double doubleValue = EntityQualifierUtils.convertObjToDouble(obj);
-			// if(doubleValue == Double.NaN) throw new IllegalArgumentException("Field "+field+": "+obj+" in expression "+expression+" is not number");
-			context.put(field,doubleValue);
-		}
-		return parser.eval(context);
-	}
+    @SuppressWarnings("unused")
+    public Scope getScope() {
+        return scope;
+    }
+
+    public List<String> getDependentFields() {
+        if (dependentFields == null) {
+            dependentFields = new ArrayList<String>();
+            for (String variable : scope.getNames()) {
+                if (!variable.equals("pi") && !variable.equals("E") && !variable.equals("euler")) {
+                    dependentFields.add(variable);
+                }
+            }
+        }
+        return dependentFields;
+    }
+
+    private static final Map<String, ExpressionParser> _exprParserCache = new HashMap<String, ExpressionParser>();
+
+    /**
+     * Thread safe
+     *
+     * @param expr
+     * @return
+     * @throws ParsiiInvalidException
+     * @throws ParseException
+     */
+    public static ExpressionParser parse(String expr) throws ParsiiInvalidException, ParseException {
+        if (expr == null) {
+            throw new IllegalStateException("Expression to parse is null");
+        }
+        synchronized (_exprParserCache) {
+            ExpressionParser parser = _exprParserCache.get(expr);
+            if (parser == null) {
+                parser = new ExpressionParser(expr);
+                _exprParserCache.put(expr, parser);
+            }
+            return parser;
+        }
+    }
+
+    private static final Map<String, Method> _entityMethodCache = new HashMap<String, Method>();
+
 }
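
For reviewers, a brief usage sketch (not part of the patch) of the expression evaluation described in the `ExpressionParser` Javadoc above, using the static, cache-backed entry points shown in this file:

```java
import java.util.HashMap;
import java.util.Map;

import org.apache.eagle.log.expression.ExpressionParser;

public class ExpressionParserExample {
    public static void main(String[] args) throws Exception {
        Map<String, Double> context = new HashMap<String, Double>();
        context.put("a", 30.0);
        context.put("b", 20.0);
        context.put("c", 10.0);
        context.put("d", 19.0);

        // Equivalent to the Javadoc example EXP{(max(a, b) * min(a, b)) / abs(a-b+c-d)}.
        // parse() caches one ExpressionParser per expression string; eval() binds the
        // variables from the context map and evaluates the parsii expression.
        double value = ExpressionParser.eval("(max(a, b) * min(a, b)) / abs(a - b + c - d)", context);
        System.out.println(value);   // 600.0 with the values above
    }
}
```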
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java
index 22301f8..b53d947 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiInvalidException.java
@@ -22,36 +22,36 @@
 /**
  * @since Nov 7, 2014
  */
-public class ParsiiInvalidException extends Exception{
-	
-	private static final long serialVersionUID = 1L;
+public class ParsiiInvalidException extends Exception {
 
-	/**
-	 * Default constructor
-	 */
-	public ParsiiInvalidException() {
-		super();
-	}
+    private static final long serialVersionUID = 1L;
 
-	/**
-	 * @param message
-	 * @param cause
-	 */
-	public ParsiiInvalidException(String message, Throwable cause) {
-		super(message, cause);
-	}
+    /**
+     * Default constructor
+     */
+    public ParsiiInvalidException() {
+        super();
+    }
 
-	/**
-	 * @param message
-	 */
-	public ParsiiInvalidException(String message) {
-		super(message);
-	}
+    /**
+     * @param message
+     * @param cause
+     */
+    public ParsiiInvalidException(String message, Throwable cause) {
+        super(message, cause);
+    }
 
-	/**
-	 * @param cause
-	 */
-	public ParsiiInvalidException(Throwable cause) {
-		super(cause);
-	}
+    /**
+     * @param message
+     */
+    public ParsiiInvalidException(String message) {
+        super(message);
+    }
+
+    /**
+     * @param cause
+     */
+    public ParsiiInvalidException(Throwable cause) {
+        super(cause);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java
index 1573a08..bfe5a81 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/main/java/org/apache/eagle/log/expression/ParsiiUnknowVariableException.java
@@ -22,36 +22,36 @@
 /**
  * @since Nov 7, 2014
  */
-public class ParsiiUnknowVariableException extends Exception{
-	
-	private static final long serialVersionUID = 1L;
+public class ParsiiUnknowVariableException extends Exception {
 
-	/**
-	 * Default constructor
-	 */
-	public ParsiiUnknowVariableException() {
-		super();
-	}
+    private static final long serialVersionUID = 1L;
 
-	/**
-	 * @param message
-	 * @param cause
-	 */
-	public ParsiiUnknowVariableException(String message, Throwable cause) {
-		super(message, cause);
-	}
+    /**
+     * Default constructor
+     */
+    public ParsiiUnknowVariableException() {
+        super();
+    }
 
-	/**
-	 * @param message
-	 */
-	public ParsiiUnknowVariableException(String message) {
-		super(message);
-	}
+    /**
+     * @param message
+     * @param cause
+     */
+    public ParsiiUnknowVariableException(String message, Throwable cause) {
+        super(message, cause);
+    }
 
-	/**
-	 * @param cause
-	 */
-	public ParsiiUnknowVariableException(Throwable cause) {
-		super(cause);
-	}
+    /**
+     * @param message
+     */
+    public ParsiiUnknowVariableException(String message) {
+        super(message);
+    }
+
+    /**
+     * @param cause
+     */
+    public ParsiiUnknowVariableException(Throwable cause) {
+        super(cause);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java
index 092ef75..8d1bf22 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/TestGenericServiceAPIResponseEntity.java
@@ -34,34 +34,40 @@
  * @since 3/18/15
  */
 public class TestGenericServiceAPIResponseEntity {
-    final static Logger LOG = LoggerFactory.getLogger(TestGenericServiceAPIResponseEntity.class);
+    static final Logger LOG = LoggerFactory.getLogger(TestGenericServiceAPIResponseEntity.class);
 
     ObjectMapper objectMapper;
 
     @Before
-    public void setUp(){
+    public void setUp() {
         objectMapper = new ObjectMapper();
     }
 
     @JsonSerialize
-    public static class Item{
-        public Item(){}
-        public Item(String name,Double value){
+    public static class Item {
+        public Item() {
+        }
+
+        public Item(String name, Double value) {
             this.name = name;
             this.value = value;
         }
+
         private String name;
         private Double value;
 
         public String getName() {
             return name;
         }
+
         public void setName(String name) {
             this.name = name;
         }
+
         public Double getValue() {
             return value;
         }
+
         public void setValue(Double value) {
             this.value = value;
         }
@@ -71,22 +77,25 @@
     public void testSerDeserialize() throws IOException {
         // mock up service side to serialize
         GenericServiceAPIResponseEntity<Item> entity = new GenericServiceAPIResponseEntity<Item>(Item.class);
-        entity.setObj(Arrays.asList(new Item("a",1.2),new Item("b",1.3),new Item("c",1.4)));
-        entity.setMeta(new HashMap<String, Object>(){{
-            put("tag1","val1");
-            put("tag2","val2");
-        }});
+        entity.setObj(Arrays.asList(new Item("a", 1.2), new Item("b", 1.3), new Item("c", 1.4)));
+        entity.setMeta(new HashMap<String, Object>() {
+            {
+                put("tag1", "val1");
+                put("tag2", "val2");
+            }
+        });
 
-//        entity.setTypeByObj();
+        // entity.setTypeByObj();
         entity.setSuccess(true);
         String json = objectMapper.writeValueAsString(entity);
         LOG.info(json);
 
         // mock up client side to deserialize
-        GenericServiceAPIResponseEntity deserEntity = objectMapper.readValue(json,GenericServiceAPIResponseEntity.class);
-        Assert.assertEquals(json,objectMapper.writeValueAsString(deserEntity));
+        GenericServiceAPIResponseEntity deserEntity = objectMapper
+            .readValue(json, GenericServiceAPIResponseEntity.class);
+        Assert.assertEquals(json, objectMapper.writeValueAsString(deserEntity));
         Assert.assertEquals(3, deserEntity.getObj().size());
-        Assert.assertEquals(LinkedList.class,deserEntity.getObj().getClass());
-        Assert.assertEquals(Item.class,deserEntity.getObj().get(0).getClass());
+        Assert.assertEquals(LinkedList.class, deserEntity.getObj().getClass());
+        Assert.assertEquals(Item.class, deserEntity.getObj().get(0).getClass());
     }
-}
\ No newline at end of file
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java
index 12fba59..85ce8a5 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDouble2DArraySerDeser.java
@@ -26,23 +26,25 @@
     private Double2DArraySerDeser double2DArraySerDeser = new Double2DArraySerDeser();
 
     @Test
-    public void testSerDeser(){
-        double[][] data = new double[][]{
-                {0,1,2,4},
-                {4,2,1,0},
-                {4},
-                null,
-                {}
+    public void testSerDeser() {
+        double[][] data = new double[][] {
+                                          {
+                                           0, 1, 2, 4
+                                          }, {
+                                              4, 2, 1, 0
+                                          }, {
+                                              4
+                                          }, null, {}
         };
 
         byte[] bytes = double2DArraySerDeser.serialize(data);
         double[][] data2 = double2DArraySerDeser.deserialize(bytes);
 
-        assert  data.length == data2.length;
+        assert data.length == data2.length;
         assert data[0].length == data2[0].length;
         assert data[1].length == data2[1].length;
         assert data[2].length == data2[2].length;
         assert data[3] == data2[3] && data2[3] == null;
         assert data[4].length == data2[4].length;
     }
-}
\ No newline at end of file
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java
index 2432e47..641f128 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestDoubleSerDeser.java
@@ -24,35 +24,46 @@
 
 public class TestDoubleSerDeser {
 
-	@Test
-	public void test() {
-		DoubleSerDeser dsd = new DoubleSerDeser();
-		//byte[] t = {'N', 'a', 'N'};
-		byte [] t = dsd.serialize(Double.NaN); 
-	
-		Double d = dsd.deserialize(t);
-		System.out.println(d);
-		//Double d = dsd.deserialize(t);		
-	}
+    @Test
+    public void test() {
+        DoubleSerDeser dsd = new DoubleSerDeser();
+        // byte[] t = {'N', 'a', 'N'};
+        byte[] t = dsd.serialize(Double.NaN);
 
-	/**
-	 * @link http://en.wikipedia.org/wiki/Double-precision_floating-point_format
-	 */
-	@Test
-	public void testIEEE754_Binary64_DoublePrecisionFloatingPointFormat(){
-		for(Double last = null,i=Math.pow(-2.0,33);i< Math.pow(2.0,33);i+=Math.pow(2.0,10)){
-			if(last != null){
-				Assert.assertTrue(i > last);
-				if(last < 0 && i <0){
-					Assert.assertTrue("Negative double value and its  serialization Binary array have negative correlation", new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil.doubleToBytes(last)) < 0);
-				}else if(last < 0 && i >=0){
-					Assert.assertTrue("Binary array for negative double is always greater than any positive doubles' ",new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil.doubleToBytes(last)) < 0);
-				}else if(last >= 0){
-					Assert.assertTrue("Positive double value and its  serialization Binary array have positive correlation",new BinaryComparator(ByteUtil.doubleToBytes(i)).compareTo(ByteUtil.doubleToBytes(last)) > 0);
-				}
-			}
-			last = i;
-		}
-		Assert.assertTrue("Binary array for negative double is always greater than any positive doubles'",new BinaryComparator(ByteUtil.doubleToBytes(-1.0)).compareTo(ByteUtil.doubleToBytes(Math.pow(2.0,32)))>0) ;
-	}
+        Double d = dsd.deserialize(t);
+        System.out.println(d);
+        // Double d = dsd.deserialize(t);
+    }
+
+    /**
+     * @link http://en.wikipedia.org/wiki/Double-precision_floating-point_format
+     */
+    @Test
+    public void testIEEE754_Binary64_DoublePrecisionFloatingPointFormat() {
+        for (Double last = null, i = Math.pow(-2.0, 33); i < Math.pow(2.0, 33); i += Math.pow(2.0, 10)) {
+            if (last != null) {
+                Assert.assertTrue(i > last);
+                if (last < 0 && i < 0) {
+                    Assert
+                        .assertTrue("Negative double value and its  serialization Binary array have negative correlation",
+                                    new BinaryComparator(ByteUtil.doubleToBytes(i))
+                                        .compareTo(ByteUtil.doubleToBytes(last)) < 0);
+                } else if (last < 0 && i >= 0) {
+                    Assert
+                        .assertTrue("Binary array for negative double is always greater than any positive doubles' ",
+                                    new BinaryComparator(ByteUtil.doubleToBytes(i))
+                                        .compareTo(ByteUtil.doubleToBytes(last)) < 0);
+                } else if (last >= 0) {
+                    Assert
+                        .assertTrue("Positive double value and its  serialization Binary array have positive correlation",
+                                    new BinaryComparator(ByteUtil.doubleToBytes(i))
+                                        .compareTo(ByteUtil.doubleToBytes(last)) > 0);
+                }
+            }
+            last = i;
+        }
+        Assert.assertTrue("Binary array for negative double is always greater than any positive doubles'",
+                          new BinaryComparator(ByteUtil.doubleToBytes(-1.0))
+                              .compareTo(ByteUtil.doubleToBytes(Math.pow(2.0, 32))) > 0);
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java
index 33aee32..cb1534c 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestGenericEntityIndexStreamReader.java
@@ -38,15 +38,17 @@
 
     @BeforeClass
     public static void createTable() throws IOException, IllegalAccessException, InstantiationException {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily());
     }
 
     @Test
     public void testUniqueIndexRead() throws Exception {
         EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
-        final EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
-        
+        final EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+
         final List<TestLogAPIEntity> list = new ArrayList<TestLogAPIEntity>();
         TestLogAPIEntity e = new TestLogAPIEntity();
         e.setField1(1);
@@ -64,7 +66,7 @@
         GenericEntityWriter writer = new GenericEntityWriter(ed.getService());
         List<String> result = writer.write(list);
         Assert.assertNotNull(result);
-        
+
         final IndexDefinition indexDef = ed.getIndexes()[0];
         SearchCondition condition = new SearchCondition();
         condition.setOutputFields(new ArrayList<String>());
@@ -82,7 +84,7 @@
 
         UniqueIndexStreamReader indexReader = new UniqueIndexStreamReader(indexDef, condition);
         GenericEntityBatchReader batchReader = new GenericEntityBatchReader(indexReader);
-        List<TestLogAPIEntity> entities =  batchReader.read();
+        List<TestLogAPIEntity> entities = batchReader.read();
         Assert.assertNotNull(entities);
         Assert.assertTrue(entities.size() >= 1);
         TestLogAPIEntity e1 = entities.get(0);
@@ -93,25 +95,27 @@
         Assert.assertEquals(e.getField5(), e1.getField5(), 0.001);
         Assert.assertEquals(e.getField6(), e1.getField6());
         Assert.assertEquals(e.getField7(), e1.getField7());
-        
+
         GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily());
         deleter.delete(list);
-        
+
         indexReader = new UniqueIndexStreamReader(indexDef, condition);
         batchReader = new GenericEntityBatchReader(indexReader);
-        entities =  batchReader.read();
+        entities = batchReader.read();
         Assert.assertNotNull(entities);
         Assert.assertTrue(entities.isEmpty());
     }
 
     @Test
     public void testNonClusterIndexRead() throws Exception {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         // hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily());
 
         EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
-        final EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
-        
+        final EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+
         final List<TestLogAPIEntity> list = new ArrayList<TestLogAPIEntity>();
         TestLogAPIEntity e = new TestLogAPIEntity();
         e.setField1(1);
@@ -129,7 +133,7 @@
         GenericEntityWriter writer = new GenericEntityWriter(ed.getService());
         List<String> result = writer.write(list);
         Assert.assertNotNull(result);
-        
+
         final IndexDefinition indexDef = ed.getIndexes()[1];
         SearchCondition condition = new SearchCondition();
         condition.setOutputFields(new ArrayList<String>());
@@ -147,7 +151,7 @@
 
         NonClusteredIndexStreamReader indexReader = new NonClusteredIndexStreamReader(indexDef, condition);
         GenericEntityBatchReader batchReader = new GenericEntityBatchReader(indexReader);
-        List<TestLogAPIEntity> entities =  batchReader.read();
+        List<TestLogAPIEntity> entities = batchReader.read();
         Assert.assertNotNull(entities);
         Assert.assertTrue(entities.size() >= 1);
         TestLogAPIEntity e1 = entities.get(0);
@@ -159,13 +163,12 @@
         Assert.assertEquals(e.getField6(), e1.getField6());
         Assert.assertEquals(e.getField7(), e1.getField7());
 
-
         GenericDeleter deleter = new GenericDeleter(ed.getTable(), ed.getColumnFamily());
         deleter.delete(list);
-        
+
         indexReader = new NonClusteredIndexStreamReader(indexDef, condition);
         batchReader = new GenericEntityBatchReader(indexReader);
-        entities =  batchReader.read();
+        entities = batchReader.read();
         // hbase.deleteTable(entityDefinition.getTable());
         Assert.assertNotNull(entities);
         Assert.assertTrue(entities.isEmpty());
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java
index a755668..2b9e915 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseIntegerLogHelper.java
@@ -31,37 +31,37 @@
  * @since : 11/10/14,2014
  */
 public class TestHBaseIntegerLogHelper {
-	@Test
-	public void testTimeSeriesAPIEntity(){
-		InternalLog internalLog = new InternalLog();
-		Map<String,byte[]> map = new HashMap<String,byte[]>();
-		TestTimeSeriesAPIEntity apiEntity = new TestTimeSeriesAPIEntity();
-		EntityDefinition ed = null;
-		try {
-			ed = EntityDefinitionManager.getEntityByServiceName("TestTimeSeriesAPIEntity");
-		} catch (InstantiationException e) {
-			e.printStackTrace();
-		} catch (IllegalAccessException e) {
-			e.printStackTrace();
-		}
-		map.put("a", ByteUtil.intToBytes(12));
-		map.put("c", ByteUtil.longToBytes(123432432l));
-		map.put("cluster", new String("cluster4ut").getBytes());
-		map.put("datacenter", new String("datacenter4ut").getBytes());
+    @Test
+    public void testTimeSeriesAPIEntity() {
+        InternalLog internalLog = new InternalLog();
+        Map<String, byte[]> map = new HashMap<String, byte[]>();
+        TestTimeSeriesAPIEntity apiEntity = new TestTimeSeriesAPIEntity();
+        EntityDefinition ed = null;
+        try {
+            ed = EntityDefinitionManager.getEntityByServiceName("TestTimeSeriesAPIEntity");
+        } catch (InstantiationException e) {
+            e.printStackTrace();
+        } catch (IllegalAccessException e) {
+            e.printStackTrace();
+        }
+        map.put("a", ByteUtil.intToBytes(12));
+        map.put("c", ByteUtil.longToBytes(123432432l));
+        map.put("cluster", new String("cluster4ut").getBytes());
+        map.put("datacenter", new String("datacenter4ut").getBytes());
 
-		internalLog.setQualifierValues(map);
-		internalLog.setTimestamp(System.currentTimeMillis());
+        internalLog.setQualifierValues(map);
+        internalLog.setTimestamp(System.currentTimeMillis());
 
-		try {
-			TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(internalLog, ed);
-			Assert.assertTrue(entity instanceof TestTimeSeriesAPIEntity);
-			TestTimeSeriesAPIEntity tsentity = (TestTimeSeriesAPIEntity) entity;
-			Assert.assertEquals("cluster4ut",tsentity.getTags().get("cluster"));
-			Assert.assertEquals("datacenter4ut",tsentity.getTags().get("datacenter"));
-			Assert.assertEquals(12,tsentity.getField1());
-			Assert.assertEquals(123432432l,tsentity.getField3());
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
-	}
+        try {
+            TaggedLogAPIEntity entity = HBaseInternalLogHelper.buildEntity(internalLog, ed);
+            Assert.assertTrue(entity instanceof TestTimeSeriesAPIEntity);
+            TestTimeSeriesAPIEntity tsentity = (TestTimeSeriesAPIEntity)entity;
+            Assert.assertEquals("cluster4ut", tsentity.getTags().get("cluster"));
+            Assert.assertEquals("datacenter4ut", tsentity.getTags().get("datacenter"));
+            Assert.assertEquals(12, tsentity.getField1());
+            Assert.assertEquals(123432432l, tsentity.getField3());
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java
index 4324b73..04a1336 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWriteEntitiesPerformance.java
@@ -38,7 +38,8 @@
 
     @Before
     public void setUp() throws IllegalAccessException, InstantiationException, IOException {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily());
 
         EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
@@ -52,7 +53,8 @@
 
     @After
     public void cleanUp() throws IllegalAccessException, InstantiationException, IOException {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         hbase.deleteTable(entityDefinition.getTable());
     }
 
@@ -70,15 +72,15 @@
         int wroteCount = 0;
         List<String> rowkeys = new ArrayList<String>();
         List<TestLogAPIEntity> list = new ArrayList<TestLogAPIEntity>();
-        for (int i = 0 ; i <= count;i++) {
+        for (int i = 0; i <= count; i++) {
             TestLogAPIEntity e = new TestLogAPIEntity();
             e.setTimestamp(new Date().getTime());
             e.setField1(i);
             e.setField2(i);
             e.setField3(i);
-            e.setField4((long) i);
-            e.setField5((double) i);
-            e.setField6((double) i);
+            e.setField4((long)i);
+            e.setField5(i);
+            e.setField6((double)i);
             e.setField7(String.valueOf(i));
             e.setTags(new HashMap<String, String>());
             e.getTags().put("jobID", "index_test_job_id");
@@ -87,14 +89,17 @@
             e.getTags().put("class", e.toString());
             list.add(e);
 
-            if ( list.size() >= 1000) {
+            if (list.size() >= 1000) {
                 try {
                     StopWatch watch = new StopWatch();
                     watch.start();
                     rowkeys.addAll(writer.write(list));
                     watch.stop();
                     wroteCount += list.size();
-                    if (LOG.isDebugEnabled()) LOG.debug("Wrote "+wroteCount+" / "+count+" entities"+" in "+watch.getTime()+" ms");
+                    if (LOG.isDebugEnabled()) {
+                        LOG.debug("Wrote " + wroteCount + " / " + count + " entities" + " in "
+                                  + watch.getTime() + " ms");
+                    }
                     list.clear();
                 } catch (Exception e1) {
                     Assert.fail(e1.getMessage());
@@ -105,11 +110,15 @@
         try {
             rowkeys.addAll(writer.write(list));
             wroteCount += list.size();
-            if (LOG.isDebugEnabled()) LOG.debug("wrote "+wroteCount+" / "+count+" entities");
+            if (LOG.isDebugEnabled()) {
+                LOG.debug("wrote " + wroteCount + " / " + count + " entities");
+            }
         } catch (Exception e) {
             Assert.fail(e.getMessage());
         }
-        if (LOG.isDebugEnabled()) LOG.debug("done "+count+" entities");
+        if (LOG.isDebugEnabled()) {
+            LOG.debug("done " + count + " entities");
+        }
         return rowkeys;
     }
 
@@ -127,4 +136,4 @@
         LOG.info("End time: " + endTime);
         LOG.info("Totally take " + watch.getTime() * 1.0 / 1000 + " s");
     }
-}
\ No newline at end of file
+}
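
A side note on the logging changes in this file: the one-line `if (LOG.isDebugEnabled()) LOG.debug(...)` statements are expanded into braced blocks, which keeps the cheap guard around the string concatenation while satisfying brace-requiring checkstyle rules (e.g. `NeedBraces`). A minimal standalone sketch of the idiom, with hypothetical class and counter names that are not part of this module:

```java
// Illustrative sketch only: the guarded-logging idiom used in the test above.
// The isDebugEnabled() guard avoids building the message string when debug
// logging is off; the braces satisfy brace-requiring checkstyle checks.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class GuardedLoggingSketch {
    private static final Logger LOG = LoggerFactory.getLogger(GuardedLoggingSketch.class);

    public static void main(String[] args) {
        int wroteCount = 1000; // hypothetical counters, not taken from the test
        int count = 5000;
        if (LOG.isDebugEnabled()) {
            LOG.debug("Wrote " + wroteCount + " / " + count + " entities");
        }
    }
}
```

Where SLF4J's parameterized form is preferred, `LOG.debug("Wrote {} / {} entities", wroteCount, count)` defers the formatting itself and makes the explicit guard unnecessary for simple messages.
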
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWritePerformance.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWritePerformance.java
index 75b55ba..9ee52c9 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWritePerformance.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestHBaseWritePerformance.java
@@ -27,74 +27,71 @@
 
 public class TestHBaseWritePerformance {
 
-	public static void main(String[] args) throws IOException {
-		
-		HTableInterface tbl = EagleConfigFactory.load().getHTable("unittest");
+    public static void main(String[] args) throws IOException {
 
-		int putSize = 1000;
-		List<Put> list = new ArrayList<Put>(putSize);
-		for (int i = 0; i < putSize; ++i) {
-			byte[] v = Integer.toString(i).getBytes();
-			Put p = new Put(v);
-			p.add("f".getBytes(), "a".getBytes(), 100, v);
-			list.add(p);
-		}
+        HTableInterface tbl = EagleConfigFactory.load().getHTable("unittest");
 
-		// Case 1
-		System.out.println("Case 1: autoflush = true, individual put");
-		tbl.setAutoFlush(true);
-		long startTime = System.currentTimeMillis();
-		for (int i = 0; i < 1; ++i) {
-			for (Put p : list) {
-				tbl.put(p);
-			}
-			tbl.flushCommits();
-		}
-		long endTime = System.currentTimeMillis();
-		System.out.println("Case 1: " + (endTime - startTime) + " ms");
-		
-		
-		// Case 2
-		System.out.println("Case 2: autoflush = true, multi-put");
-		tbl.setAutoFlush(true);
-		startTime = System.currentTimeMillis();
-		for (int i = 0; i < 1; ++i) {
-			tbl.put(list);
-			tbl.flushCommits();
-		}
-		endTime = System.currentTimeMillis();
-		System.out.println("Case 2: " + (endTime - startTime) + " ms");
+        int putSize = 1000;
+        List<Put> list = new ArrayList<Put>(putSize);
+        for (int i = 0; i < putSize; ++i) {
+            byte[] v = Integer.toString(i).getBytes();
+            Put p = new Put(v);
+            p.add("f".getBytes(), "a".getBytes(), 100, v);
+            list.add(p);
+        }
 
-		
-		// Case 3
-		System.out.println("Case 3: autoflush = false, multi-put");
-		tbl.setAutoFlush(false);
-		startTime = System.currentTimeMillis();
-		for (int i = 0; i < 1; ++i) {
-			tbl.put(list);
-			tbl.flushCommits();
-		}
-		endTime = System.currentTimeMillis();
-		System.out.println("Case 3: " + (endTime - startTime) + " ms");
+        // Case 1
+        System.out.println("Case 1: autoflush = true, individual put");
+        tbl.setAutoFlush(true);
+        long startTime = System.currentTimeMillis();
+        for (int i = 0; i < 1; ++i) {
+            for (Put p : list) {
+                tbl.put(p);
+            }
+            tbl.flushCommits();
+        }
+        long endTime = System.currentTimeMillis();
+        System.out.println("Case 1: " + (endTime - startTime) + " ms");
 
-		
-		// Case 4
-		System.out.println("Case 4: autoflush = false, individual put");
-		tbl.setAutoFlush(true);
-		startTime = System.currentTimeMillis();
-		for (int i = 0; i < 1; ++i) {
-			for (Put p : list) {
-				tbl.put(p);
-			}
-			tbl.flushCommits();
-		}
-		endTime = System.currentTimeMillis();
-		System.out.println("Case 4: " + (endTime - startTime) + " ms");
+        // Case 2
+        System.out.println("Case 2: autoflush = true, multi-put");
+        tbl.setAutoFlush(true);
+        startTime = System.currentTimeMillis();
+        for (int i = 0; i < 1; ++i) {
+            tbl.put(list);
+            tbl.flushCommits();
+        }
+        endTime = System.currentTimeMillis();
+        System.out.println("Case 2: " + (endTime - startTime) + " ms");
 
-	}
-	
-	@Test
-	public void test() {
-		
-	}
+        // Case 3
+        System.out.println("Case 3: autoflush = false, multi-put");
+        tbl.setAutoFlush(false);
+        startTime = System.currentTimeMillis();
+        for (int i = 0; i < 1; ++i) {
+            tbl.put(list);
+            tbl.flushCommits();
+        }
+        endTime = System.currentTimeMillis();
+        System.out.println("Case 3: " + (endTime - startTime) + " ms");
+
+        // Case 4
+        System.out.println("Case 4: autoflush = false, individual put");
+        tbl.setAutoFlush(true);
+        startTime = System.currentTimeMillis();
+        for (int i = 0; i < 1; ++i) {
+            for (Put p : list) {
+                tbl.put(p);
+            }
+            tbl.flushCommits();
+        }
+        endTime = System.currentTimeMillis();
+        System.out.println("Case 4: " + (endTime - startTime) + " ms");
+
+    }
+
+    @Test
+    public void test() {
+
+    }
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java
index 63b1d82..6d7a9ef 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/TestTestLogAPIEntity.java
@@ -25,7 +25,6 @@
 import org.apache.eagle.log.entity.old.GenericDeleter;
 import org.apache.eagle.log.entity.test.TestLogAPIEntity;
 import org.apache.eagle.service.hbase.TestHBaseBase;
-import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
@@ -40,13 +39,16 @@
 
     @BeforeClass
     public static void createTable() throws IllegalAccessException, InstantiationException, IOException {
-        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         hbase.createTable(ed.getTable(), ed.getColumnFamily());
     }
 
     @Test
-    public void testGetValue() throws InstantiationException, IllegalAccessException, IllegalArgumentException, InvocationTargetException {
-        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+    public void testGetValue() throws InstantiationException, IllegalAccessException,
+        IllegalArgumentException, InvocationTargetException {
+        EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         if (ed == null) {
             EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
             ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
@@ -79,7 +81,8 @@
     @Test
     public void testIndexDefinition() throws InstantiationException, IllegalAccessException {
 
-        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         if (ed == null) {
             EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
             ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
@@ -97,10 +100,12 @@
 
     @Test
     public void testWriteEmptyIndexFieldAndDeleteWithoutPartition() throws Exception {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         // hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily());
 
-        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         if (ed == null) {
             EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
             ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
@@ -141,14 +146,15 @@
             qualifiers[5] = "f".getBytes();
             qualifiers[6] = "g".getBytes();
 
-            UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null);
+            UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers,
+                                                                   null);
             reader.open();
             InternalLog log = reader.read();
             Assert.assertNotNull(log);
 
             TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed);
             Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass());
-            TestLogAPIEntity e1 = (TestLogAPIEntity) newEntity;
+            TestLogAPIEntity e1 = (TestLogAPIEntity)newEntity;
             Assert.assertEquals(e.getField1(), e1.getField1());
             Assert.assertEquals(e.getField2(), e1.getField2());
             Assert.assertEquals(e.getField3(), e1.getField3());
@@ -161,7 +167,7 @@
             Assert.assertNotNull(log);
             newEntity = HBaseInternalLogHelper.buildEntity(log, ed);
             Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass());
-            e1 = (TestLogAPIEntity) newEntity;
+            e1 = (TestLogAPIEntity)newEntity;
             Assert.assertEquals(e.getField1(), e1.getField1());
             Assert.assertEquals(e.getField2(), e1.getField2());
             Assert.assertEquals(e.getField3(), e1.getField3());
@@ -188,16 +194,18 @@
         // hbase.deleteTable(entityDefinition.getTable());
     }
 
-
     /*
-     *  testWriteEmptyIndexFieldAndDeleteWithPartition(eagle.log.entity.TestTestLogAPIEntity): expected:<86400000> but was:<0>
+     * testWriteEmptyIndexFieldAndDeleteWithPartition(eagle.log.entity.TestTestLogAPIEntity):
+     * expected:<86400000> but was:<0>
      */
-    //@Test
+    // @Test
     public void testWriteEmptyIndexFieldAndDeleteWithPartition() throws Exception {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         // hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily());
 
-        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         if (ed == null) {
             EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
             ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
@@ -244,14 +252,15 @@
             qualifiers[7] = "cluster".getBytes();
             qualifiers[8] = "datacenter".getBytes();
 
-            UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null);
+            UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers,
+                                                                   null);
             reader.open();
             InternalLog log = reader.read();
             Assert.assertNotNull(log);
 
             TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed);
             Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass());
-            TestLogAPIEntity e1 = (TestLogAPIEntity) newEntity;
+            TestLogAPIEntity e1 = (TestLogAPIEntity)newEntity;
             Assert.assertEquals(e.getField1(), e1.getField1());
             Assert.assertEquals(e.getField2(), e1.getField2());
             Assert.assertEquals(e.getField3(), e1.getField3());
@@ -267,7 +276,7 @@
             Assert.assertNotNull(log);
             newEntity = HBaseInternalLogHelper.buildEntity(log, ed);
             Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass());
-            e1 = (TestLogAPIEntity) newEntity;
+            e1 = (TestLogAPIEntity)newEntity;
             Assert.assertEquals(e.getField1(), e1.getField1());
             Assert.assertEquals(e.getField2(), e1.getField2());
             Assert.assertEquals(e.getField3(), e1.getField3());
@@ -298,15 +307,18 @@
     }
 
     /**
-     * testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries(eagle.log.entity.TestTestLogAPIEntity): expected:<1434809555569> but was:<0>
+     * testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries(eagle.log.entity.TestTestLogAPIEntity):
+     * expected:<1434809555569> but was:<0>
      */
 
-    //@Test
+    // @Test
     public void testWriteEmptyIndexFieldAndDeleteWithPartitionAndTimeSeries() throws Exception {
-        EntityDefinition entityDefinition = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         // hbase.createTable(entityDefinition.getTable(), entityDefinition.getColumnFamily());
 
-        EntityDefinition ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+        EntityDefinition ed = EntityDefinitionManager
+            .getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
         if (ed == null) {
             EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
             ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
@@ -357,14 +369,15 @@
             qualifiers[7] = "cluster".getBytes();
             qualifiers[8] = "datacenter".getBytes();
 
-            UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers, null);
+            UniqueIndexLogReader reader = new UniqueIndexLogReader(indexDefs[0], indexRowkeys, qualifiers,
+                                                                   null);
             reader.open();
             InternalLog log = reader.read();
             Assert.assertNotNull(log);
 
             TaggedLogAPIEntity newEntity = HBaseInternalLogHelper.buildEntity(log, ed);
             Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass());
-            TestLogAPIEntity e1 = (TestLogAPIEntity) newEntity;
+            TestLogAPIEntity e1 = (TestLogAPIEntity)newEntity;
             Assert.assertEquals(e.getField1(), e1.getField1());
             Assert.assertEquals(e.getField2(), e1.getField2());
             Assert.assertEquals(e.getField3(), e1.getField3());
@@ -380,7 +393,7 @@
             Assert.assertNotNull(log);
             newEntity = HBaseInternalLogHelper.buildEntity(log, ed);
             Assert.assertEquals(TestLogAPIEntity.class, newEntity.getClass());
-            e1 = (TestLogAPIEntity) newEntity;
+            e1 = (TestLogAPIEntity)newEntity;
             Assert.assertEquals(e.getField1(), e1.getField1());
             Assert.assertEquals(e.getField2(), e1.getField2());
             Assert.assertEquals(e.getField3(), e1.getField3());
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java
index 2b59831..3dfd893 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/base/taggedlog/TestTaggedLogAPIEntity.java
@@ -24,7 +24,6 @@
 

 import com.fasterxml.jackson.core.JsonProcessingException;

 import com.fasterxml.jackson.databind.ObjectMapper;

-import com.fasterxml.jackson.databind.SerializationFeature;

 import org.apache.eagle.log.base.taggedlog.EntityJsonModule;

 import org.apache.eagle.log.base.taggedlog.TaggedLogAPIEntity;

 import org.apache.eagle.log.entity.meta.Column;

@@ -38,13 +37,13 @@
     private static ObjectMapper objectMapper;

 

     @BeforeClass

-    public static void setUp(){

+    public static void setUp() {

         objectMapper = new ObjectMapper();

         objectMapper.setFilters(TaggedLogAPIEntity.getFilterProvider());

         objectMapper.registerModule(new EntityJsonModule());

     }

 

-    @JsonSerialize(include=JsonSerialize.Inclusion.NON_NULL)

+    @JsonSerialize(include = JsonSerialize.Inclusion.NON_NULL)

     private class MockSubTaggedLogAPIEntity extends TaggedLogAPIEntity {

         public double getField1() {

             return field1;

@@ -72,7 +71,7 @@
     }

 

     @SuppressWarnings("unchecked")

-	@Test

+    @Test

     public void testJsonSerializeFilter() throws IOException {

         MockSubTaggedLogAPIEntity mock = new MockSubTaggedLogAPIEntity();

         Assert.assertTrue(mock instanceof TaggedLogAPIEntity);

@@ -91,12 +90,16 @@
         mock.setPrefix("mock");

         mock.setField2("ok");

         mock.setField1(12.345);

-        mock.setTags(new HashMap<String, String>(){{

-            put("tagName", "tagValue");

-        }});

-        mock.setExp(new HashMap<String, Object>() {{

-            put("extra_field", 3.14);

-        }});

+        mock.setTags(new HashMap<String, String>() {

+            {

+                put("tagName", "tagValue");

+            }

+        });

+        mock.setExp(new HashMap<String, Object>() {

+            {

+                put("extra_field", 3.14);

+            }

+        });

         json = objectMapper.writeValueAsString(mock);

         System.out.println(json);

         Assert.assertTrue(json.contains("field2"));

@@ -105,13 +108,13 @@
 

     @Test

     public void testJsonSerializeMap() throws JsonProcessingException {

-        Map<List<String>,List<Object>> entries = new HashMap<List<String>,List<Object>>(){

+        Map<List<String>, List<Object>> entries = new HashMap<List<String>, List<Object>>() {

             {

-                put(Arrays.asList("a","b"),Arrays.asList(1,2,3));

+                put(Arrays.asList("a", "b"), Arrays.asList(1, 2, 3));

             }

         };

         String json = objectMapper.writeValueAsString(entries.entrySet());

         Assert.assertNotNull(json);

         System.out.print(json);

     }

-}
\ No newline at end of file
+}

diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java
index e22c699..63dcca2 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestEntityQualifierHelper.java
@@ -28,155 +28,156 @@
 import java.util.List;
 
 /**
-* @since : 10/15/14 2014
-*/
+ * @since : 10/15/14 2014
+ */
 public class TestEntityQualifierHelper {
-	private EntityDefinition ed;
-	@Before
-	public void setUp(){
-		try {
-			if(EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity") == null){
-				EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
-			}
-			ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
-		} catch (InstantiationException e) {
-			e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-		} catch (IllegalAccessException e) {
-			e.printStackTrace();  //To change body of catch statement use File | Settings | File Templates.
-		}
-	}
+    private EntityDefinition ed;
 
-	@Test
-	public void testEd(){
-		Assert.assertNotNull(ed);
-		Assert.assertNotNull(ed.getQualifierNameMap().get("a"));
-		Assert.assertNull(ed.getQualifierNameMap().get("notexist"));
-	}
+    @Before
+    public void setUp() {
+        try {
+            if (EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity") == null) {
+                EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
+            }
+            ed = EntityDefinitionManager.getEntityByServiceName("TestLogAPIEntity");
+        } catch (InstantiationException e) {
+            e.printStackTrace(); // To change body of catch statement use File | Settings | File Templates.
+        } catch (IllegalAccessException e) {
+            e.printStackTrace(); // To change body of catch statement use File | Settings | File Templates.
+        }
+    }
 
-	@Test
-	public void  testIntEntityQualifierHelper(){
-		byte[] value = EntityQualifierUtils.toBytes(ed, "field1", "2");
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1)) > 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2)) == 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(3)) < 0);
-	}
+    @Test
+    public void testEd() {
+        Assert.assertNotNull(ed);
+        Assert.assertNotNull(ed.getQualifierNameMap().get("a"));
+        Assert.assertNull(ed.getQualifierNameMap().get("notexist"));
+    }
 
-	@Test
-	public void  testStringEntityQualifierHelper(){
-		byte[] value = EntityQualifierUtils.toBytes(ed, "field7", "xyz");
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyy")) > 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyz")) == 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xzz")) < 0);
+    @Test
+    public void testIntEntityQualifierHelper() {
+        byte[] value = EntityQualifierUtils.toBytes(ed, "field1", "2");
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1)) > 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2)) == 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(3)) < 0);
+    }
 
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xy")) > 0);
-	}
+    @Test
+    public void testStringEntityQualifierHelper() {
+        byte[] value = EntityQualifierUtils.toBytes(ed, "field7", "xyz");
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyy")) > 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xyz")) == 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xzz")) < 0);
 
-	@Test
-	public void  testDoubleEntityQualifierHelper(){
-		byte[] value = EntityQualifierUtils.toBytes(ed, "field5", "1.0");
-		Assert.assertTrue(Bytes.compareTo(value,Bytes.toBytes(0.5)) > 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1.0)) == 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2.2)) < 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes("xy")) > 0);
+    }
 
-//      TODO There is problem with negative double
-//		Assert.assertTrue(Bytes.compareTo(Bytes.toBytes(-0.6),Bytes.toBytes(-0.5)) < 0);
-	}
+    @Test
+    public void testDoubleEntityQualifierHelper() {
+        byte[] value = EntityQualifierUtils.toBytes(ed, "field5", "1.0");
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(0.5)) > 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(1.0)) == 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(2.2)) < 0);
 
-	@Test
-	public void  testLongEntityQualifierHelper(){
-		byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "100000");
-		Assert.assertTrue(Bytes.compareTo(value,Bytes.toBytes(100000l-1l )) > 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l)) == 0);
-		Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l + 1l)) < 0);
-	}
+        // TODO There is problem with negative double
+        // Assert.assertTrue(Bytes.compareTo(Bytes.toBytes(-0.6),Bytes.toBytes(-0.5)) < 0);
+    }
 
-	@Test
-	public void  testNegativeLongEntityQualifierHelper(){
-		Exception ex = null;
-		try{
-			byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "-100000");
-		}catch (IllegalArgumentException e){
-			ex = e;
-		}
-		Assert.assertNull(ex);
-	}
+    @Test
+    public void testLongEntityQualifierHelper() {
+        byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "100000");
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l - 1l)) > 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l)) == 0);
+        Assert.assertTrue(Bytes.compareTo(value, Bytes.toBytes(100000l + 1l)) < 0);
+    }
 
-	@Test
-	public void testParseAsList(){
-		List<String> set = EntityQualifierUtils.parseList("(\"abc1\",\"abc2\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("abc1",set.toArray()[0]);
-		Assert.assertEquals("abc2",set.toArray()[1]);
+    @Test
+    public void testNegativeLongEntityQualifierHelper() {
+        Exception ex = null;
+        try {
+            byte[] value = EntityQualifierUtils.toBytes(ed, "field4", "-100000");
+        } catch (IllegalArgumentException e) {
+            ex = e;
+        }
+        Assert.assertNull(ex);
+    }
 
-		set = EntityQualifierUtils.parseList("(1,\"abc2\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("1",set.toArray()[0]);
-		Assert.assertEquals("abc2",set.toArray()[1]);
+    @Test
+    public void testParseAsList() {
+        List<String> set = EntityQualifierUtils.parseList("(\"abc1\",\"abc2\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("abc1", set.toArray()[0]);
+        Assert.assertEquals("abc2", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(-1.5,\"abc2\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("-1.5",set.toArray()[0]);
-		Assert.assertEquals("abc2",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(1,\"abc2\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("1", set.toArray()[0]);
+        Assert.assertEquals("abc2", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,abc\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("-1.5",set.toArray()[0]);
-		Assert.assertEquals("-1.5,abc",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(-1.5,\"abc2\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("-1.5", set.toArray()[0]);
+        Assert.assertEquals("abc2", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(-1.5,\"\\\"abc\\\"\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("-1.5",set.toArray()[0]);
-		Assert.assertEquals("\"abc\"",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,abc\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("-1.5", set.toArray()[0]);
+        Assert.assertEquals("-1.5,abc", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\\\"abc\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("-1.5",set.toArray()[0]);
-		Assert.assertEquals("-1.5,\"abc",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(-1.5,\"\\\"abc\\\"\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("-1.5", set.toArray()[0]);
+        Assert.assertEquals("\"abc\"", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(\"\\\"-1.5\\\",abc1\",\"-1.5,\\\"abc2\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("\"-1.5\",abc1",set.toArray()[0]);
-		Assert.assertEquals("-1.5,\"abc2",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\\\"abc\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("-1.5", set.toArray()[0]);
+        Assert.assertEquals("-1.5,\"abc", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\"abc\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("-1.5",set.toArray()[0]);
-		Assert.assertEquals("-1.5,\"abc",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(\"\\\"-1.5\\\",abc1\",\"-1.5,\\\"abc2\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("\"-1.5\",abc1", set.toArray()[0]);
+        Assert.assertEquals("-1.5,\"abc2", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(\"\\\"value1,part1\\\",\\\"value1,part2\\\"\",\"value2\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("\"value1,part1\",\"value1,part2\"",set.toArray()[0]);
-		Assert.assertEquals("value2",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(-1.5,\"-1.5,\"abc\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("-1.5", set.toArray()[0]);
+        Assert.assertEquals("-1.5,\"abc", set.toArray()[1]);
 
-		////////////////////////////////
-		// Bad Format
-		////////////////////////////////
-		set = EntityQualifierUtils.parseList("(\"a,b)");
-		Assert.assertEquals(1,set.size());
-		Assert.assertEquals("a,b",set.toArray()[0]);
+        set = EntityQualifierUtils.parseList("(\"\\\"value1,part1\\\",\\\"value1,part2\\\"\",\"value2\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("\"value1,part1\",\"value1,part2\"", set.toArray()[0]);
+        Assert.assertEquals("value2", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(a,b\")");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("a",set.toArray()[0]);
-		Assert.assertEquals("b",set.toArray()[1]);
+        ////////////////////////////////
+        // Bad Format
+        ////////////////////////////////
+        set = EntityQualifierUtils.parseList("(\"a,b)");
+        Assert.assertEquals(1, set.size());
+        Assert.assertEquals("a,b", set.toArray()[0]);
 
-		set = EntityQualifierUtils.parseList("(a\",b)");
-		Assert.assertEquals(1,set.size());
-		Assert.assertEquals("a\",b",set.toArray()[0]);
+        set = EntityQualifierUtils.parseList("(a,b\")");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("a", set.toArray()[0]);
+        Assert.assertEquals("b", set.toArray()[1]);
 
-		set = EntityQualifierUtils.parseList("(abc,def)");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("abc",set.toArray()[0]);
-		Assert.assertEquals("def",set.toArray()[1]);
+        set = EntityQualifierUtils.parseList("(a\",b)");
+        Assert.assertEquals(1, set.size());
+        Assert.assertEquals("a\",b", set.toArray()[0]);
 
-		set = EntityQualifierUtils.parseList("(1.5,def)");
-		Assert.assertEquals(2,set.size());
-		Assert.assertEquals("1.5",set.toArray()[0]);
-		Assert.assertEquals("def",set.toArray()[1]);
-	}
+        set = EntityQualifierUtils.parseList("(abc,def)");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("abc", set.toArray()[0]);
+        Assert.assertEquals("def", set.toArray()[1]);
 
-//	@Test
-//	public void testEscapeRegExp(){
-//		Assert.assertEquals("abc\\.def",EntityQualifierHelper.escapeRegExp("abc.def"));
-//	}
-}
\ No newline at end of file
+        set = EntityQualifierUtils.parseList("(1.5,def)");
+        Assert.assertEquals(2, set.size());
+        Assert.assertEquals("1.5", set.toArray()[0]);
+        Assert.assertEquals("def", set.toArray()[1]);
+    }
+
+    // @Test
+    // public void testEscapeRegExp(){
+    // Assert.assertEquals("abc\\.def",EntityQualifierHelper.escapeRegExp("abc.def"));
+    // }
+}
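
Regarding the `TODO There is problem with negative double` comment kept in this file: HBase's `Bytes.toBytes(double)` stores the raw IEEE-754 bit pattern, and `Bytes.compareTo` performs an unsigned lexicographic comparison, so negative doubles sort after positive ones and in reverse order among themselves. A small standalone sketch of the effect (not part of the test suite):

```java
// Illustrative sketch only: raw IEEE-754 encoding does not preserve numeric order
// for negative doubles under unsigned lexicographic byte comparison, which is why
// the commented-out assertion in TestEntityQualifierHelper cannot hold as written.
import org.apache.hadoop.hbase.util.Bytes;

public class NegativeDoubleOrderingSketch {
    public static void main(String[] args) {
        // Numerically -0.6 < -0.5, but the raw-bit encoding compares the other way round.
        System.out.println(Bytes.compareTo(Bytes.toBytes(-0.6), Bytes.toBytes(-0.5)) > 0); // true
        // A negative double also sorts after a positive one under this encoding.
        System.out.println(Bytes.compareTo(Bytes.toBytes(-0.5), Bytes.toBytes(0.5)) > 0);  // true
    }
}
```
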
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java
index 37adb55..8a9a4b4 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestExpressionComparator.java
@@ -27,7 +27,7 @@
 

 public class TestExpressionComparator {

     @Test

-    public void testCompareToForEval(){

+    public void testCompareToForEval() {

         QualifierFilterEntity entity = new QualifierFilterEntity();

         // a+b >= a+100.0

         entity.setKey("a/b");

@@ -36,36 +36,38 @@
         entity.setValueType(TokenType.EXP);

         entity.setOp(ComparisonOperator.GREATER_OR_EQUAL);

         EntityDefinition qualifierDisplayNameMap = null;

-        BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap);

+        BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity,

+                                                                                 qualifierDisplayNameMap);

 

-        Map<String,Double> context = new HashMap<String,Double>();

-        Assert.assertEquals("Should return 0 because not given enough variable",0,comparator.compareTo(context));

+        Map<String, Double> context = new HashMap<String, Double>();

+        Assert.assertEquals("Should return 0 because not given enough variable", 0,

+                            comparator.compareTo(context));

 

         context.put("a", 80.0);

-        context.put("b",20.0);

-        context.put("c",3.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("b", 20.0);

+        context.put("c", 3.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",80.0);

-        context.put("b",20.0);

-        context.put("c",4.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 80.0);

+        context.put("b", 20.0);

+        context.put("c", 4.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",80.0);

-        context.put("b",20.0);

-        context.put("c",5.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 80.0);

+        context.put("b", 20.0);

+        context.put("c", 5.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

         // Return false once any Double.isInfinite ( 80.0 / 0.0 )

-        Assert.assertTrue(Double.isInfinite( 80.0 / 0.0 ));

-        context.put("a",80.0);

-        context.put("b",0.0);

+        Assert.assertTrue(Double.isInfinite(80.0 / 0.0));

+        context.put("a", 80.0);

+        context.put("b", 0.0);

         context.put("c", 5.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        Assert.assertEquals(0, comparator.compareTo(context));

     }

 

     @Test

-    public void testCompareToForOp(){

+    public void testCompareToForOp() {

         QualifierFilterEntity entity = new QualifierFilterEntity();

 

         // a+b >= a+100.0

@@ -74,122 +76,123 @@
         entity.setOp(ComparisonOperator.GREATER_OR_EQUAL);

         EntityDefinition qualifierDisplayNameMap = new EntityDefinition();

 

-        BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap);

+        BooleanExpressionComparator comparator = new BooleanExpressionComparator(entity,

+                                                                                 qualifierDisplayNameMap);

 

-        Map<String,Double> context = new HashMap<String,Double>();

-        context.put("a",100.1);

-        context.put("b",100.1);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        Map<String, Double> context = new HashMap<String, Double>();

+        context.put("a", 100.1);

+        context.put("b", 100.1);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",100.1);

-        context.put("b",100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",100.0);

-        context.put("b",99.9);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.0);

+        context.put("b", 99.9);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", 100.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",-100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", -100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

         // a+b = a+100.0

         entity.setOp(ComparisonOperator.GREATER);

-        comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap);

+        comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap);

 

-        context.put("a",100.1);

-        context.put("b",100.1);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.1);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",100.1);

-        context.put("b",100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",100.0);

-        context.put("b",99.9);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.0);

+        context.put("b", 99.9);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", 100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",-100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", -100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

         // a+b = a+100.0

         entity.setOp(ComparisonOperator.LESS);

-        comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap);

+        comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap);

 

-        context.put("a",100.1);

-        context.put("b",100.1);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.1);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",100.1);

-        context.put("b",100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",100.0);

-        context.put("b",99.9);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.0);

+        context.put("b", 99.9);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", 100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",-100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", -100.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

         // a+b <= a+100.0

         entity.setOp(ComparisonOperator.LESS_OR_EQUAL);

-        comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap);

+        comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap);

 

-        context.put("a",100.1);

-        context.put("b",100.1);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.1);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",100.1);

-        context.put("b",100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",100.0);

-        context.put("b",99.9);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.0);

+        context.put("b", 99.9);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", 100.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",-100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", -100.0);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

         entity.setOp(ComparisonOperator.NOT_EQUAL);

-        comparator = new BooleanExpressionComparator(entity,qualifierDisplayNameMap);

+        comparator = new BooleanExpressionComparator(entity, qualifierDisplayNameMap);

 

-        context.put("a",100.1);

-        context.put("b",100.1);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.1);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",100.1);

-        context.put("b",100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", 100.1);

+        context.put("b", 100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",100.0);

-        context.put("b",99.9);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        context.put("a", 100.0);

+        context.put("b", 99.9);

+        Assert.assertEquals(1, comparator.compareTo(context));

 

-        context.put("a",-200.0);

-        context.put("b",100.0);

-        Assert.assertEquals(0,comparator.compareTo(context));

+        context.put("a", -200.0);

+        context.put("b", 100.0);

+        Assert.assertEquals(0, comparator.compareTo(context));

 

-        context.put("a",-200.0);

+        context.put("a", -200.0);

         context.put("b", -100.0);

-        Assert.assertEquals(1,comparator.compareTo(context));

+        Assert.assertEquals(1, comparator.compareTo(context));

     }

 }

diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java
index 30e4523..93602a5 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestHBaseFilterBuilder.java
@@ -32,251 +32,258 @@
 import org.slf4j.LoggerFactory;
 
 public class TestHBaseFilterBuilder {
-	private final static Logger LOG = LoggerFactory.getLogger(TestHBaseFilterBuilder.class);
-	private EntityDefinition ed;
+    private static final Logger LOG = LoggerFactory.getLogger(TestHBaseFilterBuilder.class);
+    private EntityDefinition ed;
 
-	private Filter buildFilter(String query) throws EagleQueryParseException {
-		ORExpression expression = new EagleQueryParser(query).parse();
-		HBaseFilterBuilder builder = new HBaseFilterBuilder(ed,expression);
-		Filter filterList =  builder.buildFilters();
-		LOG.info("\n" + expression + " \n=> " + filterList);
-		return filterList;
-	}
+    private Filter buildFilter(String query) throws EagleQueryParseException {
+        ORExpression expression = new EagleQueryParser(query).parse();
+        HBaseFilterBuilder builder = new HBaseFilterBuilder(ed, expression);
+        Filter filterList = builder.buildFilters();
+        LOG.info("\n" + expression + " \n=> " + filterList);
+        return filterList;
+    }
 
-	@Before
-	public void setUp(){
-		try {
-			ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
-			if(ed == null){
-				EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
-				ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
-			}
-		} catch (InstantiationException e) {
-			Assert.fail(e.getMessage());
-		} catch (IllegalAccessException e) {
-			Assert.fail(e.getMessage());
-		}
-	}
+    @Before
+    public void setUp() {
+        try {
+            ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+            if (ed == null) {
+                EntityDefinitionManager.registerEntity(TestLogAPIEntity.class);
+                ed = EntityDefinitionManager.getEntityDefinitionByEntityClass(TestLogAPIEntity.class);
+            }
+        } catch (InstantiationException e) {
+            Assert.fail(e.getMessage());
+        } catch (IllegalAccessException e) {
+            Assert.fail(e.getMessage());
+        }
+    }
 
-	/**
-	 * Should success without exception
-	 */
-	@Test
-	public void testQueryParseAndBuildFilterSuccess(){
-		String[] queries = new String[]{
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job_1234\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"PigLatin: \\\"quoted_pig_job_name_value\\\"\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\",\"job_4567\")",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"job_4567\")",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"sample job name: \\\"quoted_job_name_value\\\"\")",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS \"job_1234\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS job_1234",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID NOT CONTAINS \"job_456\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is \"job_789\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not \"job_789\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is null",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not null",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is NULL",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not NULL",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = NULL",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID != null",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \".*job_1234.*\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \".*job_1234.*\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \"\\\\|_\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 ",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field3 = 100000",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 = 1.56",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 > 1.56",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 >= 1.56",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 < 1.56",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 <= 1.56",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 in (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 not in (\"100000\",\"1\"))\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 NOT IN (\"100000\",\"1\"))\"",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 NOT IN (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"",
-			// expression filter
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > 12",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field5 > EXP{field3/field7 - field2}",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}",
-			"@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}",
-		};
-		for(String query: queries){
-			try {
-				Filter filter = buildFilter(query);
-				Assert.assertNotNull(filter);
-			} catch (EagleQueryParseException e) {
-				Assert.fail(e.getMessage());
-			} catch (Exception ex){
-				Assert.fail(ex.getMessage());
-			}
-		}
-	}
+    /**
+     * Should succeed without exception.
+     */
+    @Test
+    public void testQueryParseAndBuildFilterSuccess() {
+        String[] queries = new String[] {
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job_1234\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"PigLatin: \\\"quoted_pig_job_name_value\\\"\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\",\"job_4567\")",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"job_4567\")",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (1234,\"sample job name: \\\"quoted_job_name_value\\\"\")",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS \"job_1234\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID CONTAINS job_1234",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID NOT CONTAINS \"job_456\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is \"job_789\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not \"job_789\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is null",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not null",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID is not NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID != null",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \".*job_1234.*\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \".*job_1234.*\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID !=~ \"\\\\|_\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 ",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field3 = 100000",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 = 1.56",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 > 1.56",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 >= 1.56",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 < 1.56",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 = 1 and @field5 <= 1.56",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 in (\"100000\",\"1\"))\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 in (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 not in (\"100000\",\"1\"))\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 NOT IN (\"100000\",\"1\"))\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field7 NOT IN (\"\\\"value1-part1,value1-part2\\\"\",\"value2\"))\"",
+                                         // expression filter
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > 12",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field5 > EXP{field3/field7 - field2}",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and EXP{field3/field7 - field2} > EXP{field1 * field2}",
+        };
+        for (String query : queries) {
+            try {
+                Filter filter = buildFilter(query);
+                Assert.assertNotNull(filter);
+            } catch (EagleQueryParseException e) {
+                Assert.fail(e.getMessage());
+            } catch (Exception ex) {
+                Assert.fail(ex.getMessage());
+            }
+        }
+    }
 
-	/**
-	 * Should throw exception
-	 */
-	@Test
-	public void testNegativeQueryParseSuccessfullyButBuildFilterFailed(){
-		String[] queries = new String[]{
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag < \"job_1234\"",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag <= \"job_1234\"",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag >= \"job_1234\"",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 < null",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 <= null",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 > NULL",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 >= NULL",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 =~ NULL",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 !=~ NULL",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 contains NULL",
-				"@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 not contains NULL"
-		};
-		for(String query: queries){
-			try {
-				@SuppressWarnings("unused")
-				Filter filter = buildFilter(query);
-				Assert.fail("Should throw exception: "+query);
-			} catch (IllegalArgumentException e) {
-				LOG.info("Expect exception: " + e.getMessage());
-			} catch (EagleQueryParseException e) {
-				Assert.fail("Should parse successfully: "+query);
-			}
-		}
-	}
+    /**
+     * Should throw exception
+     */
+    @Test
+    public void testNegativeQueryParseSuccessfullyButBuildFilterFailed() {
+        String[] queries = new String[] {
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag < \"job_1234\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag <= \"job_1234\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @tag >= \"job_1234\"",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 < null",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 <= null",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 > NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 >= NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 =~ NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 !=~ NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 contains NULL",
+                                         "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field1 not contains NULL"
+        };
+        for (String query : queries) {
+            try {
+                @SuppressWarnings("unused")
+                Filter filter = buildFilter(query);
+                Assert.fail("Should throw exception: " + query);
+            } catch (IllegalArgumentException e) {
+                LOG.info("Expect exception: " + e.getMessage());
+            } catch (EagleQueryParseException e) {
+                Assert.fail("Should parse successfully: " + query);
+            }
+        }
+    }
 
-	@Test
-	public void testParsedFilter(){
-		String q1 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field3 = 100000";
-		try {
-			FilterList filterList = (FilterList) buildFilter(q1);
-			Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE,filterList.getOperator());
-			Assert.assertEquals(1,filterList.getFilters().size());
-			Assert.assertEquals(2,((FilterList) filterList.getFilters().get(0)).getFilters().size());
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		}
+    @Test
+    public void testParsedFilter() {
+        String q1 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field3 = 100000";
+        try {
+            FilterList filterList = (FilterList)buildFilter(q1);
+            Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE, filterList.getOperator());
+            Assert.assertEquals(1, filterList.getFilters().size());
+            Assert.assertEquals(2, ((FilterList)filterList.getFilters().get(0)).getFilters().size());
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        }
 
-		String q2 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)";
-		try {
-			FilterList filterList = (FilterList) buildFilter(q2);
-			Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE,filterList.getOperator());
-			Assert.assertEquals(2,filterList.getFilters().size());
-			Assert.assertEquals(2,((FilterList) filterList.getFilters().get(0)).getFilters().size());
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		}
+        String q2 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < 100000)";
+        try {
+            FilterList filterList = (FilterList)buildFilter(q2);
+            Assert.assertEquals(FilterList.Operator.MUST_PASS_ONE, filterList.getOperator());
+            Assert.assertEquals(2, filterList.getFilters().size());
+            Assert.assertEquals(2, ((FilterList)filterList.getFilters().get(0)).getFilters().size());
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        }
 
-		// Test parse success but bad type of value
-		String q3 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < \"bad_int_100000\")";
-		boolean q3Ex = false;
-		try {
-			Assert.assertNull(buildFilter(q3));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (IllegalArgumentException e){
-			LOG.debug("Expect: ", e);
-			Assert.assertTrue(e.getCause() instanceof NumberFormatException);
-			q3Ex = true;
-		}
-		Assert.assertTrue(q3Ex);
-	}
+        // Test parse success but bad type of value
+        String q3 = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and ( @field3 = 100000 or @field3 < \"bad_int_100000\")";
+        boolean q3Ex = false;
+        try {
+            Assert.assertNull(buildFilter(q3));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (IllegalArgumentException e) {
+            LOG.debug("Expect: ", e);
+            Assert.assertTrue(e.getCause() instanceof NumberFormatException);
+            q3Ex = true;
+        }
+        Assert.assertTrue(q3Ex);
+    }
 
-	@Test
-	public void testWithUnescapedString(){
-		///////////////////////////////////
-		// Tag filter with IN or EQUAL
-		// Should use RowKeyFilter only
-		///////////////////////////////////
-		String query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job.1234\"";
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertFalse("Should use rowkey filter only",filter.toString().matches(".*job.1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			Assert.fail(ex.getMessage());
-		}
+    @Test
+    public void testWithUnescapedString() {
+        ///////////////////////////////////
+        // Tag filter with IN or EQUAL
+        // Should use RowKeyFilter only
+        ///////////////////////////////////
+        String query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID = \"job.1234\"";
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertFalse("Should use rowkey filter only", filter.toString().matches(".*job.1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            Assert.fail(ex.getMessage());
+        }
 
-		query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\")";
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertFalse("Should use rowkey filter only",filter.toString().matches(".*job_1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			Assert.fail(ex.getMessage());
-		}
+        query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job_1234\")";
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertFalse("Should use rowkey filter only", filter.toString().matches(".*job_1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            Assert.fail(ex.getMessage());
+        }
 
-		query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job.1234\")";
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertFalse("Should use rowkey filter only",filter.toString().matches(".*job.*1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			Assert.fail(ex.getMessage());
-		}
+        query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID in (\"job.1234\")";
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertFalse("Should use rowkey filter only", filter.toString().matches(".*job.*1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            Assert.fail(ex.getMessage());
+        }
 
-		///////////////////////////////
-		// Tag with other operators
-		///////////////////////////////
-		query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job_1234\"";
+        ///////////////////////////////
+        // Tag with other operators
+        ///////////////////////////////
+        query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job_1234\"";
 
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertTrue(filter.toString().matches(".*job_1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			Assert.fail(ex.getMessage());
-		}
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertTrue(filter.toString().matches(".*job_1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            Assert.fail(ex.getMessage());
+        }
 
-		query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job.1234\"";
+        query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @jobID =~ \"job.1234\"";
 
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertTrue(filter.toString().matches(".*job.1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			Assert.fail(ex.getMessage());
-		}
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertTrue(filter.toString().matches(".*job.1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            Assert.fail(ex.getMessage());
+        }
 
-		///////////////////////////////
-		// Tag with IN
-		// Should escape regexp chars
-		///////////////////////////////
-		query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 = \"job_1234\"";
+        ///////////////////////////////
+        // Tag with IN
+        // Should escape regexp chars
+        ///////////////////////////////
+        query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 = \"job_1234\"";
 
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertTrue(filter.toString().matches(".*job_1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			ex.printStackTrace();
-			Assert.fail(ex.getMessage());
-		}
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertTrue(filter.toString().matches(".*job_1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            ex.printStackTrace();
+            Assert.fail(ex.getMessage());
+        }
 
-		query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 in (\"job.1234\",\"others\")";
+        query = "@cluster = \"cluster1\" and @datacenter = \"dc1\" and @field7 in (\"job.1234\",\"others\")";
 
-		try {
-			FilterList filter = (FilterList) buildFilter(query);
-			Assert.assertEquals(RowFilter.class, ((FilterList) filter.getFilters().get(0)).getFilters().get(0).getClass());
-			Assert.assertTrue(filter.toString().matches(".*job\\.1234.*"));
-		} catch (EagleQueryParseException e) {
-			Assert.fail(e.getMessage());
-		} catch (Exception ex){
-			Assert.fail(ex.getMessage());
-		}
-	}
+        try {
+            FilterList filter = (FilterList)buildFilter(query);
+            Assert.assertEquals(RowFilter.class,
+                                ((FilterList)filter.getFilters().get(0)).getFilters().get(0).getClass());
+            Assert.assertTrue(filter.toString().matches(".*job\\.1234.*"));
+        } catch (EagleQueryParseException e) {
+            Assert.fail(e.getMessage());
+        } catch (Exception ex) {
+            Assert.fail(ex.getMessage());
+        }
+    }
 }
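
For readers unfamiliar with what the assertions above check: the query compiler is expected to emit an outer OR (`MUST_PASS_ONE`) `FilterList` whose entries are AND lists, with the tag conditions folded into a `RowFilter` on the row key. Below is a rough, hand-built sketch of that shape using the stock HBase client filter API; the class name is illustrative only, and the real `buildFilter` may compose additional filter types.

```java
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;

// Hand-built sketch of the filter shape the assertions above expect from buildFilter:
// an outer OR (MUST_PASS_ONE) list whose entries are AND lists, with tag conditions
// compiled into a RowFilter over the row key. Illustrative only; the real query
// compiler may add further filter types inside the AND list.
public final class FilterShapeSketch {

    static FilterList sampleShape() {
        FilterList and = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        and.addFilter(new RowFilter(CompareOp.EQUAL, new RegexStringComparator(".*job_1234.*")));
        // ... filters for non-tag fields (e.g. @field3 = 100000) would be added here ...

        FilterList or = new FilterList(FilterList.Operator.MUST_PASS_ONE);
        or.addFilter(and);
        return or;
    }
}
```
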
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java
index 1c0f416..c680f8f 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/filter/TestTypedByteArrayComparator.java
@@ -28,23 +28,24 @@
  */
 public class TestTypedByteArrayComparator {
     @Test
-    public void testCompare(){
+    public void testCompare() {
         EntitySerDeser serDeser = new DoubleSerDeser();
-        TypedByteArrayComparator comparator = new TypedByteArrayComparator(serDeser.serialize(0.9),serDeser.type());
+        TypedByteArrayComparator comparator = new TypedByteArrayComparator(serDeser.serialize(0.9),
+                                                                           serDeser.type());
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(0.8)) > 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(1.1)) < 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(0.9)) == 0);
-        Assert.assertTrue(comparator.compareTo(serDeser.serialize(- 0.9)) > 0);
+        Assert.assertTrue(comparator.compareTo(serDeser.serialize(-0.9)) > 0);

         serDeser = new IntSerDeser();
-        comparator = new TypedByteArrayComparator(serDeser.serialize(9),serDeser.type());
+        comparator = new TypedByteArrayComparator(serDeser.serialize(9), serDeser.type());
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(8)) > 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(11)) < 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(9)) == 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(-9)) > 0);

         serDeser = new LongSerDeser();
-        comparator = new TypedByteArrayComparator(serDeser.serialize(9l),serDeser.type());
+        comparator = new TypedByteArrayComparator(serDeser.serialize(9l), serDeser.type());
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(8l)) > 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(11l)) < 0);
         Assert.assertTrue(comparator.compareTo(serDeser.serialize(9l)) == 0);
@@ -52,10 +53,10 @@
     }

     @Test
-    public void testClassName(){
-        Assert.assertEquals("long",long.class.getName());
+    public void testClassName() {
+        Assert.assertEquals("long", long.class.getName());
         Assert.assertEquals("java.lang.Long", Long.class.getName());
-        Assert.assertEquals("long",long.class.toString());
+        Assert.assertEquals("long", long.class.toString());
         Assert.assertEquals("class java.lang.Long", Long.class.toString());
     }
 }
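
The `compareTo` assertions above only make sense if the comparator interprets the bytes according to the declared type; raw lexicographic byte comparison would order serialized `-0.9` after `0.9` because of the IEEE 754 sign bit. A minimal plain-JDK illustration of that idea follows; `TypedCompareSketch` is a hypothetical name, not the real `TypedByteArrayComparator`.

```java
import java.nio.ByteBuffer;

// Hypothetical illustration (not the real TypedByteArrayComparator): serialized
// doubles cannot be compared as raw bytes, because the sign bit makes -0.9 sort
// after 0.9 lexicographically. A typed comparator therefore deserializes the
// operands according to the declared type before comparing them.
public final class TypedCompareSketch {

    static int compareAsDouble(byte[] left, byte[] right) {
        return Double.compare(ByteBuffer.wrap(left).getDouble(),
                              ByteBuffer.wrap(right).getDouble());
    }

    public static void main(String[] args) {
        byte[] positive = ByteBuffer.allocate(8).putDouble(0.9).array();
        byte[] negative = ByteBuffer.allocate(8).putDouble(-0.9).array();
        System.out.println(compareAsDouble(positive, negative) > 0); // true, as asserted above
    }
}
```
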

diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java
index 98db12f..eb31b15 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestArraySerDeser.java
@@ -21,44 +21,50 @@
 import org.junit.Test;
 
 public class TestArraySerDeser {
-	
-	@Test
-	public void testIntArraySerDeser(){
-		IntArraySerDeser serDeser = new IntArraySerDeser();
-		int[] ints = new int[] {1, 34, 21, 82};
-		byte[] bytes = serDeser.serialize(ints);
-		Assert.assertEquals((ints.length+1)*4, bytes.length);
-		int[] targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(ints.length, targets.length);
-		for(int i=0; i<ints.length; i++){
-			Assert.assertEquals(ints[i], targets[i]);
-		}
-	}
-	
-	@Test
-	public void testDoubleArraySerDeser(){
-		DoubleArraySerDeser serDeser = new DoubleArraySerDeser();
-		double[] doubles = new double[] {1.0, 34.0, 21.0, 82.0};
-		byte[] bytes = serDeser.serialize(doubles);
-		Assert.assertEquals(4 + doubles.length*8, bytes.length);
-		double[] targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(doubles.length, targets.length);
-		for(int i=0; i<doubles.length; i++){
-			Assert.assertEquals(doubles[i], targets[i], 0.0001);
-		}
-	}
 
-	@Test
-	public void testStringArraySerDeser(){
-		StringArraySerDeser serDeser = new StringArraySerDeser();
-		String[] sources = new String[] {"a", "", "1", "2", "3"};
-		byte[] bytes = serDeser.serialize(sources);
-		Assert.assertEquals(4 + sources.length*4 + 4, bytes.length);
-		String[] targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.length, targets.length);
-		for(int i=0; i<sources.length; i++){
-			Assert.assertEquals(sources[i], targets[i]);
-		}
-	}
+    @Test
+    public void testIntArraySerDeser() {
+        IntArraySerDeser serDeser = new IntArraySerDeser();
+        int[] ints = new int[] {
+                                1, 34, 21, 82
+        };
+        byte[] bytes = serDeser.serialize(ints);
+        Assert.assertEquals((ints.length + 1) * 4, bytes.length);
+        int[] targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(ints.length, targets.length);
+        for (int i = 0; i < ints.length; i++) {
+            Assert.assertEquals(ints[i], targets[i]);
+        }
+    }
+
+    @Test
+    public void testDoubleArraySerDeser() {
+        DoubleArraySerDeser serDeser = new DoubleArraySerDeser();
+        double[] doubles = new double[] {
+                                         1.0, 34.0, 21.0, 82.0
+        };
+        byte[] bytes = serDeser.serialize(doubles);
+        Assert.assertEquals(4 + doubles.length * 8, bytes.length);
+        double[] targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(doubles.length, targets.length);
+        for (int i = 0; i < doubles.length; i++) {
+            Assert.assertEquals(doubles[i], targets[i], 0.0001);
+        }
+    }
+
+    @Test
+    public void testStringArraySerDeser() {
+        StringArraySerDeser serDeser = new StringArraySerDeser();
+        String[] sources = new String[] {
+                                         "a", "", "1", "2", "3"
+        };
+        byte[] bytes = serDeser.serialize(sources);
+        Assert.assertEquals(4 + sources.length * 4 + 4, bytes.length);
+        String[] targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.length, targets.length);
+        for (int i = 0; i < sources.length; i++) {
+            Assert.assertEquals(sources[i], targets[i]);
+        }
+    }
 
 }
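
The size assertions above are consistent with a simple length-prefixed layout: a 4-byte element count followed by fixed-width values, giving `(n + 1) * 4` bytes for `int[]` and `4 + n * 8` for `double[]`. A minimal sketch of that layout in plain JDK code follows; the class name is hypothetical and the real `IntArraySerDeser` may lay the bytes out differently.

```java
import java.nio.ByteBuffer;
import java.util.Arrays;

// Hypothetical sketch of a length-prefixed layout that matches the asserted sizes:
// a 4-byte element count followed by one 4-byte slot per int, i.e. (n + 1) * 4 bytes.
// The real IntArraySerDeser may differ in detail.
public final class LengthPrefixedIntArrayCodec {

    static byte[] serialize(int[] values) {
        ByteBuffer buffer = ByteBuffer.allocate(4 + values.length * 4);
        buffer.putInt(values.length);      // count prefix
        for (int v : values) {
            buffer.putInt(v);              // fixed-width payload
        }
        return buffer.array();
    }

    static int[] deserialize(byte[] bytes) {
        ByteBuffer buffer = ByteBuffer.wrap(bytes);
        int[] values = new int[buffer.getInt()];
        for (int i = 0; i < values.length; i++) {
            values[i] = buffer.getInt();
        }
        return values;
    }

    public static void main(String[] args) {
        int[] ints = {1, 34, 21, 82};
        byte[] bytes = serialize(ints);
        System.out.println(bytes.length);                            // (4 + 1) * 4 = 20
        System.out.println(Arrays.equals(ints, deserialize(bytes))); // true
    }
}
```
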
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestEntityDefinitionManager.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestEntityDefinitionManager.java
index 3b5669d..b7c858f 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestEntityDefinitionManager.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestEntityDefinitionManager.java
@@ -23,13 +23,16 @@

 public class TestEntityDefinitionManager {
     @Test
-    public void testCreateEntityDefinition(){
-        EntityDefinition entityDefinition = EntityDefinitionManager.createEntityDefinition(TestLogAPIEntity.class);
+    public void testCreateEntityDefinition() {
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .createEntityDefinition(TestLogAPIEntity.class);
         Assert.assertNotNull(entityDefinition);
     }
+
     @Test
-    public void testCreateMetricEntityDefinition(){
-        EntityDefinition entityDefinition = EntityDefinitionManager.createEntityDefinition(GenericMetricEntity.class);
+    public void testCreateMetricEntityDefinition() {
+        EntityDefinition entityDefinition = EntityDefinitionManager
+            .createEntityDefinition(GenericMetricEntity.class);
         Assert.assertNotNull(entityDefinition);
     }
 }

diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestListSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestListSerDeser.java
index fb0018c..9160311 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestListSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestListSerDeser.java
@@ -25,77 +25,77 @@
 
 public class TestListSerDeser {
 
-	@SuppressWarnings("rawtypes")
-	@Test
-	public void testStringListSerDeser() {
-		ListSerDeser serDeser = new ListSerDeser();
-		List<String> sources = new ArrayList<String>();
-		sources.add("value1");
-		sources.add("value2");
-		sources.add("value3");		
-				
-		byte[] bytes = serDeser.serialize(sources);
-		Assert.assertEquals(4 + sources.size() * 8 + 18, bytes.length);
-		List targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.size(), targets.size());
-		
-		Assert.assertTrue(targets.contains("value1"));
-		Assert.assertTrue(targets.contains("value2"));
-		Assert.assertTrue(targets.contains("value3"));
-	}
+    @SuppressWarnings("rawtypes")
+    @Test
+    public void testStringListSerDeser() {
+        ListSerDeser serDeser = new ListSerDeser();
+        List<String> sources = new ArrayList<String>();
+        sources.add("value1");
+        sources.add("value2");
+        sources.add("value3");
 
-	
-	@SuppressWarnings("rawtypes")
-	@Test
-	public void testIntegerMapSerDeser() {
-		ListSerDeser serDeser = new ListSerDeser();
-		List<Integer> sources = new ArrayList<Integer>();
-		sources.add(1);
-		sources.add(2);
-		sources.add(3);
-		
-		byte[] bytes = serDeser.serialize(sources);
-		Assert.assertEquals(4 + sources.size() * 8 + 12, bytes.length);
-		List targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.size(), targets.size());
-		
-		Assert.assertTrue(targets.contains(1));
-		Assert.assertTrue(targets.contains(2));
-		Assert.assertTrue(targets.contains(3));
-	}
+        byte[] bytes = serDeser.serialize(sources);
+        Assert.assertEquals(4 + sources.size() * 8 + 18, bytes.length);
+        List targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.size(), targets.size());
 
-	
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testListListSerDeser() {
-		ListSerDeser serDeser = new ListSerDeser();
-		List<List<String>> sources = new ArrayList<List<String>>();
-		List<String> list1 = new ArrayList<String>();
-		list1.add("value1");
-		list1.add("value2");
-		list1.add("value3");
-		sources.add(list1);
-		
-		List<String> list2 = new ArrayList<String>();
-		list2.add("value4");
-		list2.add("value5");		
-		sources.add(list2);
-		
-		byte[] bytes = serDeser.serialize(sources);
-		List targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.size(), targets.size());
+        Assert.assertTrue(targets.contains("value1"));
+        Assert.assertTrue(targets.contains("value2"));
+        Assert.assertTrue(targets.contains("value3"));
+    }
 
-		list1 = (List)targets.get(0);
-		Assert.assertNotNull(list1);
-		Assert.assertEquals(3, list1.size());
-		Assert.assertTrue(list1.contains("value1"));
-		Assert.assertTrue(list1.contains("value2"));
-		Assert.assertTrue(list1.contains("value3"));
+    @SuppressWarnings("rawtypes")
+    @Test
+    public void testIntegerMapSerDeser() {
+        ListSerDeser serDeser = new ListSerDeser();
+        List<Integer> sources = new ArrayList<Integer>();
+        sources.add(1);
+        sources.add(2);
+        sources.add(3);
 
-		list2 = (List)targets.get(1);
-		Assert.assertNotNull(list2);
-		Assert.assertEquals(2, list2.size());
-		Assert.assertTrue(list2.contains("value4"));
-		Assert.assertTrue(list2.contains("value5"));
-	}
+        byte[] bytes = serDeser.serialize(sources);
+        Assert.assertEquals(4 + sources.size() * 8 + 12, bytes.length);
+        List targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.size(), targets.size());
+
+        Assert.assertTrue(targets.contains(1));
+        Assert.assertTrue(targets.contains(2));
+        Assert.assertTrue(targets.contains(3));
+    }
+
+    @SuppressWarnings({
+                       "rawtypes", "unchecked"
+    })
+    @Test
+    public void testListListSerDeser() {
+        ListSerDeser serDeser = new ListSerDeser();
+        List<List<String>> sources = new ArrayList<List<String>>();
+        List<String> list1 = new ArrayList<String>();
+        list1.add("value1");
+        list1.add("value2");
+        list1.add("value3");
+        sources.add(list1);
+
+        List<String> list2 = new ArrayList<String>();
+        list2.add("value4");
+        list2.add("value5");
+        sources.add(list2);
+
+        byte[] bytes = serDeser.serialize(sources);
+        List targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.size(), targets.size());
+
+        list1 = (List)targets.get(0);
+        Assert.assertNotNull(list1);
+        Assert.assertEquals(3, list1.size());
+        Assert.assertTrue(list1.contains("value1"));
+        Assert.assertTrue(list1.contains("value2"));
+        Assert.assertTrue(list1.contains("value3"));
+
+        list2 = (List)targets.get(1);
+        Assert.assertNotNull(list2);
+        Assert.assertEquals(2, list2.size());
+        Assert.assertTrue(list2.contains("value4"));
+        Assert.assertTrue(list2.contains("value5"));
+    }
 }
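
The magic numbers in the size assertions above appear to break down as a 4-byte element count, 8 bytes of per-entry overhead, and then the raw payload: 18 is three 6-byte `"valueN"` strings, 12 is three 4-byte `Integer`s. The snippet below reproduces only that arithmetic, not the actual `ListSerDeser` wire format.

```java
// Reproduces only the arithmetic behind the assertions above, not the actual
// ListSerDeser wire format: a 4-byte count, 8 bytes of assumed per-entry
// overhead, then the raw payload bytes.
public final class ListSizeArithmetic {
    public static void main(String[] args) {
        int stringPayload = "value1".length() + "value2".length() + "value3".length(); // 18
        int stringListBytes = 4 + 3 * 8 + stringPayload; // 46, as asserted for the String list
        int integerListBytes = 4 + 3 * 8 + 3 * 4;        // 40, as asserted for the Integer list
        System.out.println(stringListBytes + ", " + integerListBytes);
    }
}
```
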
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java
index 96051f4..39c53cc 100644
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/meta/TestMapSerDeser.java
@@ -25,82 +25,82 @@
 
 public class TestMapSerDeser {
 
-	@SuppressWarnings("rawtypes")
-	@Test
-	public void testStringToStringMapSerDeser() {
-		MapSerDeser serDeser = new MapSerDeser();
-		Map<String, String> sources = new HashMap<String, String>();
-		sources.put("test1", "value1");
-		sources.put("test2", null);
-		sources.put("test3", "value3");
-		
-		byte[] bytes = serDeser.serialize(sources);
-		Assert.assertEquals(4 + sources.size() * 16 + 27, bytes.length);
-		Map targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.size(), targets.size());
-		
-		Assert.assertEquals("value1", targets.get("test1"));
-		Assert.assertNull(targets.get("test2"));
-		Assert.assertEquals("value3", targets.get("test3"));
-	}
+    @SuppressWarnings("rawtypes")
+    @Test
+    public void testStringToStringMapSerDeser() {
+        MapSerDeser serDeser = new MapSerDeser();
+        Map<String, String> sources = new HashMap<String, String>();
+        sources.put("test1", "value1");
+        sources.put("test2", null);
+        sources.put("test3", "value3");
 
-	
-	@SuppressWarnings("rawtypes")
-	@Test
-	public void testStringToIntegerMapSerDeser() {
-		MapSerDeser serDeser = new MapSerDeser();
-		Map<String, Integer> sources = new HashMap<String, Integer>();
-		sources.put("test1", 1);
-		sources.put("test2", null);
-		sources.put("test3", 3);
-		
-		byte[] bytes = serDeser.serialize(sources);
-		Assert.assertEquals(4 + sources.size() * 16 + 23, bytes.length);
-		Map targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.size(), targets.size());
-		
-		Assert.assertEquals(1, targets.get("test1"));
-		Assert.assertNull(targets.get("test2"));
-		Assert.assertEquals(3, targets.get("test3"));
-	}
+        byte[] bytes = serDeser.serialize(sources);
+        Assert.assertEquals(4 + sources.size() * 16 + 27, bytes.length);
+        Map targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.size(), targets.size());
 
-	
-	@SuppressWarnings({ "rawtypes", "unchecked" })
-	@Test
-	public void testStringToMapMapSerDeser() {
-		MapSerDeser serDeser = new MapSerDeser();
-		Map<String, Map<String, String>> sources = new HashMap<String, Map<String, String>>();
-		Map<String, String> map1 = new HashMap<String, String>();
-		map1.put("key11", "value11");
-		map1.put("key12", null);
-		map1.put("key13", "value13");
-		sources.put("test1", map1);
-		sources.put("test2", null);
-		Map<String, String> map3 = new HashMap<String, String>();
-		map3.put("key31", "value31");
-		map3.put("key32", null);
-		map3.put("key33", "value33");
-		sources.put("test3", map3);
-		
-		byte[] bytes = serDeser.serialize(sources);
-		Map targets = serDeser.deserialize(bytes);
-		Assert.assertEquals(sources.size(), targets.size());
+        Assert.assertEquals("value1", targets.get("test1"));
+        Assert.assertNull(targets.get("test2"));
+        Assert.assertEquals("value3", targets.get("test3"));
+    }
 
-		map1 = (Map)targets.get("test1");
-		Assert.assertNotNull(map1);
-		Assert.assertEquals(3, map1.size());
-		Assert.assertEquals("value11", map1.get("key11"));
-		Assert.assertNull(map1.get("key12"));
-		Assert.assertEquals("value13", map1.get("key13"));
-		
-		Assert.assertNull(targets.get("test2"));
-		
-		map3 = (Map)targets.get("test3");
-		Assert.assertNotNull(map3);
-		Assert.assertEquals(3, map3.size());
-		Assert.assertEquals("value31", map3.get("key31"));
-		Assert.assertNull(map3.get("key32"));
-		Assert.assertEquals("value33", map3.get("key33"));
-	}
+    @SuppressWarnings("rawtypes")
+    @Test
+    public void testStringToIntegerMapSerDeser() {
+        MapSerDeser serDeser = new MapSerDeser();
+        Map<String, Integer> sources = new HashMap<String, Integer>();
+        sources.put("test1", 1);
+        sources.put("test2", null);
+        sources.put("test3", 3);
+
+        byte[] bytes = serDeser.serialize(sources);
+        Assert.assertEquals(4 + sources.size() * 16 + 23, bytes.length);
+        Map targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.size(), targets.size());
+
+        Assert.assertEquals(1, targets.get("test1"));
+        Assert.assertNull(targets.get("test2"));
+        Assert.assertEquals(3, targets.get("test3"));
+    }
+
+    @SuppressWarnings({
+                       "rawtypes", "unchecked"
+    })
+    @Test
+    public void testStringToMapMapSerDeser() {
+        MapSerDeser serDeser = new MapSerDeser();
+        Map<String, Map<String, String>> sources = new HashMap<String, Map<String, String>>();
+        Map<String, String> map1 = new HashMap<String, String>();
+        map1.put("key11", "value11");
+        map1.put("key12", null);
+        map1.put("key13", "value13");
+        sources.put("test1", map1);
+        sources.put("test2", null);
+        Map<String, String> map3 = new HashMap<String, String>();
+        map3.put("key31", "value31");
+        map3.put("key32", null);
+        map3.put("key33", "value33");
+        sources.put("test3", map3);
+
+        byte[] bytes = serDeser.serialize(sources);
+        Map targets = serDeser.deserialize(bytes);
+        Assert.assertEquals(sources.size(), targets.size());
+
+        map1 = (Map)targets.get("test1");
+        Assert.assertNotNull(map1);
+        Assert.assertEquals(3, map1.size());
+        Assert.assertEquals("value11", map1.get("key11"));
+        Assert.assertNull(map1.get("key12"));
+        Assert.assertEquals("value13", map1.get("key13"));
+
+        Assert.assertNull(targets.get("test2"));
+
+        map3 = (Map)targets.get("test3");
+        Assert.assertNotNull(map3);
+        Assert.assertEquals(3, map3.size());
+        Assert.assertEquals("value31", map3.get("key31"));
+        Assert.assertNull(map3.get("key32"));
+        Assert.assertEquals("value33", map3.get("key33"));
+    }
 
 }
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java
index 2904153..3c89c14 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/entity/repo/TestEntityRepositoryScanner.java
@@ -23,9 +23,9 @@
 
 public class TestEntityRepositoryScanner {
 
-	@Test
-	public void testScan() throws InstantiationException, IllegalAccessException {
-		Assert.assertEquals(MetricMetadataEntity.class,
-				EntityDefinitionManager.getEntityByServiceName("MetricMetadataService").getEntityClass());
-	}
-}
\ No newline at end of file
+    @Test
+    public void testScan() throws InstantiationException, IllegalAccessException {
+        Assert.assertEquals(MetricMetadataEntity.class, EntityDefinitionManager
+            .getEntityByServiceName("MetricMetadataService").getEntityClass());
+    }
+}
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java
index ea548b8..146f5cd 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionParser.java
@@ -28,229 +28,163 @@
  * @since Nov 10, 2014
  */
 public class TestExpressionParser {
-			
-	@Test
-	public void testSingleVariable() throws Exception{
-		String exprStr = "mapProgress";		
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("mapProgress", 100.0)
-							 .eval();
-		Assert.assertEquals(value, 100.0, 0.001);
-		List<String> dependentFields = parser.getDependentFields();
-		Assert.assertEquals(dependentFields.size(), 1);
-		Assert.assertEquals(dependentFields.get(0), "mapProgress");
-	}
-	
-	@Test
-	public void testgetDependency() throws Exception{
-		/** NOTICE: expression should be enclosure with "EXP{}" , This is for making antlr easy to parse  
-		  * variable name cannot be "pi" OR "E", there are parssi builtin constants */
-		String exprStr = "min(mAx, Max) / abs(MAX)";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		List<String> variables =  parser.getDependentFields();
-		Assert.assertEquals(variables.size(), 3);
-		Assert.assertTrue(variables.contains("mAx"));
-		Assert.assertTrue(variables.contains("Max"));
-		Assert.assertTrue(variables.contains("MAX"));
-	}
 
-	@Test
-	public void testFunction() throws Exception{
-		String exprStr = "min(mapProgress, reduceProgress) / abs(endTime - startTime)";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("mapProgress", 100.0)
-							 .setVariable("reduceProgress", 20.0)
-							 .setVariable("endTime", 1415590100000.0)
-							 .setVariable("startTime", 1415590000000.0)
-							 .eval();
-		Assert.assertEquals(value, 0.0002, 0.001);
-	}
-	
-	@Test
-	public void testOperator() throws Exception{
-		String exprStr = "(a+b*c) / (2*(d-e))";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("a", 200.0)
-							 .setVariable("b", 400.0)
-							 .setVariable("c", 3.0)
-							 .setVariable("d", 225.0)
-							 .setVariable("e", -125.0)
-							 .eval();
-		Assert.assertEquals(value, 2.0, 0.001);
-	}
-	
-	@Test
-	public void testOperatorWithFunction() throws Exception{
-		String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("a", 300.0)
-							 .setVariable("b", 200.0)
-							 .setVariable("c", -300.0)
-							 .setVariable("d", -300.0)
-							 .eval();
-		Assert.assertEquals(value, 600.0, 0.001);
-	}
+    @Test
+    public void testSingleVariable() throws Exception {
+        String exprStr = "mapProgress";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("mapProgress", 100.0).eval();
+        Assert.assertEquals(value, 100.0, 0.001);
+        List<String> dependentFields = parser.getDependentFields();
+        Assert.assertEquals(dependentFields.size(), 1);
+        Assert.assertEquals(dependentFields.get(0), "mapProgress");
+    }
 
-	@Test
-	public void testWithAtFieldName() throws Exception{
-		String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("a", 300.0)
-							 .setVariable("b", 200.0)
-							 .setVariable("c", -300.0)
-							 .setVariable("d", -300.0)
-							 .eval();
-		Assert.assertEquals(value, 600.0, 0.001);
-	}
+    @Test
+    public void testgetDependency() throws Exception {
+        /**
+         * NOTICE: the expression should be enclosed in "EXP{}"; this makes it easier for antlr to parse.
+         * The variable name cannot be "pi" or "E", as these are parsii built-in constants.
+         */
+        String exprStr = "min(mAx, Max) / abs(MAX)";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        List<String> variables = parser.getDependentFields();
+        Assert.assertEquals(variables.size(), 3);
+        Assert.assertTrue(variables.contains("mAx"));
+        Assert.assertTrue(variables.contains("Max"));
+        Assert.assertTrue(variables.contains("MAX"));
+    }
 
-	@Test
-	public void testConstant() throws Exception {
-		String exprStr = "a";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("a", 300.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertEquals(value, 300.0, 0.001);
+    @Test
+    public void testFunction() throws Exception {
+        String exprStr = "min(mapProgress, reduceProgress) / abs(endTime - startTime)";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("mapProgress", 100.0).setVariable("reduceProgress", 20.0)
+            .setVariable("endTime", 1415590100000.0).setVariable("startTime", 1415590000000.0).eval();
+        Assert.assertEquals(value, 0.0002, 0.001);
+    }
 
-		value = parser.setVariable("a", 200.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertEquals(value, 200.0, 0.001);
-	}
+    @Test
+    public void testOperator() throws Exception {
+        String exprStr = "(a+b*c) / (2*(d-e))";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("a", 200.0).setVariable("b", 400.0).setVariable("c", 3.0)
+            .setVariable("d", 225.0).setVariable("e", -125.0).eval();
+        Assert.assertEquals(value, 2.0, 0.001);
+    }
 
-	@Test
-	public void testBooleanExpression() throws Exception {
-		String exprStr = "a > b";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("a", 300.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertEquals(value, 1.0, 0.001);
+    @Test
+    public void testOperatorWithFunction() throws Exception {
+        String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("a", 300.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 600.0, 0.001);
+    }
 
-		value = parser.setVariable("a", 100.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertEquals(value, 0.0, 0.001);
+    @Test
+    public void testWithAtFieldName() throws Exception {
+        String exprStr = "(max(a, b)* min(a, b)) / abs(a-b+c-d)";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("a", 300.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 600.0, 0.001);
+    }
 
-		exprStr = "a < b";
-		parser = new ExpressionParser(exprStr);
-		value = parser.setVariable("a", 300.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 0.0);
+    @Test
+    public void testConstant() throws Exception {
+        String exprStr = "a";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("a", 300.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 300.0, 0.001);
 
-		value = parser.setVariable("a", 400.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 0.0);
+        value = parser.setVariable("a", 200.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 200.0, 0.001);
+    }
 
-		value = parser.setVariable("a", 100.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0);
+    @Test
+    public void testBooleanExpression() throws Exception {
+        String exprStr = "a > b";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("a", 300.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 1.0, 0.001);
 
-		// !!! Not support well >=
-		exprStr = "a >= b";
-		parser = new ExpressionParser(exprStr);
-		value = parser.setVariable("a", 300.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 0.0); // expect 1.0
+        value = parser.setVariable("a", 100.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 0.0, 0.001);
 
-		value = parser.setVariable("a", 400.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0); // expect 1.0
+        exprStr = "a < b";
+        parser = new ExpressionParser(exprStr);
+        value = parser.setVariable("a", 300.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 0.0);
 
-		value = parser.setVariable("a", 100.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0); // expect 0.0
+        value = parser.setVariable("a", 400.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 0.0);
 
-		exprStr = "a <= b";
-		parser = new ExpressionParser(exprStr);
-		value = parser.setVariable("a", 300.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0);
+        value = parser.setVariable("a", 100.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0);
 
-		value = parser.setVariable("a", 400.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 0.0);
+        // !!! >= is not supported well
+        exprStr = "a >= b";
+        parser = new ExpressionParser(exprStr);
+        value = parser.setVariable("a", 300.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 0.0); // expect 1.0
 
-		value = parser.setVariable("a", 100.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0);
+        value = parser.setVariable("a", 400.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0); // expect 1.0
 
-		exprStr = "a = b";
-		parser = new ExpressionParser(exprStr);
-		value = parser.setVariable("a", 300.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertEquals(value, 1.0, 0.001);
+        value = parser.setVariable("a", 100.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0); // expect 0.0
 
-		value = parser.setVariable("a", 100.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertEquals(value, 0.0, 0.001);
-	}
+        exprStr = "a <= b";
+        parser = new ExpressionParser(exprStr);
+        value = parser.setVariable("a", 300.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0);
 
-	@Test
-	public void testParsiiBug() throws Exception {
-		// !!! Not support >=
-		String exprStr = "a >= b";
-		ExpressionParser parser = new ExpressionParser(exprStr);
-		Double value = parser.setVariable("a", 300.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 0.0); // expect 1.0
+        value = parser.setVariable("a", 400.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 0.0);
 
-		value = parser.setVariable("a", 400.0)
-				.setVariable("b", 300.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0); // expect 1.0
+        value = parser.setVariable("a", 100.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0);
 
-		value = parser.setVariable("a", 100.0)
-				.setVariable("b", 200.0)
-				.setVariable("c", -300.0)
-				.setVariable("d", -300.0)
-				.eval();
-		Assert.assertTrue(value == 1.0); // expect 0.0
-	}
+        exprStr = "a = b";
+        parser = new ExpressionParser(exprStr);
+        value = parser.setVariable("a", 300.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 1.0, 0.001);
+
+        value = parser.setVariable("a", 100.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertEquals(value, 0.0, 0.001);
+    }
+
+    @Test
+    public void testParsiiBug() throws Exception {
+        // !!! ">=" is not supported correctly
+        String exprStr = "a >= b";
+        ExpressionParser parser = new ExpressionParser(exprStr);
+        Double value = parser.setVariable("a", 300.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 0.0); // expect 1.0
+
+        value = parser.setVariable("a", 400.0).setVariable("b", 300.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0); // expect 1.0
+
+        value = parser.setVariable("a", 100.0).setVariable("b", 200.0).setVariable("c", -300.0)
+            .setVariable("d", -300.0).eval();
+        Assert.assertTrue(value == 1.0); // expect 0.0
+    }
 }
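
The testParsiiBug assertions above pin the behaviour currently observed with the parsii build in use: "a >= b" does not evaluate as a plain greater-or-equal (300 >= 300 yields 0.0 and 100 >= 200 yields 1.0), so a later parsii upgrade that fixes the operator would surface as a test failure here. A rough standalone sketch of the same check (class name hypothetical; the parsii.eval package and Scope.create() are assumed to match what ParsiiParser in the next file uses) could look like:

```java
import parsii.eval.Expression;
import parsii.eval.Parser;
import parsii.eval.Scope;

// Hypothetical reproduction of the ">=" behaviour recorded by testParsiiBug.
public class GreaterEqualsCheck {
    public static void main(String[] args) throws Exception {
        Scope scope = Scope.create();
        Expression expr = Parser.parse("a >= b", scope);

        scope.getVariable("a").setValue(300.0);
        scope.getVariable("b").setValue(300.0);
        // A correct ">=" would give 1.0 here; the test above observes 0.0.
        System.out.println("300 >= 300 -> " + expr.evaluate());

        scope.getVariable("a").setValue(100.0);
        scope.getVariable("b").setValue(200.0);
        // A correct ">=" would give 0.0 here; the test above observes 1.0.
        System.out.println("100 >= 200 -> " + expr.evaluate());
    }
}
```
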
diff --git a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java
index 0a2b729..0c3edc3 100755
--- a/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java
+++ b/eagle-core/eagle-query/eagle-entity-base/src/test/java/org/apache/eagle/log/expression/TestExpressionPerformance.java
@@ -38,83 +38,84 @@
 
 public class TestExpressionPerformance {
 
-	public interface ExpressionParser {
-		double parse(String exprStr, Map<String, Double> tuple) throws Exception;
-	}
-	
-	public class ParsiiParser implements ExpressionParser{
-		public Expression expression;
-		
-		public double parse(String exprStr, Map<String, Double> tuple) throws Exception{		
-			Scope scope = Scope.create();
-			if (expression == null) {
-				expression = Parser.parse(exprStr, scope);
-			}
-			for(String valName : tuple.keySet()) {
-				Object value = tuple.get(valName);
-				if(value instanceof Number) {
-					scope.getVariable(valName).setValue(((Number)value).doubleValue());
-				}
-			}
-			return expression.evaluate();
-		}
-	}
-	
-	public long doParse(ExpressionParser parser, String exprStr, List<String> parameters) throws Exception{
-		long startTime = System.currentTimeMillis();
-		int parNum = parameters.size();
-		Map<String, Double> tuple = new HashMap<String, Double>();
-		for (int i = 1; i < 100000; i++) {
-			for (int j = 0; j < parNum; j++) {
-				tuple.put(parameters.get(j), (double) (i * 3 + j));				
-			}
-			parser.parse(exprStr, tuple);
-		}
-		long endTime = System.currentTimeMillis();
-		return endTime - startTime;
-	}
-	
-	@Test
-	public void TestPerformance() throws Exception{
-		List<ExpressionParser> parsers = new ArrayList<ExpressionParser>();
-		parsers.add(new ParsiiParser());
+    public interface ExpressionParser {
+        double parse(String exprStr, Map<String, Double> tuple) throws Exception;
+    }
 
-		String exprStr = "a + b / c * 2"; 
-		List<String> parameters = new ArrayList<String>();
-		parameters.add("a");
-		parameters.add("b");
-		parameters.add("c");
-		
-		Map<String, Long> timeComsued = new HashMap<String, Long>();
-		
-		for (int i = 0; i < 10; i++) {
-			for (ExpressionParser parser : parsers) {
-				String name = parser.getClass().getName();
-				if (timeComsued.get(name) == null) {
-					timeComsued.put(name, 0L);
-				}
-				timeComsued.put(name, timeComsued.get(name) + doParse(parser, exprStr, parameters));			
-			}
-		}
-		for (Entry<String, Long> time : timeComsued.entrySet()) {
-			System.out.println("time consumed of " + time.getKey() + ": " + time.getValue() +"ms");
-		}
-	}
+    public class ParsiiParser implements ExpressionParser {
+        public Expression expression;
 
-	@Test
-	public void TestEvaluatoinValid() throws Exception{
-		List<ExpressionParser> parsers = new ArrayList<ExpressionParser>();
-		parsers.add(new ParsiiParser());
+        @Override
+        public double parse(String exprStr, Map<String, Double> tuple) throws Exception {
+            Scope scope = Scope.create();
+            if (expression == null) {
+                expression = Parser.parse(exprStr, scope);
+            }
+            for (String valName : tuple.keySet()) {
+                Object value = tuple.get(valName);
+                if (value instanceof Number) {
+                    scope.getVariable(valName).setValue(((Number)value).doubleValue());
+                }
+            }
+            return expression.evaluate();
+        }
+    }
 
-		String exprStr = "max(a, 3 * b) + min(b, 10000) / abs(c * 2)";
-		Map<String ,Double> tuples = new HashMap<String, Double>();
-		tuples.put("a", 20.5);
-		tuples.put("b", 123.7);
-		tuples.put("c", 97.57);
-		DecimalFormat df = new DecimalFormat("#.00");
-		for (ExpressionParser parser : parsers) {			
-			System.out.println(parser.getClass().getName() + " : " + parser.parse(exprStr, tuples));
-			Assert.assertEquals(df.format(parser.parse(exprStr, tuples)), "371.73");
-		}
-	}
+    public long doParse(ExpressionParser parser, String exprStr, List<String> parameters) throws Exception {
+        long startTime = System.currentTimeMillis();
+        int parNum = parameters.size();
+        Map<String, Double> tuple = new HashMap<String, Double>();
+        for (int i = 1; i < 100000; i++) {
+            for (int j = 0; j < parNum; j++) {
+                tuple.put(parameters.get(j), (double)(i * 3 + j));
+            }
+            parser.parse(exprStr, tuple);
+        }
+        long endTime = System.currentTimeMillis();
+        return endTime - startTime;
+    }
+
+    @Test
+    public void TestPerformance() throws Exception {
+        List<ExpressionParser> parsers = new ArrayList<ExpressionParser>();
+        parsers.add(new ParsiiParser());
+
+        String exprStr = "a + b / c * 2";
+        List<String> parameters = new ArrayList<String>();
+        parameters.add("a");
+        parameters.add("b");
+        parameters.add("c");
+
+        Map<String, Long> timeConsumed = new HashMap<String, Long>();
+
+        for (int i = 0; i < 10; i++) {
+            for (ExpressionParser parser : parsers) {
+                String name = parser.getClass().getName();
+                if (timeConsumed.get(name) == null) {
+                    timeConsumed.put(name, 0L);
+                }
+                timeConsumed.put(name, timeConsumed.get(name) + doParse(parser, exprStr, parameters));
+            }
+        }
+        for (Entry<String, Long> time : timeConsumed.entrySet()) {
+            System.out.println("time consumed of " + time.getKey() + ": " + time.getValue() + "ms");
+        }
+    }
+
+    @Test
+    public void TestEvaluationValid() throws Exception {
+        List<ExpressionParser> parsers = new ArrayList<ExpressionParser>();
+        parsers.add(new ParsiiParser());
+
+        String exprStr = "max(a, 3 * b) + min(b, 10000) / abs(c * 2)";
+        Map<String, Double> tuples = new HashMap<String, Double>();
+        tuples.put("a", 20.5);
+        tuples.put("b", 123.7);
+        tuples.put("c", 97.57);
+        DecimalFormat df = new DecimalFormat("#.00");
+        for (ExpressionParser parser : parsers) {
+            System.out.println(parser.getClass().getName() + " : " + parser.parse(exprStr, tuples));
+            Assert.assertEquals(df.format(parser.parse(exprStr, tuples)), "371.73");
+        }
+    }
 }