Upgraded to Elasticsearch 5.x. Added sankey endpoint.

Moves Flask app creation out of distill/__init__.py into distill/app.py, reads Elasticsearch settings from distill/config/elasticsearch.py, disables the Stout integration, and adds a /sankey/<app_id> route backed by GraphAnalytics.generate_graph.
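
The new route can be exercised like the existing ones. The host, port, and the xdata_v3 application name below follow the examples already used in the API docstrings (config.cfg now defaults PORT to 5000, so adjust the port to your deployment); the from/to/size query parameters map to the arguments of GraphAnalytics.generate_graph:

    $ curl -XGET 'http://localhost:8090/sankey/xdata_v3?from=now-1h&to=now&size=20'

The response is a JSON object with a "sankey" key (node and link lists for the diagram) and a "bargraph" key (per-target event counts).
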
diff --git a/.gitignore b/.gitignore
index fb05fa0..a42b3b8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -14,6 +14,7 @@
 # limitations under the License.
 
 .DS_Store
+.idea
 
 # Byte-compiled / optimized / DLL files
 __pycache__/
diff --git a/MANIFEST.in b/MANIFEST.in
index 54c3542..ec629be 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,8 +1,7 @@
 include MANIFEST.in
-include README.md
-include CHANGELOG.md
-include LICENSE.txt
-include setup.py
+include *.rst
+include NOTICE LICENSE DISCLAIMER.txt
+include setup.py setup.cfg
 
 # Include all sources
 graft distill
diff --git a/distill/__init__.py b/distill/__init__.py
index 405850a..aa24b84 100644
--- a/distill/__init__.py
+++ b/distill/__init__.py
@@ -13,28 +13,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from flask import Flask
 from elasticsearch_dsl.connections import connections
 
-# Initialize Flask instance
-app = Flask(__name__)
+from distill.config.elasticsearch import ELASTICSEARCH_PARAMS
 
-# Load Configurations
-app.config.from_pyfile('config/config.cfg')
 
 # Unpack Elasticsearch configuration and create elasticsearch connection
-host = app.config['ES_HOST']
-port = app.config['ES_PORT']
-http_auth = app.config['HTTP_AUTH']
-use_ssl = app.config['USE_SSL']
-verify_certs = app.config['VERIFY_CERTS']
-ca_certs = app.config['CA_CERTS']
-client_cert = app.config['CLIENT_CERT']
-client_key = app.config['CLIENT_KEY']
-timeout = app.config['TIMEOUT']
+host = ELASTICSEARCH_PARAMS['host']
+port = ELASTICSEARCH_PARAMS['port']
+http_auth = ELASTICSEARCH_PARAMS['http_auth']
+use_ssl = ELASTICSEARCH_PARAMS['use_ssl']
+verify_certs = ELASTICSEARCH_PARAMS['verify_certs']
+ca_certs = ELASTICSEARCH_PARAMS['ca_certs']
+client_cert = ELASTICSEARCH_PARAMS['client_cert']
+client_key = ELASTICSEARCH_PARAMS['client_key']
+timeout = ELASTICSEARCH_PARAMS['timeout']
 
 # Initialize Elasticsearch instance
-es = connections.create_connection(hosts=[host],
+es = connections.create_connection(hosts=host,
                                    port=port,
                                    http_auth=http_auth,
                                    use_ssl=use_ssl,
diff --git a/distill/algorithms/graphs/graph.py b/distill/algorithms/graphs/graph.py
index cd238f5..e3d0f04 100644
--- a/distill/algorithms/graphs/graph.py
+++ b/distill/algorithms/graphs/graph.py
@@ -13,13 +13,387 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import collections
+import itertools
+import json
+
+try:
+    from itertools import zip_longest  # Python 3
+except ImportError:
+    from itertools import izip_longest as zip_longest  # Python 2
+
+from elasticsearch import helpers
+
+from distill import es
 
 class GraphAnalytics (object):
     """
     Distill's graph analytics package. Apply graph algorithms
-    to User Ale log data segmented with Stout.
+    to UserALE log data.
     """
 
     @staticmethod
-    def foo():
-        pass
+    def generate_graph(app,
+                       app_type='logs',
+                       log_type='raw',
+                       # version='0.2.0',
+                       target_events=[],
+                       time_range=['now-1h', 'now'],
+                       size=20):
+        """
+        Return all elements from an application, possible matching against
+        a specific event (e.g. click, mouseover, etc)
+        """
+        # @TODO ref_url filter
+
+        # Filtering
+        should_query = []
+        if (target_events):
+            for event in target_events:
+                res = {
+                    "term": {
+                        "type": event
+                    }
+                }
+                should_query.append(res)
+
+        must_not_query = [
+            {
+                "term": {
+                    "type": "mousedown"
+                }
+            },
+            {
+                "term": {
+                    "type": "mouseup"
+                }
+            }
+        ]
+
+        filter_query = [
+            {
+                "term": {
+                    "logType": log_type
+                }
+            },
+            # {
+            #     "term": {
+            #         "useraleVersion": version
+            #     }
+            # }
+        ]
+
+        # Sort By Time
+        sort_query = [
+            {
+                "clientTime": {
+                    "order": "asc"
+                }
+            }
+        ]
+
+        # Timestamp range - date math
+        timestamp_query = {
+            "range": {
+                "@timestamp": {
+                    "gte": time_range[0],
+                    "lte": time_range[1]
+                }
+            }
+        }
+        filter_query.append(timestamp_query)
+
+        agg_query = dict()
+
+        # Get all unique sessions
+        session_query = {
+                "terms": {
+                    "field": "sessionID.keyword",
+                    "min_doc_count": 1
+                }
+            }
+
+        agg_query['sessions'] = session_query
+
+        # Generating all top targets
+        target_query = {
+                "terms": {
+                    "field": "target.keyword",
+                    "min_doc_count": 1,
+                    "size": size
+                },
+                "aggs": {
+                    "events": {
+                        "terms": {
+                            "field": "type.keyword",
+                            "min_doc_count": 1,
+                            "size": size
+                        }
+                    }
+                }
+            }
+
+        agg_query['targets'] = target_query
+
+        # Main query
+        query = {
+            "sort": sort_query,
+            "query": {
+                "bool": {
+                    "should": should_query,
+                    "filter": filter_query,
+                    "must_not": must_not_query,
+                }
+            },
+            "aggs": agg_query
+        }
+
+        # Process Aggregate Results
+        response = es.search(app, doc_type=app_type, body=query, size=0)
+
+        # Only want to look at aggregations
+        sessions = response['aggregations']['sessions']['buckets']
+        allSessions = { x['key']: [] for x in sessions }
+        intervalSessions = { x['key']: [] for x in sessions }
+
+        # Deal with bar chart
+        allTargets = response['aggregations']['targets']['buckets']
+
+        # Re-execute query to get all hits
+        iter = helpers.scan(es,
+                            query=query,
+                            index=app,
+                            doc_type=app_type,
+                            preserve_order=True)
+
+        # Store all hits in the user's bucket.
+        for elem in iter:
+            data = elem['_source']
+            if 'sessionID' in data:
+                sessionID = data['sessionID']
+                if sessionID in allSessions:
+                    allSessions[sessionID].append(data)
+
+        # Collapse runs of identical events into intervals: keep only the log
+        # entries where the target or event type changes
+        for sessionID in allSessions:
+            data = allSessions[sessionID]
+
+            newData = []
+            intervalLog = []
+            for curr, nxt in zip(data, data[1:]):
+                target1 = curr['target']
+                event1 = curr['type']
+                target2 = nxt['target']
+                event2 = nxt['type']
+
+                if target1 != target2:
+                    targetChange = int(True)
+                    eventChange = int(False)
+                    if event1 != event2:
+                        eventChange = int(True)
+
+                    # Starting over no matter what
+                    # Based off of curr, update the log
+                    curr['targetChange'] = targetChange
+                    curr['typeChange'] = eventChange
+                    curr['intervalCount'] = len(intervalLog)  # number of collapsed events (may be 0)
+                    # if len(intervalLog) >= 2:
+                    #     # Calculate duration
+                    #     curr['duration'] = intervalLog[-1:]['clientTime'] - \
+                    #                        intervalLog[0]['clientTime']
+                    # else:
+                    #     curr['duration'] = 0
+                    newData.append(curr)
+                    intervalLog = []
+                else:
+                    # They are the same
+                    targetChange = int(False)
+                    eventChange = int(False)
+                    if event1 != event2:
+                        eventChange = int(True)
+                        # starting over
+                        curr['targetChange'] = targetChange
+                        curr['typeChange'] = eventChange
+                        curr['intervalCount'] = len(intervalLog)
+                        # if len(intervalLog) >= 2:
+                        #     # Calculate duration
+                        #     curr['duration'] = intervalLog[-1:]['clientTime'] - \
+                        #                        intervalLog[0]['clientTime']
+                        # else:
+                        #     curr['duration'] = 0
+                        newData.append(curr)
+                        intervalLog = []
+                    else:
+                        # increase counter
+                        intervalLog.append(curr)
+            intervalSessions[sessionID] = newData
+
+
+        newSessions = []
+
+        # Generate all edges tied to a user
+        # [ edge list, edge list, ... ]
+        for k, v in intervalSessions.items():
+            pairs = pairwise(v) # list of edges for a user
+            newSessions.append(pairs)
+
+        # Node Map
+        node_map = []   # Need to keep 0based index for sankey diagram
+        links = []      # Aggregate sequence list
+
+        # Align the sequences
+        alignment = zip_longest(*newSessions)
+
+        for step in alignment:
+            # Step through every user's sequence
+            counts = collections.Counter()
+            visitedLinks = []
+            nodenames = set([])
+
+            # Process all the edges
+            for edge in step:
+                if edge:
+                    node1 = edge[0]
+                    node2 = edge[1]
+
+                    nodename1 = node1['target']
+                    nodename2 = node2['target']
+
+                    # Add source and target node names
+                    nodenames.add(nodename1)
+                    nodenames.add(nodename2)
+
+                    # Generate sequence ID
+                    seqID = '%s->%s' % (nodename1, nodename2)
+
+                    # @todo Ensure src and target are not the same (self-loop)
+                    link = {
+                        'sequenceID': seqID,
+                        'sourceName': nodename1,
+                        'targetName': nodename2,
+                        'type': node1['type'],
+                        # 'duration': node1['duration'],
+                        'pathLength': len(node1['path']),
+                        'targetChange': node1['targetChange'],
+                        'typeChange': node1['typeChange']
+                    }
+                    visitedLinks.append(link)
+
+            # How many users visited a sequence at this step
+            counts = collections.Counter(k['sequenceID'] for k in visitedLinks if k.get('sequenceID'))
+
+            # Append into growing nodeMap
+            node_map.extend(nodenames)
+
+            for v in visitedLinks:
+                # Pass through and update count, also generate src and target id
+                v['value'] = counts[v['sequenceID']]
+                # Last occurrence in node_map gives the source and target ids
+                v['sourceID'] = len(node_map) - 1 - node_map[::-1].index(v['sourceName'])
+                v['targetID'] = len(node_map) - 1 - node_map[::-1].index(v['targetName'])
+                links.append(v)
+
+        # Save everything
+        res = dict()
+        res['bargraph'] = generate_bargraph(allTargets)
+        res['sankey'] = {
+            'links': links,
+            'nodes': node_map
+        }
+
+        # with open('sankey.json', 'w') as outfile:
+        #     json.dump(res, outfile, indent=4, sort_keys=False)
+
+        # with open('data.txt', 'w') as outfile:
+        #     json.dump(intervalSessions, outfile, indent=4, sort_keys=False)
+        #
+        # with open('query.json', 'w') as outfile:
+        #     json.dump(query, outfile, indent=4, sort_keys=False)
+        # Iterate first to get nodes
+        # pairs = pairwise(iter)
+        #
+        # nodes = []
+        # links = []
+
+        # for p in pairs:
+        #     node1 = p[0]['_source']
+        #     node2 = p[1]['_source']
+
+        #     # Append nodes to list
+        #     nodes.append(node1['target'])
+        #     nodes.append(node2['target'])
+
+        # Iterate again to get edges
+        # pairs = pairwise(iter2)
+
+        # srcID = targetID = None
+        # for p in pairs:
+        #     node1 = p[0]['_source']
+        #     node2 = p[1]['_source']
+        #
+        #     # Append nodes to list
+        #     nodes.append(node1['target'])
+        #     # nodes.append(node2['target'])
+        #
+        #     srcID = len(nodes) - 1
+        #     targetID = len(nodes)
+        #
+        #     # if (node1['target'] != node2['target']):
+        #     # Append links to list (remove self-loops)
+        #     link = {
+        #         'sourceID': srcID,
+        #         'targetID': targetID,
+        #         'sourceName': node1['target'],
+        #         'targetName': node2['target'],
+        #         'type': node1['type'],
+        #         'duration': node1['duration'],
+        #         'value': node1['count'],
+        #         'pathLength': len(node1['path']),
+        #         'targetChange': int(node1['targetChange']),
+        #         'typeChange': int(node1['typeChange'])
+        #     }
+        #     links.append(link)
+        #
+        # # Get all unique nodes
+        # # node_names = np.unique(nodes).tolist()
+        # node_list = []
+        #
+        # for indx, name in enumerate(nodes):
+        #     n = {'id': indx, 'name': name}
+        #     node_list.append(n)
+        #
+        # # Remove self-loops
+        # newLinks = []
+        # for indx, elem in enumerate(links):
+        #     srcID = elem['sourceID']
+        #     targetID = elem['targetID']
+        #
+        #     if srcID != targetID:
+        #         newLinks.append(elem)
+        #
+
+        #
+        return res
+
+
+def pairwise(iterable):
+    "s -> (s0, s1), (s1, s2), (s2, s3), ..."
+    a, b = itertools.tee(iterable)
+    next(b, None)
+    return zip(a, b)
+
+
+def generate_bargraph(data, filename='bargraph.json'):
+    results = []
+    for target in data:
+        target_name = target['key']
+        type_bucket = target['events']['buckets']
+        for t in type_bucket:
+            event = t['key']
+            event_count = t['doc_count']
+            res = {"target": target_name, "count": event_count, "type": event}
+            results.append(res)
+
+    return results
+
+    # with open(filename, 'w') as outfile:
+    #     json.dump(results, outfile, indent=4, sort_keys=False)
+
+def generate_sankey(data, filename='sankey.json'):
+    pass
\ No newline at end of file
diff --git a/distill/app.py b/distill/app.py
index a2bb380..3f9cb6a 100644
--- a/distill/app.py
+++ b/distill/app.py
@@ -13,14 +13,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from flask import request
-from flask import jsonify
-from distill import app
+from flask import Flask, request, jsonify
+
 from distill.models.brew import Brew
 from distill.models.userale import UserAle
-from distill.models.stout import Stout
+# from distill.models.stout import Stout
 from distill.algorithms.stats.hist import Hist
+from distill.version import __version__
 
+app = Flask(__name__)
+app.config.from_pyfile('config/config.cfg')
 
 @app.route('/', methods=['GET'])
 def index():
@@ -33,11 +35,9 @@
             $ curl -XGET http://localhost:8090
 
             {
-                    "author" : "Michelle Beard",
-                    "email" : "mbeard@draper.com",
                     "name": "Distill",
+                    "version" : "0.1.0",
                     "status" : true,
-                    "version" : "1.0",
                     "applications" : {
                             "xdata_v3" : {
                                     testing: 205,
@@ -50,12 +50,10 @@
                     }
             }
 
-    :return: Distill's status information as JSON blob
+    :return: Distill's status information
     """
     return jsonify(name="Distill",
-                   version="1.0 alpha",
-                   author="Michelle Beard",
-                   email="mbeard@draper.com",
+                   version=__version__,
                    status=Brew.get_status(),
                    applications=Brew.get_applications())
 
@@ -63,17 +61,43 @@
 @app.route('/create/<app_id>', methods=['POST', 'PUT'])
 def create(app_id):
     """
-    Registers an application in Distill.
+    Register an application.
+    @todo Need to include UserALE.js mapping information in a general sense.
 
     .. code-block:: bash
 
-            $ curl -XPOST http://localhost:8090/xdata_v3
+            $ curl -XPOST http://localhost:8090/create/xdata_v3
 
     :param app_id: Application name
-    :return: Newly created application's status as JSON blob
+    :return: Newly created application's status
     """
-    return Brew.create(app_id)
+    return jsonify(Brew.create(app_id))
 
+@app.route('/sankey/<app_id>', methods=['GET'])
+def sankey(app_id):
+    """
+    Generate node-link (sankey) and bar chart data for an application.
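+
+    .. code-block:: bash
+
+            $ curl -XGET http://localhost:8090/sankey/xdata_v3?from=now-1h&to=now&size=20
+
+    :param app_id: Application name
+    :return: Sankey node and link lists, plus bar chart counts per target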
+    """
+    from distill.algorithms.graphs.graph import GraphAnalytics
+
+    # Time range using Elasticsearch date math; defaults to the last 15 minutes
+    from_range = 'now-15m'
+    to_range = 'now'
+    if 'from' in request.args:
+        from_range = request.args.get('from')
+    if 'to' in request.args:
+        to_range = request.args.get('to')
+
+    ts_range = [from_range, to_range]
+
+    # Number of top targets to include
+    size = 20
+    if 'size' in request.args:
+        size = int(request.args.get('size'))
+
+    return jsonify(GraphAnalytics.generate_graph(app_id,
+                                                 time_range=ts_range,
+                                                 size=size))
 
 @app.route('/status/<app_id>', defaults={"app_type": None}, methods=['GET'])
 @app.route('/status/<app_id>/<app_type>', methods=['GET'])
@@ -94,9 +118,11 @@
             }
 
     :param app_id: Application name
-    :return: Registered applications meta data as JSON blob
+    :param app_type: Application type
+    :return: Registered applications meta data
     """
-    return Brew.read(app_id, app_type=app_type)
+    res = Brew.read(app_id, app_type=app_type)
+    return jsonify(res)
 
 
 @app.route('/update/<app_id>', methods=['POST', 'PUT'])
@@ -109,9 +135,9 @@
             $ curl -XPOST http://localhost:8090/update/xdata_v3?name="xdata_v4"
 
     :param app_id: Application name
-    :return: Boolean response message as JSON blob
+    :return: Boolean response message
     """
-    return Brew.update(app_id)
+    return jsonify(Brew.update(app_id))
 
 
 @app.route('/delete/<app_id>', methods=['DELETE'])
@@ -124,9 +150,10 @@
             $ curl -XDELETE http://localhost:8090/xdata_v3
 
     :param app_id: Application name
-    :return: Boolean response message as JSON blob
+    :return: Boolean response message
     """
-    return Brew.delete(app_id)
+    Brew.delete(app_id)
+    return jsonify(status="Deleted index %s" % app_id)
 
 
 @app.route('/search/<app_id>', defaults={"app_type": None}, methods=['GET'])
@@ -182,53 +209,31 @@
         return jsonify(error=msg)
 
 
-@app.route('/denoise/<app_id>', methods=['GET'])
-def denoise(app_id):
-    """
-    Bootstrap script to cleanup the raw logs. A document type called "parsed"
-    will be stored with new log created unless specified in the request.
-    Have option to save parsed results back to data store.
-    These parsed logs can be intergrated with STOUT results
-    by running the stout bootstrap script.
-
-    .. code-block:: bash
-
-            $ curl -XGET http://localhost:8090/denoise/xdata_v3?save=true&type=parsed
-
-    :param app_id: Application name
-    :return: [dict]
-    """
-    doc_type = 'parsed'
-    save = False
-    # q = request.args
-    # if 'save' in q:
-    #     save = str2bool(q.get('save'))
-    #     if 'type' in q:
-    #         # @TODO: Proper cleanup script needs to happen
-    #         doc_type = q.get('type')
-    return UserAle.denoise(app_id, doc_type=doc_type, save=save)
-
-
-@app.route('/stout', methods=['GET'])
-def merge_stout():
-    """
-    Bootstrap script to aggregate user ale logs to stout master answer table
-    This will save the merged results back to ES instance at new index stout
-    OR denoise data first, then merge with the stout index...
-    If STOUT is enabled, the select method expects a stout index
-    to exist or otherwise it will return an error message.
-
-    .. code-block:: bash
-
-            $ curl -XGET http://locahost:8090/stout/xdata_v3
-
-    :return: Status message
-    """
-    flag = app.config['ENABLE_STOUT']
-    if flag:
-        return Stout.ingest()
-    return jsonify(status="STOUT is disabled.")
-
+# @app.route('/denoise/<app_id>', methods=['GET'])
+# def denoise(app_id):
+#     """
+#     Bootstrap script to cleanup the raw logs. A document type called "parsed"
+#     will be stored with new log created unless specified in the request.
+#     Have option to save parsed results back to data store.
+#     These parsed logs can be integrated with STOUT results
+#     by running the stout bootstrap script.
+#
+#     .. code-block:: bash
+#
+#             $ curl -XGET http://localhost:8090/denoise/xdata_v3?save=true&type=parsed
+#
+#     :param app_id: Application name
+#     :return: [dict]
+#     """
+#     doc_type = 'parsed'
+#     save = False
+#     # q = request.args
+#     # if 'save' in q:
+#     #     save = str2bool(q.get('save'))
+#     #     if 'type' in q:
+#     #         # @TODO: Proper cleanup script needs to happen
+#     #         doc_type = q.get('type')
+#     return UserAle.denoise(app_id, doc_type=doc_type, save=save)
 
 @app.errorhandler(404)
 def page_not_found(error):
diff --git a/.travis.yml b/distill/config/__init__.py
similarity index 85%
rename from .travis.yml
rename to distill/config/__init__.py
index 9862ceb..6acb5d1 100644
--- a/.travis.yml
+++ b/distill/config/__init__.py
@@ -12,11 +12,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-language: python
-python:
-  - "2.7"
-install:
-  - "pip install -r requirements.txt"
-script:
-  - python setup.py develop test
diff --git a/distill/config/config.cfg b/distill/config/config.cfg
index 189e3ea..1717873 100644
--- a/distill/config/config.cfg
+++ b/distill/config/config.cfg
@@ -20,25 +20,7 @@
 HOST = '0.0.0.0'
 
 # Port
-PORT = 8090
-
-# Enable STOUT integration into Distill 
-ENABLE_STOUT = False
-SQLITEDB = '../path/to/stout/stout.db'
-MASTER = '../path/to/master/master_ans.csv'
-MAPPINGS = '../path/to/mappings/MOT_Mappings.csv'
-SELECTED = '../path/to/stout/selected_vars_for_distill.csv'
-
-# Elasticsearch Configuration
-ES_HOST = 'http://elasticsearch'
-ES_PORT = 9200
-HTTP_AUTH = None
-USE_SSL = False
-VERIFY_CERTS = False
-CA_CERTS = None
-CLIENT_CERT = None
-CLIENT_KEY = None
-TIMEOUT = 3
+PORT = 5000
 
 # Application threads. A common general assumption is
 # using 2 per available processor cores - to handle
@@ -47,7 +29,7 @@
 THREADS_PER_PAGE = 2
 
 # Enable protection agains *Cross-site Request Forgery (CSRF)*
-# CSRF_ENABLED     = True
+# CSRF_ENABLED = True
 
 # Use a secure, unique and absolutely secret key for
 # signing the data. 
diff --git a/.travis.yml b/distill/config/elasticsearch.py
similarity index 75%
copy from .travis.yml
copy to distill/config/elasticsearch.py
index 9862ceb..7e9489a 100644
--- a/.travis.yml
+++ b/distill/config/elasticsearch.py
@@ -13,10 +13,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-language: python
-python:
-  - "2.7"
-install:
-  - "pip install -r requirements.txt"
-script:
-  - python setup.py develop test
+ELASTICSEARCH_PARAMS = {
+    'host': ['vlsmsbx.draper.com'],
+    'port': 9200,
+    'http_auth': None,
+    'use_ssl': False,
+    'verify_certs': False,
+    'ca_certs': None,
+    'client_cert': None,
+    'client_key': None,
+    'timeout': 3
+}
\ No newline at end of file
diff --git a/distill/models/brew.py b/distill/models/brew.py
index 8357de6..ba4c871 100644
--- a/distill/models/brew.py
+++ b/distill/models/brew.py
@@ -15,15 +15,14 @@
 
 
 from elasticsearch import TransportError
-from flask import jsonify
+
 from distill import es
 
 
 class Brew (object):
     """
-    Distill supports basic CRUD operations and publishes the status
-    of an persistenct database. Eventually it will support ingesting
-    logs sent from a registered application.
+    Brew supports basic CRUD operations and publishes the status
+    of Elasticsearch.
     """
 
     @staticmethod
@@ -46,15 +45,16 @@
         :return: [dict] dictionary of all registered applications and meta info
         """
         doc = {}
-        query = {"aggs": {
-            "count_by_type": {
-                "terms": {
-                    "field": "_type",
-                    "size": 100
+        query = {
+            "aggs": {
+                "count_by_type": {
+                    "terms": {
+                        "field": "_type",
+                        "size": 100
+                    }
                 }
             }
         }
-        }
 
         try:
             cluster_status = es.cat.indices(h=["index"], pri=False)
@@ -97,7 +97,7 @@
         # ignore 400 cause by IndexAlreadyExistsException when creating index
         res = es.indices.create(index=app, ignore=[400, 404])
         doc = _get_cluster_status(app)
-        return jsonify(doc)
+        return doc
 
     @staticmethod
     def read(app, app_type=None):
@@ -134,7 +134,7 @@
         :return: [dict] dictionary of application and its meta information
         """
 
-        return jsonify(_get_cluster_status(app, app_type=app_type))
+        return _get_cluster_status(app, app_type=app_type)
 
     @staticmethod
     def update(app):
@@ -143,7 +143,7 @@
                 Currently  not implemented
         """
 
-        return jsonify(status="not implemented")
+        raise NotImplementedError()
 
     @staticmethod
     def delete(app):
@@ -161,8 +161,7 @@
         :return: [dict] status message of the event
         """
 
-        es.indices.close(index=app, ignore=[400, 404])
-        return jsonify(status="Deleted index %s" % app)
+        return es.indices.close(index=app, ignore=[400, 404])
 
 
 def _get_cluster_status(app, app_type=None):
@@ -226,15 +225,16 @@
     :return: [list] list of strings representing the fields names
     """
     d = list()
-    query = {"aggs": {
-        "fields": {
-            "terms": {
-                "field": "_field_names",
-                "size": 100
+    query = {
+        "aggs": {
+            "fields": {
+                "terms": {
+                    "field": "_field_names",
+                    "size": 100
+                }
             }
         }
     }
-    }
 
     try:
         response = es.search(index=app, doc_type=app_type, body=query)
diff --git a/distill/models/userale.py b/distill/models/userale.py
index a512a50..0d58bbf 100644
--- a/distill/models/userale.py
+++ b/distill/models/userale.py
@@ -15,7 +15,6 @@
 
 from flask import jsonify
 from distill import es
-from distill import Stout
 
 
 class UserAle (object):
@@ -92,10 +91,10 @@
         log_result = es.search(index=app, doc_type=app_type,
                                body=query, fields=filters, size=size)
 
-        stout_result = Stout.getSessions()
-
-        data = merged_results(log_result, stout_result)
-        return data
+        # stout_result = Stout.getSessions()
+        #
+        # data = merged_results(log_result, stout_result)
+        return log_result
 
     @staticmethod
     def denoise(app, app_type='parsed', save=False):
@@ -103,7 +102,6 @@
         """
         pass
 
-
 def merge_dicts(lst):
     """
     Combine a list of dictionaries together to form one complete dictionary
diff --git a/distill/server.py b/distill/server.py
index 9cd3d12..6c18b95 100644
--- a/distill/server.py
+++ b/distill/server.py
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from distill import app
+from distill.app import app
 
 
 def dev_server():
diff --git a/distill/utils/exceptions.py b/distill/utils/exceptions.py
index 5839bf1..a7eb630 100644
--- a/distill/utils/exceptions.py
+++ b/distill/utils/exceptions.py
@@ -14,12 +14,12 @@
 # limitations under the License.
 
 
-class Error (Exception):
+class Error(Exception):
     """Base class for exceptions."""
     pass
 
 
-class ValidationError (Error):
+class ValidationError(Error):
     """ Exceptions raised for errors in validated a url."""
 
     def __init__(self, url, msg):
diff --git a/distill/utils/validation.py b/distill/utils/validation.py
index 88c9661..0a1b3a7 100644
--- a/distill/utils/validation.py
+++ b/distill/utils/validation.py
@@ -16,26 +16,26 @@
 from distill.utils.exceptions import ValidationError
 
 
-def validate_request(q):
+def validate_request(query):
     """
     Parse out request message and validate inputs
 
-    :param q: Url query string
+    :param query: Url query string
     :raises ValidationError: if the query is missing required parameters
     """
-    if 'q' not in q:
+    if 'q' not in query:
         raise ValidationError("Missing required parameter: %s" % 'q')
     else:
         # Handle rest of parsing
         pass
 
 
-def str2bool(v):
+def str2bool(val):
     """
     Convert string expression to boolean
 
-    :param v: Input value
+    :param val: Input value
     :returns: Converted message as boolean type
     :rtype: bool
     """
-    return v.lower() in ("yes", "true", "t", "1")
+    return val.lower() in ("yes", "true", "t", "1")
diff --git a/distill/version.py b/distill/version.py
index ad2632f..d63fbb7 100644
--- a/distill/version.py
+++ b/distill/version.py
@@ -19,4 +19,4 @@
 and parsed by ``setup.py``.
 """
 
-__version__ = "0.0.5"
+__version__ = "0.1.0"
diff --git a/setup.py b/setup.py
index c64db18..a8b9853 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import absolute_import
 import setuptools
 import io
 import os
@@ -87,7 +86,7 @@
 ]
 
 install_requires = [
-    'elasticsearch-dsl >= 5.0.0',
+    'elasticsearch-dsl >= 5.0.0, < 6.0.0',
     'pandas >= 0.20.2',
     'Flask >= 0.12.2',
     'celery >= 4.0.2',
@@ -128,7 +127,6 @@
     'Programming Language :: Python :: 3.5',
     'Environment :: Web Environment',
     'Framework :: Flask',
-    'Framework :: Pytest',
     'Topic :: Internet :: Log Analysis'
 ]
 
@@ -138,8 +136,8 @@
     url="http://senssoft.incubator.apache.org",
     license="Apache Software License 2.0",
     author="Michelle Beard",
-    author_email="msbeard@apache.org",
-    description="An analytical framework for UserALE.",
+    author_email="mbeard@apache.org",
+    description="An analytical framework for UserALE and TAP.",
     long_description=__doc__,
     classifiers=classifiers,
     keywords="stout userale tap distill",
diff --git a/tests/test_app.py b/tests/test_app.py
index 3f44294..e39e847 100644
--- a/tests/test_app.py
+++ b/tests/test_app.py
@@ -22,4 +22,5 @@
 
 
 def test_example():
+    """Generic test"""
     assert True
diff --git a/tests/test_userale.py b/tests/test_userale.py
index dc64027..adba272 100644
--- a/tests/test_userale.py
+++ b/tests/test_userale.py
@@ -13,29 +13,27 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import unittest
 
-def test_example():
-    assert True
-    # with test_app.test_client () as c:
-    # 	rv = c.get ('/?tequila=42')
-    # 	assert request.args ['tequila'] == '42'
+from distill.app import app
 
-# import os
-# import flaskr
-# import unittest
-# import tempfile
+class AppTests(unittest.TestCase):
+    """
+    Flask test-client setup and teardown for application-level tests.
+    """
+    @classmethod
+    def setUpClass(cls):
+        pass
 
-# class FlaskrTestCase(unittest.TestCase):
+    @classmethod
+    def tearDownClass(cls):
+        pass
 
-#     def setUp(self):
-#         self.db_fd, flaskr.app.config['DATABASE'] = tempfile.mkstemp()
-#         flaskr.app.config['TESTING'] = True
-#         self.app = flaskr.app.test_client()
-#         flaskr.init_db()
+    def setUp(self):
+        # creates a test client
+        self.app = app.test_client()
+        # propagate the exceptions to the test client
+        self.app.testing = True
 
-#     def tearDown(self):
-#         os.close(self.db_fd)
-#         os.unlink(flaskr.app.config['DATABASE'])
-
-# if __name__ == '__main__':
-#     unittest.main()
+    def tearDown(self):
+        pass