[MARVIN-72] Add python-daemon
diff --git a/.travis.yml b/.travis.yml
index a28e24e..dd769f5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -33,65 +33,3 @@
       script: travis_retry sbt -batch ++$TRAVIS_SCALA_VERSION coverage test coverageReport
       after_success:
       - bash <(curl -s https://codecov.io/bash)
-    # Python Toolbox
-    # Python Toolbox - Linux
-    - language: python
-      os: linux
-      python:
-        - 2.7
-        - 3.6
-      before_install:
-        - cd python-toolbox
-        - travis_retry curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o ./spark-2.1.1-bin-hadoop2.6.tgz
-        - sudo tar -xf ./spark-2.1.1-bin-hadoop2.6.tgz
-        - mkdir -p marvin_data
-        - mkdir -p marvin_home
-        - export MARVIN_HOME=./marvin_home
-        - export MARVIN_DATA_PATH=./marvin_data
-        - export SPARK_HOME=./spark-2.1.1-bin-hadoop2.6
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update                    ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew unlink python@2           ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install openssl graphviz  ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py  ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo python get-pip.py         ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libsasl2-dev python-pip graphviz -y    ; fi
-        - travis_retry sudo pip install --upgrade pip
-        - travis_retry sudo pip install virtualenvwrapper --ignore-installed six
-        - source virtualenvwrapper.sh
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then mkvirtualenv marvin-env        ; fi
-      install:
-        - travis_retry pip install codecov
-        - travis_retry pip install unidecode
-        - make marvin
-      script:
-        - marvin test
-        - codecov
-    # Python Toolbox - OSX
-    - language: generic
-      os: osx
-      before_install:
-        - cd python-toolbox
-        - travis_retry curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o ./spark-2.1.1-bin-hadoop2.6.tgz
-        - sudo tar -xf ./spark-2.1.1-bin-hadoop2.6.tgz
-        - mkdir -p marvin_data
-        - mkdir -p marvin_home
-        - export MARVIN_HOME=./marvin_home
-        - export MARVIN_DATA_PATH=./marvin_data
-        - export SPARK_HOME=./spark-2.1.1-bin-hadoop2.6
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update                    ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew unlink python@2           ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install openssl graphviz  ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py  ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo python get-pip.py         ; fi
-        - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libsasl2-dev python-pip graphviz -y    ; fi
-        - travis_retry sudo pip install --upgrade pip
-        - travis_retry sudo pip install virtualenvwrapper --ignore-installed six
-        - source virtualenvwrapper.sh
-        - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then mkvirtualenv marvin-env        ; fi
-      install:
-        - travis_retry pip install codecov
-        - travis_retry pip install unidecode
-        - make marvin
-      script:
-        - marvin test
-        - codecov
diff --git a/python-toolbox/.gitignore b/python-daemon/.gitignore
similarity index 90%
rename from python-toolbox/.gitignore
rename to python-daemon/.gitignore
index 5b5eb4a..f8e099a 100644
--- a/python-toolbox/.gitignore
+++ b/python-daemon/.gitignore
@@ -13,7 +13,7 @@
 .DS_Store
 build
 .idea
-python-toolbox
+python-daemon
 .profiling
 dist
-.dev
+.dev
\ No newline at end of file
diff --git a/python-daemon/MANIFEST.in b/python-daemon/MANIFEST.in
new file mode 100644
index 0000000..d02cbfd
--- /dev/null
+++ b/python-daemon/MANIFEST.in
@@ -0,0 +1,6 @@
+include README.md
+include marvin_python_daemon/VERSION
+include tests/fixtures/config.sample
+include marvin_python_daemon/daemon_base/protos/*
+include marvin_python_daemon/engine_base/protos/*
+include marvin_python_daemon/extras/notebook_extensions/*
\ No newline at end of file
diff --git a/python-daemon/README.md b/python-daemon/README.md
new file mode 100644
index 0000000..83450e1
--- /dev/null
+++ b/python-daemon/README.md
@@ -0,0 +1,3 @@
+# Marvin Daemon
+
+A daemon that uses the gRPC protocol and contains all of the functionality of the old toolbox. It runs inside the Docker container and handles communication between the CLI and the container.
\ No newline at end of file
diff --git a/python-toolbox/bin/marvin b/python-daemon/bin/marvin-daemon
similarity index 67%
rename from python-toolbox/bin/marvin
rename to python-daemon/bin/marvin-daemon
index ac87ad5..deef365 100644
--- a/python-toolbox/bin/marvin
+++ b/python-daemon/bin/marvin-daemon
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,15 +15,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from __future__ import print_function
-
-import sys
+import copy
+import configparser
 import os.path
-
-from marvin_python_toolbox.config import find_inidir, parse_ini
-from marvin_python_toolbox.management import create_cli
-import marvin_python_toolbox as toolbox
-from marvin_python_toolbox import __version__ as TOOLBOX_VERSION
+import sys
+import os
+import grpc
+from marvin_python_daemon.common.utils import find_inidir, parse_ini
+import marvin_python_daemon as daemon
+from marvin_python_daemon import __version__ as DAEMON_VERSION
+from marvin_python_daemon.daemon_base.command_server import init_server
 
 # Find the ini directory
 inifilename = 'marvin.ini'
@@ -43,8 +44,8 @@
 
 os.environ["DEFAULT_CONFIG_PATH"] = inipath
 os.environ["MARVIN_ENGINE_PATH"] = inidir
-os.environ["MARVIN_TOOLBOX_PATH"] = toolbox.__path__[0]
-os.environ["TOOLBOX_VERSION"] = TOOLBOX_VERSION
+os.environ["MARVIN_DAEMON_PATH"] = daemon.__path__[0]
+os.environ["DAEMON_VERSION"] = DAEMON_VERSION
 
 if not os.getenv("LOG_LEVEL"):
     os.environ["LOG_LEVEL"] = 'INFO'
@@ -56,25 +57,20 @@
 
 config = parse_ini(inipath, config_defaults)
 
-package_name = config['marvin_package']
-package_path = config['marvin_packagedir']
-
 home = os.environ['HOME']
 os.environ.setdefault('SPARK_HOME', '/opt/spark')
-os.environ.setdefault('WORKON_HOME', os.path.join(home, '.virtualenvs'))
 os.environ.setdefault('MARVIN_HOME', os.path.join(home, 'marvin'))
 os.environ.setdefault('MARVIN_DATA_PATH', os.path.join(home, 'marvin/data'))
+os.environ.setdefault('MARVIN_LOG', os.path.join(home, 'marvin/logs'))
 
 marvin_data = os.environ['MARVIN_DATA_PATH']
 if not os.path.exists(marvin_data):
     os.makedirs(marvin_data)
 
+marvin_logs = os.environ['MARVIN_LOG']
+if not os.path.exists(marvin_logs):
+    os.makedirs(marvin_logs)
 
-type_ = config.get('marvin_type', None)
+config['base_path'] = os.getcwd()
 
-exclude_commands = config.get('marvin_exclude', None)
-
-cli = create_cli(package_name, package_path, type_=type_, exclude=exclude_commands, config=config)
-
-cli()
-
+init_server(config)
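
Editor's note: the rewritten entrypoint above drops the old create_cli path entirely; it locates marvin.ini, parses it, bootstraps a few environment variables, and hands the result to the gRPC server. A minimal sketch of the equivalent programmatic launch, assuming a marvin.ini exists in the current directory or one of its parents:

    # Sketch only: condenses bin/marvin-daemon, minus the environment
    # bootstrapping (MARVIN_HOME, MARVIN_DATA_PATH, MARVIN_LOG, ...).
    import os
    from marvin_python_daemon.common.utils import find_inidir, parse_ini
    from marvin_python_daemon.daemon_base.command_server import init_server

    inidir = find_inidir('marvin.ini')
    config = parse_ini(os.path.join(inidir, 'marvin.ini'))
    config['base_path'] = os.getcwd()
    init_server(config)  # blocks, serving gRPC on port 50057
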
diff --git a/python-toolbox/marvin_python_toolbox/VERSION b/python-daemon/marvin_python_daemon/VERSION
similarity index 83%
rename from python-toolbox/marvin_python_toolbox/VERSION
rename to python-daemon/marvin_python_daemon/VERSION
index bbdeab6..fa3de58 100644
--- a/python-toolbox/marvin_python_toolbox/VERSION
+++ b/python-daemon/marvin_python_daemon/VERSION
@@ -1 +1 @@
-0.0.5
+0.0.5
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/__init__.py b/python-daemon/marvin_python_daemon/__init__.py
similarity index 89%
rename from python-toolbox/marvin_python_toolbox/__init__.py
rename to python-daemon/marvin_python_daemon/__init__.py
index 9ba50c3..939fc7a 100644
--- a/python-toolbox/marvin_python_toolbox/__init__.py
+++ b/python-daemon/marvin_python_daemon/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,8 +17,6 @@
 
 import os
 
-from .manage import *
-from .common import *
 from .engine_base import *
 
 # Get package version number from "VERSION" file
diff --git a/python-toolbox/marvin_python_toolbox/common/__init__.py b/python-daemon/marvin_python_daemon/common/__init__.py
similarity index 91%
copy from python-toolbox/marvin_python_toolbox/common/__init__.py
copy to python-daemon/marvin_python_daemon/common/__init__.py
index efb0a92..305068e 100644
--- a/python-toolbox/marvin_python_toolbox/common/__init__.py
+++ b/python-daemon/marvin_python_daemon/common/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/python-toolbox/marvin_python_toolbox/common/config.py b/python-daemon/marvin_python_daemon/common/config.py
similarity index 85%
rename from python-toolbox/marvin_python_toolbox/common/config.py
rename to python-daemon/marvin_python_daemon/common/config.py
index 9aa86c2..e2b5db3 100644
--- a/python-toolbox/marvin_python_toolbox/common/config.py
+++ b/python-daemon/marvin_python_daemon/common/config.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,8 +24,7 @@
 
 # Use six to create code compatible with Python 2 and 3.
 # See http://pythonhosted.org/six/
-from .._compatibility import six
-from .._logging import get_logger
+from .log import get_logger
 
 from .exceptions import InvalidConfigException
 from .utils import from_json
@@ -41,10 +40,12 @@
     data = {}
     config_path = path  # try to get config path from args
     if not config_path:  # try to get config file from env
-        config_path = os.getenv('MARVIN_CONFIG_FILE') or os.getenv('CONFIG_FILE')
+        config_path = os.getenv(
+            'MARVIN_CONFIG_FILE') or os.getenv('CONFIG_FILE')
     if not config_path:  # use default file
         config_path = os.getenv("DEFAULT_CONFIG_PATH")
-    logger.info('Loading configuration values from "{path}"...'.format(path=config_path))
+    logger.info(
+        'Loading configuration values from "{path}"...'.format(path=config_path))
     config_parser = ConfigObj(config_path)
     try:
         data = config_parser[section]
@@ -100,15 +101,18 @@
 
         # if not found in context read default
         if not value and section != cls._default_sect:
-            value = cls._conf[cls._default_sect].get(key) if cls._default_sect in cls._conf else None
+            value = cls._conf[cls._default_sect].get(
+                key) if cls._default_sect in cls._conf else None
 
         if value is None:
             if 'default' in kwargs:  # behave as {}.get(x, default='fallback')
                 _def_value = kwargs['default']
-                logger.warn("Static configuration [{}] was not found. Using the default value [{}].".format(key, _def_value))
+                logger.warn("Static configuration [{}] was not found. Using the default value [{}].".format(
+                    key, _def_value))
                 return _def_value
             else:
-                raise InvalidConfigException(u'Not found entry: {}'.format(key))
+                raise InvalidConfigException(
+                    u'Not found entry: {}'.format(key))
 
         try:
             value = from_json(value)  # parse value
@@ -128,4 +132,3 @@
 
 # Alias
 Config = Configuration
-
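
Editor's note: Configuration.get keeps its {}.get-style fallback semantics after the reflow: a missing key returns the `default` kwarg (with a warning) and otherwise raises InvalidConfigException. A hedged usage sketch, assuming a config file has already been loaded and using a hypothetical key:

    from marvin_python_daemon.common.config import Config

    # Returns 30 with a warning if 'timeout' is absent; without
    # default=... a missing key raises InvalidConfigException.
    timeout = Config.get('timeout', default=30)
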
diff --git a/python-toolbox/marvin_python_toolbox/common/data.py b/python-daemon/marvin_python_daemon/common/data.py
similarity index 81%
rename from python-toolbox/marvin_python_toolbox/common/data.py
rename to python-daemon/marvin_python_daemon/common/data.py
index 12b905a..601645e 100644
--- a/python-toolbox/marvin_python_toolbox/common/data.py
+++ b/python-daemon/marvin_python_daemon/common/data.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,22 +25,14 @@
 
 # Use six to create code compatible with Python 2 and 3.
 # See http://pythonhosted.org/six/
-from .._compatibility import six
 from .utils import check_path
 from .exceptions import InvalidConfigException
-from six import with_metaclass
-from .._logging import get_logger
+from .log import get_logger
 
 logger = get_logger('common.data')
 
 
-class AbstractMarvinData(type):
-    @property
-    def data_path(cls):
-        return cls.get_data_path()
-
-
-class MarvinData(with_metaclass(AbstractMarvinData)):
+class MarvinData():
     _key = 'MARVIN_DATA_PATH'
 
     @classmethod
@@ -67,7 +59,7 @@
     @classmethod
     def _convert_path_to_key(cls, path):
         if path.startswith(os.path.sep):
-            path = os.path.relpath(path, start=cls.data_path)
+            path = os.path.relpath(path, start=cls.get_data_path())
         return '/'.join(path.split(os.path.sep))
 
     @classmethod
@@ -80,7 +72,7 @@
         :param relpath: path relative to "data_path"
         :return: str - data content
         """
-        filepath = os.path.join(cls.data_path, relpath)
+        filepath = os.path.join(cls.get_data_path(), relpath)
         with open(filepath) as fp:
             content = fp.read()
 
@@ -92,15 +84,17 @@
         Download file from a given url
         """
 
-        local_file_name = local_file_name if local_file_name else url.split('/')[-1]
-        filepath = os.path.join(cls.data_path, local_file_name)
+        local_file_name = local_file_name if local_file_name else url.split(
+            '/')[-1]
+        filepath = os.path.join(cls.get_data_path(), local_file_name)
 
         if not os.path.exists(filepath) or force:
             try:
                 headers = requests.head(url, allow_redirects=True).headers
                 length = headers.get('Content-Length')
 
-                logger.info("Starting download of {} file with {} bytes ...".format(url, length))
+                logger.info(
+                    "Starting download of {} file with {} bytes ...".format(url, length))
 
                 widgets = [
                     'Downloading file please wait...', progressbar.Percentage(),
@@ -108,7 +102,8 @@
                     ' ', progressbar.ETA(),
                     ' ', progressbar.FileTransferSpeed(),
                 ]
-                bar = progressbar.ProgressBar(widgets=widgets, max_value=int(length) + chunk_size).start()
+                bar = progressbar.ProgressBar(
+                    widgets=widgets, max_value=int(length) + chunk_size).start()
 
                 r = requests.get(url, stream=True)
 
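
Editor's note: with the AbstractMarvinData metaclass removed, the old `data_path` class property becomes explicit `get_data_path()` calls, but the classmethod API is otherwise unchanged. A short usage sketch (URL and file name are hypothetical):

    from marvin_python_daemon.common.data import MarvinData

    # Downloads into $MARVIN_DATA_PATH unless the file is already there
    # (force=True re-downloads), then reads the content back.
    MarvinData.download_file('https://example.com/dataset.csv',
                             local_file_name='dataset.csv')
    content = MarvinData.load_data('dataset.csv')
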
diff --git a/python-toolbox/marvin_python_toolbox/common/data_source_provider.py b/python-daemon/marvin_python_daemon/common/data_source_provider.py
similarity index 95%
rename from python-toolbox/marvin_python_toolbox/common/data_source_provider.py
rename to python-daemon/marvin_python_daemon/common/data_source_provider.py
index 17f982b..d0855f9 100644
--- a/python-toolbox/marvin_python_toolbox/common/data_source_provider.py
+++ b/python-daemon/marvin_python_daemon/common/data_source_provider.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/python-toolbox/marvin_python_toolbox/common/exceptions.py b/python-daemon/marvin_python_daemon/common/exceptions.py
similarity index 94%
rename from python-toolbox/marvin_python_toolbox/common/exceptions.py
rename to python-daemon/marvin_python_daemon/common/exceptions.py
index 3801298..1e03964 100644
--- a/python-toolbox/marvin_python_toolbox/common/exceptions.py
+++ b/python-daemon/marvin_python_daemon/common/exceptions.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -23,16 +23,19 @@
     Marvin Config Base Exception
     """
 
+
 class InvalidConfigException(ConfigException):
     """
     Invalid Marvin Config Base Exception
     """
 
+
 class InvalidJsonException(Exception):
     """
     Invalid JSON
     """
 
+
 class HTTPException(HTTPExceptionBase):
     """
     HTTP exception
diff --git a/python-toolbox/marvin_python_toolbox/common/http_client.py b/python-daemon/marvin_python_daemon/common/http_client.py
similarity index 90%
rename from python-toolbox/marvin_python_toolbox/common/http_client.py
rename to python-daemon/marvin_python_daemon/common/http_client.py
index ad73f49..34a06f0 100644
--- a/python-toolbox/marvin_python_toolbox/common/http_client.py
+++ b/python-daemon/marvin_python_daemon/common/http_client.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,8 +25,7 @@
 
 # Use six to create code compatible with Python 2 and 3.
 # See http://pythonhosted.org/six/
-from .._compatibility import six
-from .._logging import get_logger
+from .log import get_logger
 from .exceptions import HTTPException
 
 
@@ -105,25 +104,29 @@
     def get(self, path, data=None):
         """Encapsulates GET requests"""
         data = data or {}
-        response = requests.get(self.url(path), params=data, headers=self.request_header())
+        response = requests.get(
+            self.url(path), params=data, headers=self.request_header())
         return self.parse_response(response)
 
     def post(self, path, data=None):
         """Encapsulates POST requests"""
         data = data or {}
-        response = requests.post(self.url(path), data=to_json(data), headers=self.request_header())
+        response = requests.post(self.url(path), data=to_json(
+            data), headers=self.request_header())
         return self.parse_response(response)
 
     def put(self, path, data=None):
         """Encapsulates PUT requests"""
         data = data or {}
-        response = requests.put(self.url(path), data=to_json(data), headers=self.request_header())
+        response = requests.put(self.url(path), data=to_json(
+            data), headers=self.request_header())
         return self.parse_response(response)
 
     def delete(self, path, data=None):
         """Encapsulates DELETE requests"""
         data = data or {}
-        response = requests.delete(self.url(path), data=to_json(data), headers=self.request_header())
+        response = requests.delete(self.url(path), data=to_json(
+            data), headers=self.request_header())
         return self.parse_response(response)
 
 
@@ -170,7 +173,8 @@
         url = api_client.url(self.path)
         self.params.update({'page': self.page, 'per_page': self.limit})
 
-        response = requests.get(url, params=self.params, headers=api_client.request_header())
+        response = requests.get(url, params=self.params,
+                                headers=api_client.request_header())
         response = api_client.parse_response(response)
 
         try:
diff --git a/python-toolbox/marvin_python_toolbox/_logging.py b/python-daemon/marvin_python_daemon/common/log.py
similarity index 91%
rename from python-toolbox/marvin_python_toolbox/_logging.py
rename to python-daemon/marvin_python_daemon/common/log.py
index e869bde..4036a13 100644
--- a/python-toolbox/marvin_python_toolbox/_logging.py
+++ b/python-daemon/marvin_python_daemon/common/log.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,7 +25,7 @@
 import logging
 
 DEFAULT_LOG_LEVEL = logging.INFO
-DEFAULT_LOG_DIR = '/tmp'
+DEFAULT_LOG_DIR = os.environ.get('MARVIN_LOG', '/tmp')
 
 
 class Logger(logging.getLoggerClass()):
@@ -46,7 +46,7 @@
 logging.setLoggerClass(Logger)
 
 
-def get_logger(name, namespace='marvin_python_toolbox',
+def get_logger(name, namespace='marvin_ai',
                log_level=DEFAULT_LOG_LEVEL, log_dir=DEFAULT_LOG_DIR):
     """Build a logger that outputs to a file and to the console,"""
 
@@ -74,8 +74,8 @@
 
         file_path = str(os.path.join(log_path, log_filename))
 
-        if not os.path.exists(log_path):  # pragma: no cover
-            os.makedirs(log_path, mode=774)
+        if not os.path.exists(log_path):
+            os.makedirs(log_path)
 
         # Create a file handler
         file_handler = logging.FileHandler(file_path)
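
Editor's note: get_logger now defaults its namespace to 'marvin_ai' and its log directory to $MARVIN_LOG (falling back to /tmp), creating the directory on demand. A minimal sketch:

    from marvin_python_daemon.common.log import get_logger

    # Attaches both a console handler and a file handler under the log dir.
    logger = get_logger('my_module')
    logger.info('daemon module initialized')
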
diff --git a/python-toolbox/marvin_python_toolbox/common/profiling.py b/python-daemon/marvin_python_daemon/common/profiling.py
similarity index 91%
rename from python-toolbox/marvin_python_toolbox/common/profiling.py
rename to python-daemon/marvin_python_daemon/common/profiling.py
index 5528427..2b0b908 100644
--- a/python-toolbox/marvin_python_toolbox/common/profiling.py
+++ b/python-daemon/marvin_python_daemon/common/profiling.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,7 +14,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import os
 import json
 import subprocess
@@ -22,9 +21,8 @@
 import pstats
 import uuid
 from functools import wraps
-from .._compatibility import StringIO
-
-from .._logging import get_logger
+from io import StringIO
+from .log import get_logger
 
 logger = get_logger('profiling')
 
@@ -42,7 +40,8 @@
         stats_value = s.getvalue()
         html = '<pre>{}</pre>'.format(stats_value)
         if self.image_path:
-            html += '<img src="{}" style="margin: 0 auto;">'.format(self.image_path)
+            html += '<img src="{}" style="margin: 0 auto;">'.format(
+                self.image_path)
         return html
 
 
@@ -125,11 +124,13 @@
                     with open(info_path, 'w') as fp:
                         json.dump(info, fp, indent=2, encoding='utf-8')
                 except Exception as e:
-                    logger.error('An error occurred while saving %s: %s.', info_path, e)
+                    logger.error(
+                        'An error occurred while saving %s: %s.', info_path, e)
             stats.dump_stats(stats_path)
             # create profiling graph
             try:
-                subprocess.call(['gprof2dot', '-f', 'pstats', '-o', dot_path, stats_path])
+                subprocess.call(['gprof2dot', '-f', 'pstats',
+                                 '-o', dot_path, stats_path])
                 subprocess.call(['dot', '-Tpng', '-o', png_path, dot_path])
                 pr.image_path = png_path
             except Exception:
diff --git a/python-toolbox/marvin_python_toolbox/common/utils.py b/python-daemon/marvin_python_daemon/common/utils.py
similarity index 80%
rename from python-toolbox/marvin_python_toolbox/common/utils.py
rename to python-daemon/marvin_python_daemon/common/utils.py
index 9979f65..49aa3b3 100644
--- a/python-toolbox/marvin_python_toolbox/common/utils.py
+++ b/python-daemon/marvin_python_daemon/common/utils.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -28,12 +28,14 @@
 import hashlib
 import jsonschema
 import warnings
+import configparser
+import copy
 from slugify import slugify
 
 # Use six to create code compatible with Python 2 and 3.
 # See http://pythonhosted.org/six/
-from .._compatibility import xrange, text_type, quote
-from .._logging import get_logger
+from urllib.parse import quote
+from .log import get_logger
 from .exceptions import InvalidJsonException
 
 
@@ -109,7 +111,7 @@
 
 def chunks(lst, size):
     """Yield successive n-sized chunks from lst."""
-    for i in xrange(0, len(lst), size):
+    for i in range(0, len(lst), size):
         yield lst[i:i + size]
 
 
@@ -290,6 +292,60 @@
     :param url: str
     :return: str - encoded url
     """
-    if isinstance(url, text_type):
+    if isinstance(url, str):
         url = url.encode('utf8')
     return quote(url, ':/%?&=')
+
+
+def find_inidir(inifilename='marvin.ini'):
+    inidir = None
+    currentdir = os.getcwd()
+
+    while True:
+        logger.info('Looking for marvinini in {}'.format(currentdir))
+        if os.path.exists(os.path.join(currentdir, inifilename)):
+            inidir = currentdir
+            logger.info('marvinini found {}'.format(inidir))
+            break
+
+        parentdir = os.path.abspath(os.path.join(currentdir, os.pardir))
+        if currentdir == parentdir:
+            # currentdir is '/'
+            logger.info('marvinini not found')
+            break
+
+        currentdir = parentdir
+
+    return inidir
+
+
+def parse_ini(inipath, defaults=None):
+    if defaults is None:
+        defaults = {}
+
+    logger.info(
+        "Parsing marvinini '{}' with defaults '{}'".format(inipath, defaults))
+
+    config_raw = configparser.ConfigParser()
+    config_raw.read(inipath)
+
+    config = copy.deepcopy(defaults)
+
+    for section in config_raw.sections():
+        # First pass
+        for key, value in config_raw.items(section):
+            key = '_'.join((section, key)).lower()
+            logger.debug('Processing {}: {}'.format(key, value))
+            processed_value = value.format(**config)
+            config[key] = processed_value
+
+    # Second pass
+    for key, value in config.items():
+        processed_value = value.format(**config)
+        if ',' in processed_value:
+            processed_value = processed_value.split(',')
+        config[key] = processed_value
+
+    logger.info('marvinini loaded: {}'.format(config))
+
+    return config
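
Editor's note: parse_ini flattens each [section] into lowercased section_key entries, interpolating {placeholders} against values seen so far, then makes a second pass that also splits comma-separated values into lists. A sketch of the effect on a hypothetical marvin.ini:

    # marvin.ini:
    #   [marvin]
    #   package = my_engine
    #   package_dir = engines/{marvin_package}
    from marvin_python_daemon.common.utils import parse_ini

    config = parse_ini('/path/to/marvin.ini')
    # config is roughly:
    #   {'marvin_package': 'my_engine',
    #    'marvin_package_dir': 'engines/my_engine'}
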
diff --git a/python-toolbox/marvin_python_toolbox/common/__init__.py b/python-daemon/marvin_python_daemon/daemon_base/__init__.py
similarity index 91%
rename from python-toolbox/marvin_python_toolbox/common/__init__.py
rename to python-daemon/marvin_python_daemon/daemon_base/__init__.py
index efb0a92..77c2bbc 100644
--- a/python-toolbox/marvin_python_toolbox/common/__init__.py
+++ b/python-daemon/marvin_python_daemon/daemon_base/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,5 +13,4 @@
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
-# limitations under the License.
-
+# limitations under the License.
\ No newline at end of file
diff --git a/python-daemon/marvin_python_daemon/daemon_base/command_server.py b/python-daemon/marvin_python_daemon/daemon_base/command_server.py
new file mode 100644
index 0000000..2dfb7bf
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/daemon_base/command_server.py
@@ -0,0 +1,166 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import grpc
+import time
+import multiprocessing
+from .stubs import daemon_pb2
+from .stubs import daemon_pb2_grpc
+from concurrent import futures
+from distutils.util import strtobool
+from ..common.log import get_logger
+from ..management.engine import dryrun, engine_server
+from ..management.notebook import notebook, lab
+from ..management.test import test, tox, tdd
+
+logger = get_logger('daemon_base.command_server')
+
+command_list = ['DRYRUN', 'TEST', 'TDD',
+                'TOX', 'NOTEBOOK', 'LAB', 'GRPC']
+
+
+def call_dryrun(config, parameters):
+    profiling = strtobool(parameters['profiling'])
+
+    dryrun(config, parameters['action'], bool(profiling))
+
+
+def call_grpc(config, parameters):
+    max_workers = int(parameters['max_workers']) if parameters['max_workers'] else multiprocessing.cpu_count()
+    max_rpc_workers = int(parameters['max_rpc_workers']) if parameters['max_rpc_workers'] else multiprocessing.cpu_count()
+
+    return engine_server(config, parameters['action'], max_workers,
+                         max_rpc_workers)
+
+
+def call_notebook(config, parameters):
+    security = strtobool(parameters['enable_security'])
+
+    notebook(config, bool(security), parameters['port'])
+
+
+def call_lab(config, parameters):
+    security = strtobool(parameters['enable_security'])
+
+    lab(config, bool(security), parameters['port'])
+
+
+def call_test(config, parameters):
+    cov = strtobool(parameters['cov'])
+    no_capture = strtobool(parameters['no_capture'])
+    pdb = strtobool(parameters['pdb'])
+
+    test(config, bool(cov), bool(no_capture), bool(pdb), parameters['args'])
+
+
+def call_tdd(config, parameters):
+    cov = strtobool(parameters['cov'])
+    no_capture = strtobool(parameters['no_capture'])
+    pdb = strtobool(parameters['pdb'])
+    partial = strtobool(parameters['partial'])
+
+    tdd(config, bool(cov), bool(no_capture), bool(
+        pdb), bool(partial), parameters['args'])
+
+
+def call_tox(config, parameters):
+    tox(config, parameters['args'])
+
+
+CALLS = {
+    'DRYRUN': call_dryrun,
+    'TEST': call_test,
+    'TDD': call_tdd,
+    'TOX': call_tox,
+    'NOTEBOOK': call_notebook,
+    'LAB': call_lab,
+    'GRPC': call_grpc
+}
+
+
+class CommandServicer(daemon_pb2_grpc.CommandCallServicer):
+
+    def __init__(self, config):
+        self.config = config
+        self.command_running = None
+        self.command_processes = None
+
+    def callCommand(self, request, context):
+        response = daemon_pb2.Status()
+        self.command_running = command_list[request.command]
+        command_call = CALLS[self.command_running]
+
+        try:
+            logger.info("Command {0} called!".format(self.command_running))
+            self.command_processes = command_call(
+                self.config, request.parameters)
+            logger.info("Command {0} successful!".format(self.command_running))
+            logger.debug(self.command_processes)
+            if not self.command_processes:
+                self.command_running = None
+            response.status = daemon_pb2.Status.StatusType.OK
+        except Exception:
+            logger.exception(
+                "Command {0} failed!".format(self.command_running))
+            response.status = daemon_pb2.Status.StatusType.NOK
+
+        return response
+
+    def stopCommand(self, request, context):
+        response = daemon_pb2.Status()
+
+        try:
+            n_servers = 0
+            for server in self.command_processes:
+                server.stop(0)
+                n_servers += 1
+                logger.info("{0} servers terminated.".format(n_servers))
+            logger.info("Command {0} terminated.".format(self.command_running))
+            self.command_running = None
+            self.command_processes = None
+            response.status = daemon_pb2.Status.StatusType.OK
+        except Exception:
+            logger.exception(
+                "Unable to stop command: {0}.".format(self.command_running))
+            response.status = daemon_pb2.Status.StatusType.NOK
+
+        return response
+
+    def getState(self, request, context):
+        response = daemon_pb2.State()
+
+        response.engine_name = self.config['marvin_package']
+        response.command = 'None' if not self.command_running else self.command_running
+
+        return response
+
+
+def init_server(config):
+    server = grpc.server(futures.ThreadPoolExecutor(max_workers=multiprocessing.cpu_count()))
+
+    daemon_pb2_grpc.add_CommandCallServicer_to_server(
+        CommandServicer(config), server)
+
+    logger.info('Starting server. Listening on port 50057.')
+    server.add_insecure_port('[::]:50057')
+    server.start()
+
+    try:
+        while True:
+            time.sleep(10)
+    except KeyboardInterrupt:
+        server.stop(0)
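
Editor's note: init_server exposes a single CommandCall service on insecure port 50057; a command arrives as an enum value plus a string-to-string parameter map, and the handlers re-parse values with strtobool/int. A hedged client sketch against the generated stubs ('action' and 'profiling' are the keys call_dryrun reads; their values here are illustrative):

    import grpc
    from marvin_python_daemon.daemon_base.stubs import daemon_pb2, daemon_pb2_grpc

    channel = grpc.insecure_channel('localhost:50057')
    stub = daemon_pb2_grpc.CommandCallStub(channel)

    # Fire a DRYRUN; every parameter travels as a string.
    request = daemon_pb2.Command(
        command=daemon_pb2.Command.CommandType.DRYRUN,
        parameters={'action': 'all', 'profiling': 'false'})
    status = stub.callCommand(request)

    # Ask the daemon what is currently running.
    state = stub.getState(daemon_pb2.Request())
    print(state.engine_name, state.command)
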
diff --git a/python-daemon/marvin_python_daemon/daemon_base/protos/daemon.proto b/python-daemon/marvin_python_daemon/daemon_base/protos/daemon.proto
new file mode 100644
index 0000000..f603ba4
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/daemon_base/protos/daemon.proto
@@ -0,0 +1,54 @@
+/*
+Copyright [2020] [Apache Software Foundation]
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+ */
+syntax = "proto3";
+
+message Command {
+    enum CommandType {
+        DRYRUN = 0;
+        TEST = 1;
+        TDD = 2;
+        TOX = 3;
+        NOTEBOOK = 4;
+        LAB = 5;
+        GRPC = 6;
+        HTTP = 7;
+    }
+    CommandType command = 1;
+    map<string, string> parameters = 2;
+}
+
+// gRPC does not allow rpc calls without passing a message
+message Interruption {}
+message Request {}
+
+message Status {
+    enum StatusType{
+        OK = 0;
+        NOK = 1;
+    }
+    StatusType status = 1;
+}
+
+message State {
+    string engine_name = 1;
+    string command = 2;
+}
+
+service CommandCall {
+    rpc callCommand(Command) returns (Status) {}
+    rpc stopCommand(Interruption) returns (Status) {}
+    rpc getState(Request) returns (State) {}
+}
\ No newline at end of file
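
Editor's note: the stub modules that follow were generated from this proto. For reference, a typical regeneration command (grpcio-tools assumed installed; the exact invocation is not recorded in this PR):

    python -m grpc_tools.protoc -I marvin_python_daemon/daemon_base/protos \
        --python_out=marvin_python_daemon/daemon_base/stubs \
        --grpc_python_out=marvin_python_daemon/daemon_base/stubs \
        marvin_python_daemon/daemon_base/protos/daemon.proto
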
diff --git a/python-toolbox/marvin_python_toolbox/common/__init__.py b/python-daemon/marvin_python_daemon/daemon_base/stubs/__init__.py
similarity index 91%
copy from python-toolbox/marvin_python_toolbox/common/__init__.py
copy to python-daemon/marvin_python_daemon/daemon_base/stubs/__init__.py
index efb0a92..77c2bbc 100644
--- a/python-toolbox/marvin_python_toolbox/common/__init__.py
+++ b/python-daemon/marvin_python_daemon/daemon_base/stubs/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -13,5 +13,4 @@
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
-# limitations under the License.
-
+# limitations under the License.
\ No newline at end of file
diff --git a/python-daemon/marvin_python_daemon/daemon_base/stubs/daemon_pb2.py b/python-daemon/marvin_python_daemon/daemon_base/stubs/daemon_pb2.py
new file mode 100644
index 0000000..6c6c6be
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/daemon_base/stubs/daemon_pb2.py
@@ -0,0 +1,400 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: daemon.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+    name='daemon.proto',
+    package='',
+    syntax='proto3',
+    serialized_options=None,
+    serialized_pb=b'\n\x0c\x64\x61\x65mon.proto\"\xf3\x01\n\x07\x43ommand\x12%\n\x07\x63ommand\x18\x01 \x01(\x0e\x32\x14.Command.CommandType\x12,\n\nparameters\x18\x02 \x03(\x0b\x32\x18.Command.ParametersEntry\x1a\x31\n\x0fParametersEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"`\n\x0b\x43ommandType\x12\n\n\x06\x44RYRUN\x10\x00\x12\x08\n\x04TEST\x10\x01\x12\x07\n\x03TDD\x10\x02\x12\x07\n\x03TOX\x10\x03\x12\x0c\n\x08NOTEBOOK\x10\x04\x12\x07\n\x03LAB\x10\x05\x12\x08\n\x04GRPC\x10\x06\x12\x08\n\x04HTTP\x10\x07\"\x0e\n\x0cInterruption\"\t\n\x07Request\"K\n\x06Status\x12\"\n\x06status\x18\x01 \x01(\x0e\x32\x12.Status.StatusType\"\x1d\n\nStatusType\x12\x06\n\x02OK\x10\x00\x12\x07\n\x03NOK\x10\x01\"-\n\x05State\x12\x13\n\x0b\x65ngine_name\x18\x01 \x01(\t\x12\x0f\n\x07\x63ommand\x18\x02 \x01(\t2z\n\x0b\x43ommandCall\x12\"\n\x0b\x63\x61llCommand\x12\x08.Command\x1a\x07.Status\"\x00\x12\'\n\x0bstopCommand\x12\r.Interruption\x1a\x07.Status\"\x00\x12\x1e\n\x08getState\x12\x08.Request\x1a\x06.State\"\x00\x62\x06proto3'
+)
+
+
+_COMMAND_COMMANDTYPE = _descriptor.EnumDescriptor(
+    name='CommandType',
+    full_name='Command.CommandType',
+    filename=None,
+    file=DESCRIPTOR,
+    values=[
+        _descriptor.EnumValueDescriptor(
+            name='DRYRUN', index=0, number=0,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='TEST', index=1, number=1,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='TDD', index=2, number=2,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='TOX', index=3, number=3,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='NOTEBOOK', index=4, number=4,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='LAB', index=5, number=5,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='GRPC', index=6, number=6,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='HTTP', index=7, number=7,
+            serialized_options=None,
+            type=None),
+    ],
+    containing_type=None,
+    serialized_options=None,
+    serialized_start=164,
+    serialized_end=260,
+)
+_sym_db.RegisterEnumDescriptor(_COMMAND_COMMANDTYPE)
+
+_STATUS_STATUSTYPE = _descriptor.EnumDescriptor(
+    name='StatusType',
+    full_name='Status.StatusType',
+    filename=None,
+    file=DESCRIPTOR,
+    values=[
+        _descriptor.EnumValueDescriptor(
+            name='OK', index=0, number=0,
+            serialized_options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='NOK', index=1, number=1,
+            serialized_options=None,
+            type=None),
+    ],
+    containing_type=None,
+    serialized_options=None,
+    serialized_start=335,
+    serialized_end=364,
+)
+_sym_db.RegisterEnumDescriptor(_STATUS_STATUSTYPE)
+
+
+_COMMAND_PARAMETERSENTRY = _descriptor.Descriptor(
+    name='ParametersEntry',
+    full_name='Command.ParametersEntry',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='key', full_name='Command.ParametersEntry.key', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=b"".decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+        _descriptor.FieldDescriptor(
+            name='value', full_name='Command.ParametersEntry.value', index=1,
+            number=2, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=b"".decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    serialized_options=b'8\001',
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=113,
+    serialized_end=162,
+)
+
+_COMMAND = _descriptor.Descriptor(
+    name='Command',
+    full_name='Command',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='command', full_name='Command.command', index=0,
+            number=1, type=14, cpp_type=8, label=1,
+            has_default_value=False, default_value=0,
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+        _descriptor.FieldDescriptor(
+            name='parameters', full_name='Command.parameters', index=1,
+            number=2, type=11, cpp_type=10, label=3,
+            has_default_value=False, default_value=[],
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+    ],
+    extensions=[
+    ],
+    nested_types=[_COMMAND_PARAMETERSENTRY, ],
+    enum_types=[
+        _COMMAND_COMMANDTYPE,
+    ],
+    serialized_options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=17,
+    serialized_end=260,
+)
+
+
+_INTERRUPTION = _descriptor.Descriptor(
+    name='Interruption',
+    full_name='Interruption',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    serialized_options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=262,
+    serialized_end=276,
+)
+
+
+_REQUEST = _descriptor.Descriptor(
+    name='Request',
+    full_name='Request',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    serialized_options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=278,
+    serialized_end=287,
+)
+
+
+_STATUS = _descriptor.Descriptor(
+    name='Status',
+    full_name='Status',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='status', full_name='Status.status', index=0,
+            number=1, type=14, cpp_type=8, label=1,
+            has_default_value=False, default_value=0,
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+        _STATUS_STATUSTYPE,
+    ],
+    serialized_options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=289,
+    serialized_end=364,
+)
+
+
+_STATE = _descriptor.Descriptor(
+    name='State',
+    full_name='State',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='engine_name', full_name='State.engine_name', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=b"".decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+        _descriptor.FieldDescriptor(
+            name='command', full_name='State.command', index=1,
+            number=2, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=b"".decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            serialized_options=None, file=DESCRIPTOR),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    serialized_options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=366,
+    serialized_end=411,
+)
+
+_COMMAND_PARAMETERSENTRY.containing_type = _COMMAND
+_COMMAND.fields_by_name['command'].enum_type = _COMMAND_COMMANDTYPE
+_COMMAND.fields_by_name['parameters'].message_type = _COMMAND_PARAMETERSENTRY
+_COMMAND_COMMANDTYPE.containing_type = _COMMAND
+_STATUS.fields_by_name['status'].enum_type = _STATUS_STATUSTYPE
+_STATUS_STATUSTYPE.containing_type = _STATUS
+DESCRIPTOR.message_types_by_name['Command'] = _COMMAND
+DESCRIPTOR.message_types_by_name['Interruption'] = _INTERRUPTION
+DESCRIPTOR.message_types_by_name['Request'] = _REQUEST
+DESCRIPTOR.message_types_by_name['Status'] = _STATUS
+DESCRIPTOR.message_types_by_name['State'] = _STATE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+Command = _reflection.GeneratedProtocolMessageType('Command', (_message.Message,), {
+
+    'ParametersEntry': _reflection.GeneratedProtocolMessageType('ParametersEntry', (_message.Message,), {
+        'DESCRIPTOR': _COMMAND_PARAMETERSENTRY,
+        '__module__': 'daemon_pb2'
+        # @@protoc_insertion_point(class_scope:Command.ParametersEntry)
+    }),
+    'DESCRIPTOR': _COMMAND,
+    '__module__': 'daemon_pb2'
+    # @@protoc_insertion_point(class_scope:Command)
+})
+_sym_db.RegisterMessage(Command)
+_sym_db.RegisterMessage(Command.ParametersEntry)
+
+Interruption = _reflection.GeneratedProtocolMessageType('Interruption', (_message.Message,), {
+    'DESCRIPTOR': _INTERRUPTION,
+    '__module__': 'daemon_pb2'
+    # @@protoc_insertion_point(class_scope:Interruption)
+})
+_sym_db.RegisterMessage(Interruption)
+
+Request = _reflection.GeneratedProtocolMessageType('Request', (_message.Message,), {
+    'DESCRIPTOR': _REQUEST,
+    '__module__': 'daemon_pb2'
+    # @@protoc_insertion_point(class_scope:Request)
+})
+_sym_db.RegisterMessage(Request)
+
+Status = _reflection.GeneratedProtocolMessageType('Status', (_message.Message,), {
+    'DESCRIPTOR': _STATUS,
+    '__module__': 'daemon_pb2'
+    # @@protoc_insertion_point(class_scope:Status)
+})
+_sym_db.RegisterMessage(Status)
+
+State = _reflection.GeneratedProtocolMessageType('State', (_message.Message,), {
+    'DESCRIPTOR': _STATE,
+    '__module__': 'daemon_pb2'
+    # @@protoc_insertion_point(class_scope:State)
+})
+_sym_db.RegisterMessage(State)
+
+
+_COMMAND_PARAMETERSENTRY._options = None
+
+_COMMANDCALL = _descriptor.ServiceDescriptor(
+    name='CommandCall',
+    full_name='CommandCall',
+    file=DESCRIPTOR,
+    index=0,
+    serialized_options=None,
+    serialized_start=413,
+    serialized_end=535,
+    methods=[
+        _descriptor.MethodDescriptor(
+            name='callCommand',
+            full_name='CommandCall.callCommand',
+            index=0,
+            containing_service=None,
+            input_type=_COMMAND,
+            output_type=_STATUS,
+            serialized_options=None,
+        ),
+        _descriptor.MethodDescriptor(
+            name='stopCommand',
+            full_name='CommandCall.stopCommand',
+            index=1,
+            containing_service=None,
+            input_type=_INTERRUPTION,
+            output_type=_STATUS,
+            serialized_options=None,
+        ),
+        _descriptor.MethodDescriptor(
+            name='getState',
+            full_name='CommandCall.getState',
+            index=2,
+            containing_service=None,
+            input_type=_REQUEST,
+            output_type=_STATE,
+            serialized_options=None,
+        ),
+    ])
+_sym_db.RegisterServiceDescriptor(_COMMANDCALL)
+
+DESCRIPTOR.services_by_name['CommandCall'] = _COMMANDCALL
+
+# @@protoc_insertion_point(module_scope)
diff --git a/python-daemon/marvin_python_daemon/daemon_base/stubs/daemon_pb2_grpc.py b/python-daemon/marvin_python_daemon/daemon_base/stubs/daemon_pb2_grpc.py
new file mode 100644
index 0000000..b0ab79c
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/daemon_base/stubs/daemon_pb2_grpc.py
@@ -0,0 +1,147 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+# Use a relative import so the generated daemon_pb2 module resolves inside this package.
+from . import daemon_pb2 as daemon__pb2
+
+
+class CommandCallStub(object):
+    """Missing associated documentation comment in .proto file"""
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+            channel: A grpc.Channel.
+        """
+        self.callCommand = channel.unary_unary(
+            '/CommandCall/callCommand',
+            request_serializer=daemon__pb2.Command.SerializeToString,
+            response_deserializer=daemon__pb2.Status.FromString,
+        )
+        self.stopCommand = channel.unary_unary(
+            '/CommandCall/stopCommand',
+            request_serializer=daemon__pb2.Interruption.SerializeToString,
+            response_deserializer=daemon__pb2.Status.FromString,
+        )
+        self.getState = channel.unary_unary(
+            '/CommandCall/getState',
+            request_serializer=daemon__pb2.Request.SerializeToString,
+            response_deserializer=daemon__pb2.State.FromString,
+        )
+
+
+class CommandCallServicer(object):
+    """Missing associated documentation comment in .proto file"""
+
+    def callCommand(self, request, context):
+        """Missing associated documentation comment in .proto file"""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def stopCommand(self, request, context):
+        """Missing associated documentation comment in .proto file"""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def getState(self, request, context):
+        """Missing associated documentation comment in .proto file"""
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_CommandCallServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        'callCommand': grpc.unary_unary_rpc_method_handler(
+            servicer.callCommand,
+            request_deserializer=daemon__pb2.Command.FromString,
+            response_serializer=daemon__pb2.Status.SerializeToString,
+        ),
+        'stopCommand': grpc.unary_unary_rpc_method_handler(
+            servicer.stopCommand,
+            request_deserializer=daemon__pb2.Interruption.FromString,
+            response_serializer=daemon__pb2.Status.SerializeToString,
+        ),
+        'getState': grpc.unary_unary_rpc_method_handler(
+            servicer.getState,
+            request_deserializer=daemon__pb2.Request.FromString,
+            response_serializer=daemon__pb2.State.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        'CommandCall', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+# This class is part of an EXPERIMENTAL API.
+
+
+class CommandCall(object):
+    """Missing associated documentation comment in .proto file"""
+
+    @staticmethod
+    def callCommand(request,
+                    target,
+                    options=(),
+                    channel_credentials=None,
+                    call_credentials=None,
+                    compression=None,
+                    wait_for_ready=None,
+                    timeout=None,
+                    metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/CommandCall/callCommand',
+                                             daemon__pb2.Command.SerializeToString,
+                                             daemon__pb2.Status.FromString,
+                                             options, channel_credentials,
+                                             call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def stopCommand(request,
+                    target,
+                    options=(),
+                    channel_credentials=None,
+                    call_credentials=None,
+                    compression=None,
+                    wait_for_ready=None,
+                    timeout=None,
+                    metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/CommandCall/stopCommand',
+                                             daemon__pb2.Interruption.SerializeToString,
+                                             daemon__pb2.Status.FromString,
+                                             options, channel_credentials,
+                                             call_credentials, compression, wait_for_ready, timeout, metadata)
+
+    @staticmethod
+    def getState(request,
+                 target,
+                 options=(),
+                 channel_credentials=None,
+                 call_credentials=None,
+                 compression=None,
+                 wait_for_ready=None,
+                 timeout=None,
+                 metadata=None):
+        return grpc.experimental.unary_unary(request, target, '/CommandCall/getState',
+                                             daemon__pb2.Request.SerializeToString,
+                                             daemon__pb2.State.FromString,
+                                             options, channel_credentials,
+                                             call_credentials, compression, wait_for_ready, timeout, metadata)
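
For orientation, a minimal client-side sketch of driving this CommandCall service through the generated stub. The import path and the port below are assumptions for illustration, not something this patch pins down:

    import grpc

    # Paths assumed; import from wherever the generated daemon stubs live.
    import daemon_pb2
    import daemon_pb2_grpc

    # The daemon address/port is a placeholder, not a project default.
    with grpc.insecure_channel('localhost:50057') as channel:
        stub = daemon_pb2_grpc.CommandCallStub(channel)
        # getState takes a (here empty) Request message and returns a State.
        state = stub.getState(daemon_pb2.Request())
        print(state)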
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/__init__.py b/python-daemon/marvin_python_daemon/engine_base/__init__.py
similarity index 95%
rename from python-toolbox/marvin_python_toolbox/engine_base/__init__.py
rename to python-daemon/marvin_python_daemon/engine_base/__init__.py
index 70dd9f9..1b3d4ee 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/__init__.py
+++ b/python-daemon/marvin_python_daemon/engine_base/__init__.py
@@ -19,5 +19,4 @@
 from .engine_base_prediction import EngineBasePrediction
 from .engine_base_data_handler import EngineBaseDataHandler
 from .engine_base_training import EngineBaseTraining
-from .stubs import actions_pb2, actions_pb2_grpc
-from .serializers import KerasSerializer
\ No newline at end of file
+from .stubs import actions_pb2, actions_pb2_grpc
\ No newline at end of file
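
Since KerasSerializer is no longer re-exported from this package, code that needs it would now import it from the serializers subpackage directly, e.g.:

    # KerasSerializer still exists; only the package-level re-export was dropped.
    from marvin_python_daemon.engine_base.serializers import KerasSerializer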
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_action.py b/python-daemon/marvin_python_daemon/engine_base/engine_base_action.py
similarity index 76%
rename from python-toolbox/marvin_python_toolbox/engine_base/engine_base_action.py
rename to python-daemon/marvin_python_daemon/engine_base/engine_base_action.py
index bb2eda1..485b620 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_action.py
+++ b/python-daemon/marvin_python_daemon/engine_base/engine_base_action.py
@@ -27,11 +27,11 @@
 from .stubs.actions_pb2 import BatchActionResponse, OnlineActionResponse, ReloadResponse, HealthCheckResponse
 from .stubs import actions_pb2_grpc
 
-from .._compatibility import six
-from .._logging import get_logger
+from ..common.log import get_logger
 
 
-__all__ = ['EngineBaseAction', 'EngineBaseBatchAction', 'EngineBaseOnlineAction']
+__all__ = ['EngineBaseAction',
+           'EngineBaseBatchAction', 'EngineBaseOnlineAction']
 logger = get_logger('engine_base_action')
 
 
@@ -48,27 +48,35 @@
     def __init__(self, **kwargs):
         self.action_name = self.__class__.__name__
         self._params = self._get_arg(kwargs=kwargs, arg='params')
-        self._persistence_mode = self._get_arg(kwargs=kwargs, arg='persistence_mode', default_value='memory')
-        self._default_root_path = self._get_arg(kwargs=kwargs, arg='default_root_path', default_value=os.path.join(os.environ['MARVIN_DATA_PATH'], '.artifacts'))
-        self._is_remote_calling = self._get_arg(kwargs=kwargs, arg='is_remote_calling', default_value=False)
-        logger.debug("Starting {} engine action with {} persistence mode...".format(self.__class__.__name__, self._persistence_mode))
+        self._persistence_mode = self._get_arg(
+            kwargs=kwargs, arg='persistence_mode', default_value='memory')
+        self._default_root_path = self._get_arg(kwargs=kwargs, arg='default_root_path', default_value=os.path.join(
+            os.environ['MARVIN_DATA_PATH'], '.artifacts'))
+        logger.info("default_root_path: {}".format(self._default_root_path))
+        self._is_remote_calling = self._get_arg(
+            kwargs=kwargs, arg='is_remote_calling', default_value=False)
+        logger.info("Starting {} engine action with {} persistence mode...".format(
+            self.__class__.__name__, self._persistence_mode))
 
     def _get_arg(self, kwargs, arg, default_value=None):
         return kwargs.get(arg, default_value)
 
     def _get_object_file_path(self, object_reference):
-        engine_name = self.__module__.split('.')[0].replace('marvin_', '').replace('_engine', '')
+        engine_name = self.__module__.split('.')[0].replace(
+            'marvin_', '').replace('_engine', '')
         directory = os.path.join(self._default_root_path, engine_name)
 
         if not os.path.exists(directory):
             os.makedirs(directory)
 
+        logger.info("Object file path: {}".format(os.path.join(directory, object_reference.replace('_', ''))))
         return os.path.join(directory, "{}".format(object_reference.replace('_', '')))
 
     def _serializer_dump(self, obj, object_file_path):
         if object_file_path.split(os.sep)[-1] == 'metrics':
             with open(object_file_path, 'w') as f:
-                json.dump(obj, f, sort_keys=True, indent=4, separators=(',', ': '))
+                json.dump(obj, f, sort_keys=True,
+                          indent=4, separators=(',', ': '))
         else:
             serializer.dump(obj, object_file_path, protocol=2, compress=3)
 
@@ -82,7 +90,8 @@
     def _save_obj(self, object_reference, obj):
         if not self._is_remote_calling:
             if getattr(self, object_reference, None) is not None:
-                logger.error("Object {} must be assign only once in each action".format(object_reference))
+                logger.error(
+                    "Object {} must be assign only once in each action".format(object_reference))
                 raise Exception('MultipleAssignException', object_reference)
 
         setattr(self, object_reference, obj)
@@ -95,18 +104,21 @@
             self._local_saved_objects[object_reference] = object_file_path
 
     def _load_obj(self, object_reference, force=False):
-        object_reference = object_reference if object_reference.startswith('_') else '_%s' % object_reference
+        object_reference = object_reference if object_reference.startswith(
+            '_') else '_%s' % object_reference
         if (getattr(self, object_reference, None) is None and self._persistence_mode == 'local') or force:
             object_file_path = self._get_object_file_path(object_reference)
             logger.info("Loading object from {}".format(object_file_path))
-            setattr(self, object_reference, self._serializer_load(object_file_path))
+            setattr(self, object_reference,
+                    self._serializer_load(object_file_path))
             logger.info("Object {} loaded!".format(object_reference))
 
         return getattr(self, object_reference)
 
     def _release_local_saved_objects(self):
         for object_reference in self._local_saved_objects.keys():
-            logger.info("Removing object {} from memory..".format(object_reference))
+            logger.info(
+                "Removing object {} from memory..".format(object_reference))
             setattr(self, object_reference, None)
 
         self._local_saved_objects = {}
@@ -114,13 +126,14 @@
     @classmethod
     def retrieve_obj(self, object_file_path):
         logger.info("Retrieve object from {}".format(object_file_path))
-        return serializer.load(object_file_path)
+        return self._serializer_load(self, object_file_path)
 
     def _remote_reload(self, request, context):
         protocol = request.protocol
         artifacts = request.artifacts
 
-        logger.info("Received message from client with protocol [{}] to reload the [{}] artifacts...".format(protocol, artifacts))
+        logger.info("Received message from client with protocol [{}] to reload the [{}] artifacts...".format(
+            protocol, artifacts))
 
         message = "Reloaded"
 
@@ -137,11 +150,12 @@
         return response_message
 
     def _health_check(self, request, context):
-        logger.info("Received message from client with protocol health check [{}] artifacts...".format(request.artifacts))
+        logger.info("Received message from client with protocol health check [{}] artifacts...".format(
+            request.artifacts))
         try:
             if request.artifacts:
                 for artifact in request.artifacts.split(","):
-                    if not getattr(self, artifact):
+                    if getattr(self, artifact) is None:
                         return HealthCheckResponse(status=HealthCheckResponse.NOK)
             return HealthCheckResponse(status=HealthCheckResponse.OK)
 
@@ -166,7 +180,8 @@
         logger.info("Finish of the {} execute method!".format(self.action_name))
 
     def _remote_execute(self, request, context):
-        logger.info("Received message from client and sending to engine action...")
+        logger.info(
+            "Received message from client and sending to engine action...")
         logger.debug("Received Params: {}".format(request.params))
 
         params = json.loads(request.params) if request.params else self._params
@@ -182,7 +197,8 @@
         return response_message
 
     def _prepare_remote_server(self, port, workers, rpc_workers):
-        server = grpc.server(thread_pool=futures.ThreadPoolExecutor(max_workers=workers), maximum_concurrent_rpcs=rpc_workers)
+        server = grpc.server(thread_pool=futures.ThreadPoolExecutor(
+            max_workers=workers), maximum_concurrent_rpcs=rpc_workers)
         actions_pb2_grpc.add_BatchActionHandlerServicer_to_server(self, server)
         server.add_insecure_port('[::]:{}'.format(port))
         return server
@@ -197,21 +213,25 @@
 
     def _pipeline_execute(self, input_message, params):
         if self._previous_step:
-            input_message = self._previous_step._pipeline_execute(input_message, params)
+            input_message = self._previous_step._pipeline_execute(
+                input_message, params)
 
         logger.info("Start of the {} execute method!".format(self.action_name))
         return self.execute(input_message, params)
         logger.info("Finish of the {} execute method!".format(self.action_name))
 
     def _remote_execute(self, request, context):
-        logger.info("Received message from client and sending to engine action...")
+        logger.info(
+            "Received message from client and sending to engine action...")
         logger.debug("Received Params: {}".format(request.params))
         logger.debug("Received Message: {}".format(request.message))
 
-        input_message = json.loads(request.message) if request.message else None
+        input_message = json.loads(
+            request.message) if request.message else None
         params = json.loads(request.params) if request.params else self._params
 
-        _message = self._pipeline_execute(input_message=input_message, params=params)
+        _message = self._pipeline_execute(
+            input_message=input_message, params=params)
 
         logger.info("Handling returned message from engine action...")
 
@@ -224,7 +244,9 @@
         return response_message
 
     def _prepare_remote_server(self, port, workers, rpc_workers):
-        server = grpc.server(thread_pool=futures.ThreadPoolExecutor(max_workers=workers), maximum_concurrent_rpcs=rpc_workers)
-        actions_pb2_grpc.add_OnlineActionHandlerServicer_to_server(self, server)
+        server = grpc.server(thread_pool=futures.ThreadPoolExecutor(
+            max_workers=workers), maximum_concurrent_rpcs=rpc_workers)
+        actions_pb2_grpc.add_OnlineActionHandlerServicer_to_server(
+            self, server)
         server.add_insecure_port('[::]:{}'.format(port))
         return server
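
_prepare_remote_server is what ultimately exposes an action over gRPC. A sketch of the end-to-end wiring under stated assumptions: the class name, port, and worker counts are illustrative, and MARVIN_DATA_PATH must be set because the constructor derives default_root_path from it:

    import os
    from marvin_python_daemon.engine_base.engine_base_action import EngineBaseOnlineAction

    os.environ.setdefault('MARVIN_DATA_PATH', '/tmp/marvin')  # required by __init__

    class EchoAction(EngineBaseOnlineAction):
        def execute(self, input_message, params):
            # Stand-in for real prediction logic.
            return {"echo": input_message, "params": params}

    action = EchoAction(persistence_mode='memory')
    server = action._prepare_remote_server(port=50051, workers=1, rpc_workers=10)
    server.start()
    server.wait_for_termination()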
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_data_handler.py b/python-daemon/marvin_python_daemon/engine_base/engine_base_data_handler.py
similarity index 78%
rename from python-toolbox/marvin_python_toolbox/engine_base/engine_base_data_handler.py
rename to python-daemon/marvin_python_daemon/engine_base/engine_base_data_handler.py
index 3c798fb..5e0dc8b 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_data_handler.py
+++ b/python-daemon/marvin_python_daemon/engine_base/engine_base_data_handler.py
@@ -16,8 +16,7 @@
 # limitations under the License.
 
 from abc import ABCMeta
-from .._compatibility import six
-from .._logging import get_logger
+from ..common.log import get_logger
 
 from .engine_base_action import EngineBaseBatchAction
 
@@ -33,22 +32,28 @@
     _dataset = None
 
     def __init__(self, **kwargs):
-        self._initial_dataset = self._get_arg(kwargs=kwargs, arg='initial_dataset')
+        self._initial_dataset = self._get_arg(
+            kwargs=kwargs, arg='initial_dataset')
         self._dataset = self._get_arg(kwargs=kwargs, arg='dataset')
         super(EngineBaseDataHandler, self).__init__(**kwargs)
 
     @property
     def marvin_initial_dataset(self):
+        logger.info("initial_dataset loaded.")
         return self._load_obj(object_reference='_initial_dataset')
 
     @marvin_initial_dataset.setter
     def marvin_initial_dataset(self, initial_dataset):
-        self._save_obj(object_reference='_initial_dataset', obj=initial_dataset)
+        logger.info("initial_dataset saved.")
+        self._save_obj(object_reference='_initial_dataset',
+                       obj=initial_dataset)
 
     @property
     def marvin_dataset(self):
+        logger.info("dataset loaded.")
         return self._load_obj(object_reference='_dataset')
 
     @marvin_dataset.setter
     def marvin_dataset(self, dataset):
+        logger.info("dataset saved.")
         self._save_obj(object_reference='_dataset', obj=dataset)
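
The marvin_* properties are the only supported way to touch these artifacts: reads go through _load_obj (honoring persistence_mode) and writes through _save_obj (which also guards against double assignment). A hypothetical data handler showing the pattern; the execute signature is an assumption for illustration:

    from marvin_python_daemon.engine_base import EngineBaseDataHandler

    class Acquisitor(EngineBaseDataHandler):
        def execute(self, input_message, params):  # signature assumed
            # Assignment routes through _save_obj, never the raw attribute.
            self.marvin_initial_dataset = {"rows": [1, 2, 3]}
            # Reads route through _load_obj and may hit disk in 'local' mode.
            cleaned = self.marvin_initial_dataset["rows"][:2]
            self.marvin_dataset = {"rows": cleaned}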
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_prediction.py b/python-daemon/marvin_python_daemon/engine_base/engine_base_prediction.py
similarity index 95%
rename from python-toolbox/marvin_python_toolbox/engine_base/engine_base_prediction.py
rename to python-daemon/marvin_python_daemon/engine_base/engine_base_prediction.py
index af27228..a8ee303 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_prediction.py
+++ b/python-daemon/marvin_python_daemon/engine_base/engine_base_prediction.py
@@ -16,8 +16,7 @@
 # limitations under the License.
 
 from abc import ABCMeta
-from .._compatibility import six
-from .._logging import get_logger
+from ..common.log import get_logger
 
 from .engine_base_action import EngineBaseOnlineAction
 
@@ -53,4 +52,3 @@
     @marvin_metrics.setter
     def marvin_metrics(self, metrics):
         self._save_obj(object_reference='_metrics', obj=metrics)
-
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_training.py b/python-daemon/marvin_python_daemon/engine_base/engine_base_training.py
similarity index 96%
rename from python-toolbox/marvin_python_toolbox/engine_base/engine_base_training.py
rename to python-daemon/marvin_python_daemon/engine_base/engine_base_training.py
index 5ff8e84..6e32e91 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/engine_base_training.py
+++ b/python-daemon/marvin_python_daemon/engine_base/engine_base_training.py
@@ -16,8 +16,7 @@
 # limitations under the License.
 
 from abc import ABCMeta
-from .._compatibility import six
-from .._logging import get_logger
+from ..common.log import get_logger
 
 from .engine_base_action import EngineBaseBatchAction
 
@@ -63,4 +62,3 @@
     @marvin_metrics.setter
     def marvin_metrics(self, metrics):
         self._save_obj(object_reference='_metrics', obj=metrics)
-
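
The same property discipline applies to training artifacts; note from _serializer_dump above that an artifact named 'metrics' is written as JSON rather than pickled, so whatever is assigned here must be JSON-serializable. A sketch (marvin_model and the execute signature are assumptions based on the toolbox lineage):

    from marvin_python_daemon.engine_base import EngineBaseTraining

    class Trainer(EngineBaseTraining):
        def execute(self, input_message, params):  # signature assumed
            self.marvin_model = {"weights": [0.1, 0.2]}  # pickled by _serializer_dump
            # 'metrics' is JSON-dumped (sort_keys, indent=4), so keep it JSON-safe.
            self.marvin_metrics = {"accuracy": 0.97}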
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/protos/actions.proto b/python-daemon/marvin_python_daemon/engine_base/protos/actions.proto
similarity index 96%
rename from python-toolbox/marvin_python_toolbox/engine_base/protos/actions.proto
rename to python-daemon/marvin_python_daemon/engine_base/protos/actions.proto
index 4a63daf..da3a3d5 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/protos/actions.proto
+++ b/python-daemon/marvin_python_daemon/engine_base/protos/actions.proto
@@ -1,4 +1,4 @@
-/** Copyright [2019] [Apache Software Foundation]
+/** Copyright [2020] [Apache Software Foundation]
 
  Licensed under the Apache License, Version 2.0 (the "License");
  you may not use this file except in compliance with the License.
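
The actions_pb2*.py modules checked in below are generated from this proto. If the schema changes, they can be regenerated with grpcio-tools; a sketch of the invocation, with paths assumed relative to the engine_base directory:

    # Regenerate the stubs (pip install grpcio-tools first).
    from grpc_tools import protoc

    protoc.main([
        'grpc_tools.protoc',
        '-Iprotos',
        '--python_out=stubs',
        '--grpc_python_out=stubs',
        'protos/actions.proto',
    ])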
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/serializers/__init__.py b/python-daemon/marvin_python_daemon/engine_base/serializers/__init__.py
similarity index 92%
rename from python-toolbox/marvin_python_toolbox/engine_base/serializers/__init__.py
rename to python-daemon/marvin_python_daemon/engine_base/serializers/__init__.py
index eef28de..4b461a1 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/serializers/__init__.py
+++ b/python-daemon/marvin_python_daemon/engine_base/serializers/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/serializers/keras_serializer.py b/python-daemon/marvin_python_daemon/engine_base/serializers/keras_serializer.py
similarity index 80%
rename from python-toolbox/marvin_python_toolbox/engine_base/serializers/keras_serializer.py
rename to python-daemon/marvin_python_daemon/engine_base/serializers/keras_serializer.py
index 467d573..35c5344 100644
--- a/python-toolbox/marvin_python_toolbox/engine_base/serializers/keras_serializer.py
+++ b/python-daemon/marvin_python_daemon/engine_base/serializers/keras_serializer.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -16,25 +16,25 @@
 # limitations under the License.
 
 import os
-from ..._logging import get_logger
+from ...common.log import get_logger
 
 logger = get_logger('engine_base_data_handler')
+
 __all__ = ['KerasSerializer']
 
 
 class KerasSerializer(object):
     def _serializer_load(self, object_file_path):
         if object_file_path.split(os.sep)[-1] == 'model':
-            from keras.models import load_model
-
-            logger.debug("Loading model {} using keras serializer.".format(object_file_path))
+            from tensorflow.keras.models import load_model
+            logger.info("Loading model {} using keras serializer.".format(object_file_path))
             return load_model(object_file_path)
         else:
             return super(KerasSerializer, self)._serializer_load(object_file_path)
 
     def _serializer_dump(self, obj, object_file_path):
         if object_file_path.split(os.sep)[-1] == 'model':
-            logger.debug("Saving model {} using keras serializer.".format(object_file_path))
+            logger.info("Saving model {} using keras serializer.".format(object_file_path))
             obj.save(object_file_path)
         else:
             super(KerasSerializer, self)._serializer_dump(obj, object_file_path)
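
KerasSerializer is written as a cooperative mixin: it intercepts only the artifact named 'model' and defers every other artifact to the next class in the MRO via super(). To take effect it must precede the action base class in the bases list; a sketch (class name illustrative, execute signature assumed):

    from marvin_python_daemon.engine_base import EngineBaseTraining
    from marvin_python_daemon.engine_base.serializers import KerasSerializer

    # MRO puts KerasSerializer first, so its _serializer_dump/_serializer_load
    # see 'model' artifacts before the pickle-based defaults do.
    class KerasTrainer(KerasSerializer, EngineBaseTraining):
        def execute(self, input_message, params):  # signature assumed
            ...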
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/stubs/__init__.py b/python-daemon/marvin_python_daemon/engine_base/stubs/__init__.py
similarity index 100%
rename from python-toolbox/marvin_python_toolbox/engine_base/stubs/__init__.py
rename to python-daemon/marvin_python_daemon/engine_base/stubs/__init__.py
diff --git a/python-daemon/marvin_python_daemon/engine_base/stubs/actions_pb2.py b/python-daemon/marvin_python_daemon/engine_base/stubs/actions_pb2.py
new file mode 100644
index 0000000..f01c688
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/engine_base/stubs/actions_pb2.py
@@ -0,0 +1,820 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: actions.proto
+
+from google.protobuf import descriptor_pb2
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import reflection as _reflection
+from google.protobuf import message as _message
+from google.protobuf import descriptor as _descriptor
+import sys
+_b = sys.version_info[0] < 3 and (
+    lambda x: x) or (lambda x: x.encode('latin1'))
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+    name='actions.proto',
+    package='',
+    syntax='proto3',
+    serialized_pb=_b('\n\ractions.proto\"6\n\x13OnlineActionRequest\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x0e\n\x06params\x18\x02 \x01(\t\"\'\n\x14OnlineActionResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"$\n\x12\x42\x61tchActionRequest\x12\x0e\n\x06params\x18\x01 \x01(\t\"&\n\x13\x42\x61tchActionResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"4\n\rReloadRequest\x12\x10\n\x08protocol\x18\x01 \x01(\t\x12\x11\n\tartifacts\x18\x02 \x01(\t\"!\n\x0eReloadResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"\'\n\x12HealthCheckRequest\x12\x11\n\tartifacts\x18\x02 \x01(\t\"]\n\x13HealthCheckResponse\x12+\n\x06status\x18\x01 \x01(\x0e\x32\x1b.HealthCheckResponse.Status\"\x19\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\x07\n\x03NOK\x10\x01\x32\xca\x01\n\x13OnlineActionHandler\x12@\n\x0f_remote_execute\x12\x14.OnlineActionRequest\x1a\x15.OnlineActionResponse\"\x00\x12\x33\n\x0e_remote_reload\x12\x0e.ReloadRequest\x1a\x0f.ReloadResponse\"\x00\x12<\n\r_health_check\x12\x13.HealthCheckRequest\x1a\x14.HealthCheckResponse\"\x00\x32\xc7\x01\n\x12\x42\x61tchActionHandler\x12>\n\x0f_remote_execute\x12\x13.BatchActionRequest\x1a\x14.BatchActionResponse\"\x00\x12\x33\n\x0e_remote_reload\x12\x0e.ReloadRequest\x1a\x0f.ReloadResponse\"\x00\x12<\n\r_health_check\x12\x13.HealthCheckRequest\x1a\x14.HealthCheckResponse\"\x00\x62\x06proto3')
+)
+
+
+_HEALTHCHECKRESPONSE_STATUS = _descriptor.EnumDescriptor(
+    name='Status',
+    full_name='HealthCheckResponse.Status',
+    filename=None,
+    file=DESCRIPTOR,
+    values=[
+        _descriptor.EnumValueDescriptor(
+            name='OK', index=0, number=0,
+            options=None,
+            type=None),
+        _descriptor.EnumValueDescriptor(
+            name='NOK', index=1, number=1,
+            options=None,
+            type=None),
+    ],
+    containing_type=None,
+    options=None,
+    serialized_start=390,
+    serialized_end=415,
+)
+_sym_db.RegisterEnumDescriptor(_HEALTHCHECKRESPONSE_STATUS)
+
+
+_ONLINEACTIONREQUEST = _descriptor.Descriptor(
+    name='OnlineActionRequest',
+    full_name='OnlineActionRequest',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='message', full_name='OnlineActionRequest.message', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+        _descriptor.FieldDescriptor(
+            name='params', full_name='OnlineActionRequest.params', index=1,
+            number=2, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=17,
+    serialized_end=71,
+)
+
+
+_ONLINEACTIONRESPONSE = _descriptor.Descriptor(
+    name='OnlineActionResponse',
+    full_name='OnlineActionResponse',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='message', full_name='OnlineActionResponse.message', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=73,
+    serialized_end=112,
+)
+
+
+_BATCHACTIONREQUEST = _descriptor.Descriptor(
+    name='BatchActionRequest',
+    full_name='BatchActionRequest',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='params', full_name='BatchActionRequest.params', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=114,
+    serialized_end=150,
+)
+
+
+_BATCHACTIONRESPONSE = _descriptor.Descriptor(
+    name='BatchActionResponse',
+    full_name='BatchActionResponse',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='message', full_name='BatchActionResponse.message', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=152,
+    serialized_end=190,
+)
+
+
+_RELOADREQUEST = _descriptor.Descriptor(
+    name='ReloadRequest',
+    full_name='ReloadRequest',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='protocol', full_name='ReloadRequest.protocol', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+        _descriptor.FieldDescriptor(
+            name='artifacts', full_name='ReloadRequest.artifacts', index=1,
+            number=2, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=192,
+    serialized_end=244,
+)
+
+
+_RELOADRESPONSE = _descriptor.Descriptor(
+    name='ReloadResponse',
+    full_name='ReloadResponse',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='message', full_name='ReloadResponse.message', index=0,
+            number=1, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=246,
+    serialized_end=279,
+)
+
+
+_HEALTHCHECKREQUEST = _descriptor.Descriptor(
+    name='HealthCheckRequest',
+    full_name='HealthCheckRequest',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='artifacts', full_name='HealthCheckRequest.artifacts', index=0,
+            number=2, type=9, cpp_type=9, label=1,
+            has_default_value=False, default_value=_b("").decode('utf-8'),
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=281,
+    serialized_end=320,
+)
+
+
+_HEALTHCHECKRESPONSE = _descriptor.Descriptor(
+    name='HealthCheckResponse',
+    full_name='HealthCheckResponse',
+    filename=None,
+    file=DESCRIPTOR,
+    containing_type=None,
+    fields=[
+        _descriptor.FieldDescriptor(
+            name='status', full_name='HealthCheckResponse.status', index=0,
+            number=1, type=14, cpp_type=8, label=1,
+            has_default_value=False, default_value=0,
+            message_type=None, enum_type=None, containing_type=None,
+            is_extension=False, extension_scope=None,
+            options=None),
+    ],
+    extensions=[
+    ],
+    nested_types=[],
+    enum_types=[
+        _HEALTHCHECKRESPONSE_STATUS,
+    ],
+    options=None,
+    is_extendable=False,
+    syntax='proto3',
+    extension_ranges=[],
+    oneofs=[
+    ],
+    serialized_start=322,
+    serialized_end=415,
+)
+
+_HEALTHCHECKRESPONSE.fields_by_name['status'].enum_type = _HEALTHCHECKRESPONSE_STATUS
+_HEALTHCHECKRESPONSE_STATUS.containing_type = _HEALTHCHECKRESPONSE
+DESCRIPTOR.message_types_by_name['OnlineActionRequest'] = _ONLINEACTIONREQUEST
+DESCRIPTOR.message_types_by_name['OnlineActionResponse'] = _ONLINEACTIONRESPONSE
+DESCRIPTOR.message_types_by_name['BatchActionRequest'] = _BATCHACTIONREQUEST
+DESCRIPTOR.message_types_by_name['BatchActionResponse'] = _BATCHACTIONRESPONSE
+DESCRIPTOR.message_types_by_name['ReloadRequest'] = _RELOADREQUEST
+DESCRIPTOR.message_types_by_name['ReloadResponse'] = _RELOADRESPONSE
+DESCRIPTOR.message_types_by_name['HealthCheckRequest'] = _HEALTHCHECKREQUEST
+DESCRIPTOR.message_types_by_name['HealthCheckResponse'] = _HEALTHCHECKRESPONSE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+OnlineActionRequest = _reflection.GeneratedProtocolMessageType('OnlineActionRequest', (_message.Message,), dict(
+    DESCRIPTOR=_ONLINEACTIONREQUEST,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:OnlineActionRequest)
+))
+_sym_db.RegisterMessage(OnlineActionRequest)
+
+OnlineActionResponse = _reflection.GeneratedProtocolMessageType('OnlineActionResponse', (_message.Message,), dict(
+    DESCRIPTOR=_ONLINEACTIONRESPONSE,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:OnlineActionResponse)
+))
+_sym_db.RegisterMessage(OnlineActionResponse)
+
+BatchActionRequest = _reflection.GeneratedProtocolMessageType('BatchActionRequest', (_message.Message,), dict(
+    DESCRIPTOR=_BATCHACTIONREQUEST,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:BatchActionRequest)
+))
+_sym_db.RegisterMessage(BatchActionRequest)
+
+BatchActionResponse = _reflection.GeneratedProtocolMessageType('BatchActionResponse', (_message.Message,), dict(
+    DESCRIPTOR=_BATCHACTIONRESPONSE,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:BatchActionResponse)
+))
+_sym_db.RegisterMessage(BatchActionResponse)
+
+ReloadRequest = _reflection.GeneratedProtocolMessageType('ReloadRequest', (_message.Message,), dict(
+    DESCRIPTOR=_RELOADREQUEST,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:ReloadRequest)
+))
+_sym_db.RegisterMessage(ReloadRequest)
+
+ReloadResponse = _reflection.GeneratedProtocolMessageType('ReloadResponse', (_message.Message,), dict(
+    DESCRIPTOR=_RELOADRESPONSE,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:ReloadResponse)
+))
+_sym_db.RegisterMessage(ReloadResponse)
+
+HealthCheckRequest = _reflection.GeneratedProtocolMessageType('HealthCheckRequest', (_message.Message,), dict(
+    DESCRIPTOR=_HEALTHCHECKREQUEST,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:HealthCheckRequest)
+))
+_sym_db.RegisterMessage(HealthCheckRequest)
+
+HealthCheckResponse = _reflection.GeneratedProtocolMessageType('HealthCheckResponse', (_message.Message,), dict(
+    DESCRIPTOR=_HEALTHCHECKRESPONSE,
+    __module__='actions_pb2'
+    # @@protoc_insertion_point(class_scope:HealthCheckResponse)
+))
+_sym_db.RegisterMessage(HealthCheckResponse)
+
+
+_ONLINEACTIONHANDLER = _descriptor.ServiceDescriptor(
+    name='OnlineActionHandler',
+    full_name='OnlineActionHandler',
+    file=DESCRIPTOR,
+    index=0,
+    options=None,
+    serialized_start=418,
+    serialized_end=620,
+    methods=[
+        _descriptor.MethodDescriptor(
+            name='_remote_execute',
+            full_name='OnlineActionHandler._remote_execute',
+            index=0,
+            containing_service=None,
+            input_type=_ONLINEACTIONREQUEST,
+            output_type=_ONLINEACTIONRESPONSE,
+            options=None,
+        ),
+        _descriptor.MethodDescriptor(
+            name='_remote_reload',
+            full_name='OnlineActionHandler._remote_reload',
+            index=1,
+            containing_service=None,
+            input_type=_RELOADREQUEST,
+            output_type=_RELOADRESPONSE,
+            options=None,
+        ),
+        _descriptor.MethodDescriptor(
+            name='_health_check',
+            full_name='OnlineActionHandler._health_check',
+            index=2,
+            containing_service=None,
+            input_type=_HEALTHCHECKREQUEST,
+            output_type=_HEALTHCHECKRESPONSE,
+            options=None,
+        ),
+    ])
+_sym_db.RegisterServiceDescriptor(_ONLINEACTIONHANDLER)
+
+DESCRIPTOR.services_by_name['OnlineActionHandler'] = _ONLINEACTIONHANDLER
+
+
+_BATCHACTIONHANDLER = _descriptor.ServiceDescriptor(
+    name='BatchActionHandler',
+    full_name='BatchActionHandler',
+    file=DESCRIPTOR,
+    index=1,
+    options=None,
+    serialized_start=623,
+    serialized_end=822,
+    methods=[
+        _descriptor.MethodDescriptor(
+            name='_remote_execute',
+            full_name='BatchActionHandler._remote_execute',
+            index=0,
+            containing_service=None,
+            input_type=_BATCHACTIONREQUEST,
+            output_type=_BATCHACTIONRESPONSE,
+            options=None,
+        ),
+        _descriptor.MethodDescriptor(
+            name='_remote_reload',
+            full_name='BatchActionHandler._remote_reload',
+            index=1,
+            containing_service=None,
+            input_type=_RELOADREQUEST,
+            output_type=_RELOADRESPONSE,
+            options=None,
+        ),
+        _descriptor.MethodDescriptor(
+            name='_health_check',
+            full_name='BatchActionHandler._health_check',
+            index=2,
+            containing_service=None,
+            input_type=_HEALTHCHECKREQUEST,
+            output_type=_HEALTHCHECKRESPONSE,
+            options=None,
+        ),
+    ])
+_sym_db.RegisterServiceDescriptor(_BATCHACTIONHANDLER)
+
+DESCRIPTOR.services_by_name['BatchActionHandler'] = _BATCHACTIONHANDLER
+
+try:
+    # THESE ELEMENTS WILL BE DEPRECATED.
+    # Please use the generated *_pb2_grpc.py files instead.
+    import grpc
+    from grpc.beta import implementations as beta_implementations
+    from grpc.beta import interfaces as beta_interfaces
+    from grpc.framework.common import cardinality
+    from grpc.framework.interfaces.face import utilities as face_utilities
+
+    class OnlineActionHandlerStub(object):
+        # missing associated documentation comment in .proto file
+        pass
+
+        def __init__(self, channel):
+            """Constructor.
+
+            Args:
+              channel: A grpc.Channel.
+            """
+            self._remote_execute = channel.unary_unary(
+                '/OnlineActionHandler/_remote_execute',
+                request_serializer=OnlineActionRequest.SerializeToString,
+                response_deserializer=OnlineActionResponse.FromString,
+            )
+            self._remote_reload = channel.unary_unary(
+                '/OnlineActionHandler/_remote_reload',
+                request_serializer=ReloadRequest.SerializeToString,
+                response_deserializer=ReloadResponse.FromString,
+            )
+            self._health_check = channel.unary_unary(
+                '/OnlineActionHandler/_health_check',
+                request_serializer=HealthCheckRequest.SerializeToString,
+                response_deserializer=HealthCheckResponse.FromString,
+            )
+
+    class OnlineActionHandlerServicer(object):
+        # missing associated documentation comment in .proto file
+        pass
+
+        def _remote_execute(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details('Method not implemented!')
+            raise NotImplementedError('Method not implemented!')
+
+        def _remote_reload(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details('Method not implemented!')
+            raise NotImplementedError('Method not implemented!')
+
+        def _health_check(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details('Method not implemented!')
+            raise NotImplementedError('Method not implemented!')
+
+    def add_OnlineActionHandlerServicer_to_server(servicer, server):
+        rpc_method_handlers = {
+            '_remote_execute': grpc.unary_unary_rpc_method_handler(
+                servicer._remote_execute,
+                request_deserializer=OnlineActionRequest.FromString,
+                response_serializer=OnlineActionResponse.SerializeToString,
+            ),
+            '_remote_reload': grpc.unary_unary_rpc_method_handler(
+                servicer._remote_reload,
+                request_deserializer=ReloadRequest.FromString,
+                response_serializer=ReloadResponse.SerializeToString,
+            ),
+            '_health_check': grpc.unary_unary_rpc_method_handler(
+                servicer._health_check,
+                request_deserializer=HealthCheckRequest.FromString,
+                response_serializer=HealthCheckResponse.SerializeToString,
+            ),
+        }
+        generic_handler = grpc.method_handlers_generic_handler(
+            'OnlineActionHandler', rpc_method_handlers)
+        server.add_generic_rpc_handlers((generic_handler,))
+
+    class BatchActionHandlerStub(object):
+        # missing associated documentation comment in .proto file
+        pass
+
+        def __init__(self, channel):
+            """Constructor.
+
+            Args:
+              channel: A grpc.Channel.
+            """
+            self._remote_execute = channel.unary_unary(
+                '/BatchActionHandler/_remote_execute',
+                request_serializer=BatchActionRequest.SerializeToString,
+                response_deserializer=BatchActionResponse.FromString,
+            )
+            self._remote_reload = channel.unary_unary(
+                '/BatchActionHandler/_remote_reload',
+                request_serializer=ReloadRequest.SerializeToString,
+                response_deserializer=ReloadResponse.FromString,
+            )
+            self._health_check = channel.unary_unary(
+                '/BatchActionHandler/_health_check',
+                request_serializer=HealthCheckRequest.SerializeToString,
+                response_deserializer=HealthCheckResponse.FromString,
+            )
+
+    class BatchActionHandlerServicer(object):
+        # missing associated documentation comment in .proto file
+        pass
+
+        def _remote_execute(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details('Method not implemented!')
+            raise NotImplementedError('Method not implemented!')
+
+        def _remote_reload(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details('Method not implemented!')
+            raise NotImplementedError('Method not implemented!')
+
+        def _health_check(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+            context.set_details('Method not implemented!')
+            raise NotImplementedError('Method not implemented!')
+
+    def add_BatchActionHandlerServicer_to_server(servicer, server):
+        rpc_method_handlers = {
+            '_remote_execute': grpc.unary_unary_rpc_method_handler(
+                servicer._remote_execute,
+                request_deserializer=BatchActionRequest.FromString,
+                response_serializer=BatchActionResponse.SerializeToString,
+            ),
+            '_remote_reload': grpc.unary_unary_rpc_method_handler(
+                servicer._remote_reload,
+                request_deserializer=ReloadRequest.FromString,
+                response_serializer=ReloadResponse.SerializeToString,
+            ),
+            '_health_check': grpc.unary_unary_rpc_method_handler(
+                servicer._health_check,
+                request_deserializer=HealthCheckRequest.FromString,
+                response_serializer=HealthCheckResponse.SerializeToString,
+            ),
+        }
+        generic_handler = grpc.method_handlers_generic_handler(
+            'BatchActionHandler', rpc_method_handlers)
+        server.add_generic_rpc_handlers((generic_handler,))
+
+    class BetaOnlineActionHandlerServicer(object):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This class was generated
+        only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+        # missing associated documentation comment in .proto file
+        pass
+
+        def _remote_execute(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+        def _remote_reload(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+        def _health_check(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+    class BetaOnlineActionHandlerStub(object):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This class was generated
+        only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+        # missing associated documentation comment in .proto file
+        pass
+
+        def _remote_execute(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+            # missing associated documentation comment in .proto file
+            pass
+            raise NotImplementedError()
+        _remote_execute.future = None
+
+        def _remote_reload(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+            # missing associated documentation comment in .proto file
+            pass
+            raise NotImplementedError()
+        _remote_reload.future = None
+
+        def _health_check(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+            # missing associated documentation comment in .proto file
+            pass
+            raise NotImplementedError()
+        _health_check.future = None
+
+    def beta_create_OnlineActionHandler_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This function was
+        generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+        request_deserializers = {
+            ('OnlineActionHandler', '_health_check'): HealthCheckRequest.FromString,
+            ('OnlineActionHandler', '_remote_execute'): OnlineActionRequest.FromString,
+            ('OnlineActionHandler', '_remote_reload'): ReloadRequest.FromString,
+        }
+        response_serializers = {
+            ('OnlineActionHandler', '_health_check'): HealthCheckResponse.SerializeToString,
+            ('OnlineActionHandler', '_remote_execute'): OnlineActionResponse.SerializeToString,
+            ('OnlineActionHandler', '_remote_reload'): ReloadResponse.SerializeToString,
+        }
+        method_implementations = {
+            ('OnlineActionHandler', '_health_check'): face_utilities.unary_unary_inline(servicer._health_check),
+            ('OnlineActionHandler', '_remote_execute'): face_utilities.unary_unary_inline(servicer._remote_execute),
+            ('OnlineActionHandler', '_remote_reload'): face_utilities.unary_unary_inline(servicer._remote_reload),
+        }
+        server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers,
+                                                             thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
+        return beta_implementations.server(method_implementations, options=server_options)
+
+    def beta_create_OnlineActionHandler_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This function was
+        generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+        request_serializers = {
+            ('OnlineActionHandler', '_health_check'): HealthCheckRequest.SerializeToString,
+            ('OnlineActionHandler', '_remote_execute'): OnlineActionRequest.SerializeToString,
+            ('OnlineActionHandler', '_remote_reload'): ReloadRequest.SerializeToString,
+        }
+        response_deserializers = {
+            ('OnlineActionHandler', '_health_check'): HealthCheckResponse.FromString,
+            ('OnlineActionHandler', '_remote_execute'): OnlineActionResponse.FromString,
+            ('OnlineActionHandler', '_remote_reload'): ReloadResponse.FromString,
+        }
+        cardinalities = {
+            '_health_check': cardinality.Cardinality.UNARY_UNARY,
+            '_remote_execute': cardinality.Cardinality.UNARY_UNARY,
+            '_remote_reload': cardinality.Cardinality.UNARY_UNARY,
+        }
+        stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers,
+                                                         response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
+        return beta_implementations.dynamic_stub(channel, 'OnlineActionHandler', cardinalities, options=stub_options)
+
+    class BetaBatchActionHandlerServicer(object):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This class was generated
+        only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+        # missing associated documentation comment in .proto file
+        pass
+
+        def _remote_execute(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+        def _remote_reload(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+        def _health_check(self, request, context):
+            # missing associated documentation comment in .proto file
+            pass
+            context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+    class BetaBatchActionHandlerStub(object):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This class was generated
+        only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+        # missing associated documentation comment in .proto file
+        pass
+
+        def _remote_execute(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+            # missing associated documentation comment in .proto file
+            pass
+            raise NotImplementedError()
+        _remote_execute.future = None
+
+        def _remote_reload(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+            # missing associated documentation comment in .proto file
+            pass
+            raise NotImplementedError()
+        _remote_reload.future = None
+
+        def _health_check(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+            # missing associated documentation comment in .proto file
+            pass
+            raise NotImplementedError()
+        _health_check.future = None
+
+    def beta_create_BatchActionHandler_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This function was
+        generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+        request_deserializers = {
+            ('BatchActionHandler', '_health_check'): HealthCheckRequest.FromString,
+            ('BatchActionHandler', '_remote_execute'): BatchActionRequest.FromString,
+            ('BatchActionHandler', '_remote_reload'): ReloadRequest.FromString,
+        }
+        response_serializers = {
+            ('BatchActionHandler', '_health_check'): HealthCheckResponse.SerializeToString,
+            ('BatchActionHandler', '_remote_execute'): BatchActionResponse.SerializeToString,
+            ('BatchActionHandler', '_remote_reload'): ReloadResponse.SerializeToString,
+        }
+        method_implementations = {
+            ('BatchActionHandler', '_health_check'): face_utilities.unary_unary_inline(servicer._health_check),
+            ('BatchActionHandler', '_remote_execute'): face_utilities.unary_unary_inline(servicer._remote_execute),
+            ('BatchActionHandler', '_remote_reload'): face_utilities.unary_unary_inline(servicer._remote_reload),
+        }
+        server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers,
+                                                             thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
+        return beta_implementations.server(method_implementations, options=server_options)
+
+    def beta_create_BatchActionHandler_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
+        """The Beta API is deprecated for 0.15.0 and later.
+
+        It is recommended to use the GA API (classes and functions in this
+        file not marked beta) for all further purposes. This function was
+        generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+        request_serializers = {
+            ('BatchActionHandler', '_health_check'): HealthCheckRequest.SerializeToString,
+            ('BatchActionHandler', '_remote_execute'): BatchActionRequest.SerializeToString,
+            ('BatchActionHandler', '_remote_reload'): ReloadRequest.SerializeToString,
+        }
+        response_deserializers = {
+            ('BatchActionHandler', '_health_check'): HealthCheckResponse.FromString,
+            ('BatchActionHandler', '_remote_execute'): BatchActionResponse.FromString,
+            ('BatchActionHandler', '_remote_reload'): ReloadResponse.FromString,
+        }
+        cardinalities = {
+            '_health_check': cardinality.Cardinality.UNARY_UNARY,
+            '_remote_execute': cardinality.Cardinality.UNARY_UNARY,
+            '_remote_reload': cardinality.Cardinality.UNARY_UNARY,
+        }
+        stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers,
+                                                         response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
+        return beta_implementations.dynamic_stub(channel, 'BatchActionHandler', cardinalities, options=stub_options)
+except ImportError:
+    pass
+# @@protoc_insertion_point(module_scope)
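
Independent of gRPC, the generated message classes can be exercised directly; a quick round-trip showing the bytes the handlers above (de)serialize:

    from marvin_python_daemon.engine_base.stubs import actions_pb2

    req = actions_pb2.OnlineActionRequest(message='{"input": 1}', params='{}')
    wire = req.SerializeToString()          # compact binary wire format
    back = actions_pb2.OnlineActionRequest.FromString(wire)
    assert back.message == req.message and back.params == req.params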
diff --git a/python-daemon/marvin_python_daemon/engine_base/stubs/actions_pb2_grpc.py b/python-daemon/marvin_python_daemon/engine_base/stubs/actions_pb2_grpc.py
new file mode 100644
index 0000000..ba256fc
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/engine_base/stubs/actions_pb2_grpc.py
@@ -0,0 +1,158 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+# Python 3: relative import of the generated message module.
+# (Python 2 would instead use: import actions_pb2 as actions__pb2)
+from ..stubs import actions_pb2 as actions__pb2
+
+
+class OnlineActionHandlerStub(object):
+    # missing associated documentation comment in .proto file
+    pass
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+          channel: A grpc.Channel.
+        """
+        self._remote_execute = channel.unary_unary(
+            '/OnlineActionHandler/_remote_execute',
+            request_serializer=actions__pb2.OnlineActionRequest.SerializeToString,
+            response_deserializer=actions__pb2.OnlineActionResponse.FromString,
+        )
+        self._remote_reload = channel.unary_unary(
+            '/OnlineActionHandler/_remote_reload',
+            request_serializer=actions__pb2.ReloadRequest.SerializeToString,
+            response_deserializer=actions__pb2.ReloadResponse.FromString,
+        )
+        self._health_check = channel.unary_unary(
+            '/OnlineActionHandler/_health_check',
+            request_serializer=actions__pb2.HealthCheckRequest.SerializeToString,
+            response_deserializer=actions__pb2.HealthCheckResponse.FromString,
+        )
+
+
+class OnlineActionHandlerServicer(object):
+    # missing associated documentation comment in .proto file
+    pass
+
+    def _remote_execute(self, request, context):
+        # missing associated documentation comment in .proto file
+        pass
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def _remote_reload(self, request, context):
+        # missing associated documentation comment in .proto file
+        pass
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def _health_check(self, request, context):
+        # missing associated documentation comment in .proto file
+        pass
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_OnlineActionHandlerServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        '_remote_execute': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_execute,
+            request_deserializer=actions__pb2.OnlineActionRequest.FromString,
+            response_serializer=actions__pb2.OnlineActionResponse.SerializeToString,
+        ),
+        '_remote_reload': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_reload,
+            request_deserializer=actions__pb2.ReloadRequest.FromString,
+            response_serializer=actions__pb2.ReloadResponse.SerializeToString,
+        ),
+        '_health_check': grpc.unary_unary_rpc_method_handler(
+            servicer._health_check,
+            request_deserializer=actions__pb2.HealthCheckRequest.FromString,
+            response_serializer=actions__pb2.HealthCheckResponse.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        'OnlineActionHandler', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+class BatchActionHandlerStub(object):
+    # missing associated documentation comment in .proto file
+    pass
+
+    def __init__(self, channel):
+        """Constructor.
+
+        Args:
+          channel: A grpc.Channel.
+        """
+        self._remote_execute = channel.unary_unary(
+            '/BatchActionHandler/_remote_execute',
+            request_serializer=actions__pb2.BatchActionRequest.SerializeToString,
+            response_deserializer=actions__pb2.BatchActionResponse.FromString,
+        )
+        self._remote_reload = channel.unary_unary(
+            '/BatchActionHandler/_remote_reload',
+            request_serializer=actions__pb2.ReloadRequest.SerializeToString,
+            response_deserializer=actions__pb2.ReloadResponse.FromString,
+        )
+        self._health_check = channel.unary_unary(
+            '/BatchActionHandler/_health_check',
+            request_serializer=actions__pb2.HealthCheckRequest.SerializeToString,
+            response_deserializer=actions__pb2.HealthCheckResponse.FromString,
+        )
+
+
+class BatchActionHandlerServicer(object):
+    # missing associated documentation comment in .proto file
+    pass
+
+    def _remote_execute(self, request, context):
+        # missing associated documentation comment in .proto file
+        pass
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def _remote_reload(self, request, context):
+        # missing associated documentation comment in .proto file
+        pass
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+    def _health_check(self, request, context):
+        # missing associated documentation comment in .proto file
+        pass
+        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+        context.set_details('Method not implemented!')
+        raise NotImplementedError('Method not implemented!')
+
+
+def add_BatchActionHandlerServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        '_remote_execute': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_execute,
+            request_deserializer=actions__pb2.BatchActionRequest.FromString,
+            response_serializer=actions__pb2.BatchActionResponse.SerializeToString,
+        ),
+        '_remote_reload': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_reload,
+            request_deserializer=actions__pb2.ReloadRequest.FromString,
+            response_serializer=actions__pb2.ReloadResponse.SerializeToString,
+        ),
+        '_health_check': grpc.unary_unary_rpc_method_handler(
+            servicer._health_check,
+            request_deserializer=actions__pb2.HealthCheckRequest.FromString,
+            response_serializer=actions__pb2.HealthCheckResponse.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        'BatchActionHandler', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
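
For anyone exercising the generated stubs above, a minimal client sketch (the
port is an assumption; the JSON payloads mirror the OnlineActionRequest usage
in tests/engine_base/test_engine_base_action.py):

    import grpc

    from marvin_python_daemon.engine_base.stubs import actions_pb2
    from marvin_python_daemon.engine_base.stubs.actions_pb2_grpc import OnlineActionHandlerStub

    # Plaintext channel to a locally running engine action server
    # (localhost:50051 is illustrative, not a documented default).
    channel = grpc.insecure_channel('localhost:50051')
    stub = OnlineActionHandlerStub(channel)

    # Both fields carry JSON-encoded strings.
    request = actions_pb2.OnlineActionRequest(message='{"k": 1}', params='{}')
    response = stub._remote_execute(request)
    print(response)
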
diff --git a/python-toolbox/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py b/python-daemon/marvin_python_daemon/extras/notebook_extensions/jupyter_notebook_config.py
similarity index 80%
rename from python-toolbox/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py
rename to python-daemon/marvin_python_daemon/extras/notebook_extensions/jupyter_notebook_config.py
index 5b44b9a..ee97aba 100644
--- a/python-toolbox/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py
+++ b/python-daemon/marvin_python_daemon/extras/notebook_extensions/jupyter_notebook_config.py
@@ -21,7 +21,7 @@
     import autopep8
     import inspect
     import re
-    from marvin_python_toolbox.common.config import Config
+    from ..common.config import Config
 
     print("Executing the marvin export hook script...")
 
@@ -39,8 +39,10 @@
         'marvin_metrics': re.compile(r"(\bmarvin_metrics\b)")
     }
 
-    batch_exec_pattern = re.compile("(def\s+execute\s*\(\s*self\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
-    online_exec_pattern = re.compile("(def\s+execute\s*\(\s*self\s*,\s*input_message\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
+    batch_exec_pattern = re.compile(
+        r"(def\s+execute\s*\(\s*self\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
+    online_exec_pattern = re.compile(
+        r"(def\s+execute\s*\(\s*self\s*,\s*input_message\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
 
     CLAZZES = {
         "acquisitor": "AcquisitorAndCleaner",
@@ -55,10 +57,12 @@
     for cell in cells:
         if cell['cell_type'] == 'code' and cell["metadata"].get("marvin_cell", False):
             source = cell["source"]
-            new_source = autopep8.fix_code(source, options={'max_line_length': 160})
+            new_source = autopep8.fix_code(
+                source, options={'max_line_length': 160})
 
             marvin_action = cell["metadata"]["marvin_cell"]
-            marvin_action_clazz = getattr(__import__(Config.get("package")), CLAZZES[marvin_action])
+            marvin_action_clazz = getattr(__import__(
+                Config.get("package")), CLAZZES[marvin_action])
             source_path = inspect.getsourcefile(marvin_action_clazz)
 
             fnew_source_lines = []
@@ -67,7 +71,8 @@
 
                 if not new_line.startswith("import") and not new_line.startswith("from") and not new_line.startswith("print"):
                     for artifact in artifacts.keys():
-                        fnew_line = re.sub(artifacts[artifact], 'self.' + artifact, fnew_line)
+                        fnew_line = re.sub(
+                            artifacts[artifact], 'self.' + artifact, fnew_line)
 
                 fnew_source_lines.append(fnew_line)
 
@@ -80,7 +85,8 @@
                 exec_pattern = online_exec_pattern
 
             elif marvin_action == "feedback":
-                fnew_source_lines.append("        return \"Thanks for the feedback!\"\n")
+                fnew_source_lines.append(
+                    "        return \"Thanks for the feedback!\"\n")
                 exec_pattern = online_exec_pattern
 
             else:
@@ -101,7 +107,7 @@
                     else:
                         fp.write(line)
 
-            print ("File {} updated!".format(source_path))
+            print("File {} updated!".format(source_path))
 
     print("Finished the marvin export hook script...")
 
diff --git a/python-toolbox/marvin_python_toolbox/extras/notebook_extensions/main.js b/python-daemon/marvin_python_daemon/extras/notebook_extensions/main.js
similarity index 100%
rename from python-toolbox/marvin_python_toolbox/extras/notebook_extensions/main.js
rename to python-daemon/marvin_python_daemon/extras/notebook_extensions/main.js
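
To make the export hook in jupyter_notebook_config.py above concrete, a
standalone sketch of its two transformations (the cell content is made up; the
patterns use the raw-string form from the fix above):

    import re

    # Locates the execute() signature where exported notebook code is spliced in.
    online_exec_pattern = re.compile(
        r"(def\s+execute\s*\(\s*self\s*,\s*input_message\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
    assert online_exec_pattern.search(
        "def execute(self, input_message, params, **kwargs):")

    # Rewrites bare artifact names from notebook cells into attribute accesses.
    artifact_pattern = re.compile(r"(\bmarvin_metrics\b)")
    print(re.sub(artifact_pattern, 'self.marvin_metrics',
                 "score = marvin_metrics['acc']"))
    # -> score = self.marvin_metrics['acc']
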
diff --git a/python-toolbox/marvin_python_toolbox/common/__init__.py b/python-daemon/marvin_python_daemon/management/__init__.py
similarity index 91%
copy from python-toolbox/marvin_python_toolbox/common/__init__.py
copy to python-daemon/marvin_python_daemon/management/__init__.py
index efb0a92..305068e 100644
--- a/python-toolbox/marvin_python_toolbox/common/__init__.py
+++ b/python-daemon/marvin_python_daemon/management/__init__.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,3 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
diff --git a/python-daemon/marvin_python_daemon/management/engine.py b/python-daemon/marvin_python_daemon/management/engine.py
new file mode 100644
index 0000000..e12c452
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/management/engine.py
@@ -0,0 +1,272 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import json
+import os
+import sys
+import time
+import os.path
+import subprocess
+import multiprocessing
+from ..common.profiling import profiling
+from ..common.data import MarvinData
+from ..common.log import get_logger
+from ..common.config import Config, load_conf_from_file
+
+logger = get_logger('management.engine')
+
+CLAZZES = {
+    "acquisitor": "AcquisitorAndCleaner",
+    "tpreparator": "TrainingPreparator",
+    "trainer": "Trainer",
+    "evaluator": "MetricsEvaluator",
+    "ppreparator": "PredictionPreparator",
+    "predictor": "Predictor",
+    "feedback": "Feedback"
+}
+
+ARTIFACTS = {
+    "AcquisitorAndCleaner": [],
+    "TrainingPreparator": ["initialdataset"],
+    "Trainer": ["dataset"],
+    "MetricsEvaluator": ["model"],
+    "PredictionPreparator": ["model", "metrics"],
+    "Predictor": ["model", "metrics"],
+    "Feedback": []
+}
+
+def dryrun(config, action, profiling_enabled):
+
+    # setting spark configuration directory
+    os.environ["SPARK_CONF_DIR"] = os.path.join(
+        os.environ["SPARK_HOME"], "conf")
+    os.environ["YARN_CONF_DIR"] = os.environ["SPARK_CONF_DIR"]
+
+    params = read_file('engine.params')
+    messages_file = read_file('engine.messages')
+    feedback_file = read_file('feedback.messages')
+
+    if action == 'all':
+        pipeline = ['acquisitor', 'tpreparator', 'trainer',
+                    'evaluator', 'ppreparator', 'predictor', 'feedback']
+    else:
+        pipeline = [action]
+
+    _dryrun = MarvinDryRun(config=config, messages=[
+                           messages_file, feedback_file])
+
+    initial_start_time = time.time()
+
+    for step in pipeline:
+        _dryrun.execute(clazz=CLAZZES[step],
+                        params=params, profiling_enabled=profiling_enabled)
+
+    logger.info("Total Time : {:.2f}s".format(
+        time.time() - initial_start_time))
+
+
+class MarvinDryRun(object):
+    def __init__(self, config, messages):
+        self.predictor_messages = messages[0]
+        self.feedback_messages = messages[1]
+        self.pmessages = []
+        self.package_name = config['marvin_package']
+        self.kwargs = None
+
+    def execute(self, clazz, params, profiling_enabled=False):
+        self.print_start_step(clazz)
+
+        _Step = dynamic_import("{}.{}".format(self.package_name, clazz))
+
+        if not self.kwargs:
+            self.kwargs = generate_kwargs(self.package_name, _Step, params)
+
+        step = _Step(**self.kwargs)
+
+        def call_online_actions(step, msg, msg_idx):
+            if profiling_enabled:
+                with profiling(output_path=".profiling", uid=clazz) as prof:
+                    result = step.execute(input_message=msg, params=params)
+
+                prof.disable()
+                logger.info(
+                    "\nProfile images created in {}\n".format(prof.image_path))
+
+            else:
+                result = step.execute(input_message=msg, params=params)
+
+            return result
+
+        if clazz == 'PredictionPreparator':
+            for idx, msg in enumerate(self.predictor_messages):
+                self.pmessages.append(call_online_actions(step, msg, idx))
+
+        elif clazz == 'Feedback':
+            for idx, msg in enumerate(self.feedback_messages):
+                self.pmessages.append(call_online_actions(step, msg, idx))
+
+        elif clazz == 'Predictor':
+
+            self.execute("PredictionPreparator", params)
+
+            self.pmessages = self.predictor_messages if not self.pmessages else self.pmessages
+
+            for idx, msg in enumerate(self.pmessages):
+                call_online_actions(step, msg, idx)
+
+        else:
+            if profiling_enabled:
+                with profiling(output_path=".profiling", uid=clazz) as prof:
+                    step.execute(params=params)
+
+                prof.disable()
+
+                logger.info(
+                    "\nProfile images created in {}\n".format(prof.image_path))
+
+            else:
+                step.execute(params=params)
+
+        self.print_finish_step()
+
+    def print_finish_step(self):
+        logger.info("STEP TAKES {:.4f} (seconds) ".format(
+            (time.time() - self.start_time)))
+
+    def print_start_step(self, name):
+        logger.info("MARVIN DRYRUN - STEP [{}]".format(name))
+        self.start_time = time.time()
+
+
+def dynamic_import(clazz):
+    components = clazz.split('.')
+    mod = __import__(components[0])
+    for comp in components[1:]:
+        mod = getattr(mod, comp)
+    return mod
+
+
+def read_file(filename):
+    fname = filename
+    if os.path.exists(fname):
+
+        logger.info("Engine file {} loaded!".format(filename))
+
+        with open(fname, 'r') as fp:
+            return json.load(fp)
+    else:
+        logger.info("Engine file {} doesn't exists...".format(filename))
+        return {}
+
+
+def generate_kwargs(package_name, clazz, params=None, initial_dataset='initialdataset', dataset='dataset', model='model', metrics='metrics'):
+    kwargs = {}
+
+    kwargs["persistence_mode"] = 'local'
+    kwargs["default_root_path"] = os.path.join(
+        os.getenv('MARVIN_DATA_PATH'), '.artifacts')
+    kwargs["is_remote_calling"] = True
+
+    _artifact_folder = package_name.replace(
+        'marvin_', '').replace('_engine', '')
+    _artifacts_to_load = ARTIFACTS[clazz.__name__]
+
+    if params:
+        kwargs["params"] = params
+    if dataset in _artifacts_to_load:
+        kwargs["dataset"] = clazz.retrieve_obj(
+            os.path.join(kwargs["default_root_path"], _artifact_folder, dataset))
+    if initial_dataset in _artifacts_to_load:
+        kwargs["initial_dataset"] = clazz.retrieve_obj(
+            os.path.join(kwargs["default_root_path"], _artifact_folder, initial_dataset))
+    if model in _artifacts_to_load:
+        kwargs["model"] = clazz.retrieve_obj(
+            os.path.join(kwargs["default_root_path"], _artifact_folder, model))
+    if metrics in _artifacts_to_load:
+        kwargs["metrics"] = clazz.retrieve_obj(
+            os.path.join(kwargs["default_root_path"], _artifact_folder, metrics))
+
+    return kwargs
+
+
+class MarvinEngineServer(object):
+    @classmethod
+    def create(cls, config, action, port, workers, rpc_workers, params, pipeline):
+        package_name = config['marvin_package']
+
+        def create_object(act):
+            clazz = CLAZZES[act]
+            _Action = dynamic_import("{}.{}".format(package_name, clazz))
+            kwargs = generate_kwargs(package_name, _Action, params)
+            return _Action(**kwargs)
+
+        root_obj = create_object(action)
+        previous_object = root_obj
+
+        if pipeline:
+            for step in list(reversed(pipeline)):
+                previous_object._previous_step = create_object(step)
+                previous_object = previous_object._previous_step
+
+        server = root_obj._prepare_remote_server(
+            port=port, workers=workers, rpc_workers=rpc_workers)
+
+        logger.info(
+            "Starting GRPC server [{}] for {} Action".format(port, action))
+        server.start()
+
+        return server
+
+
+def engine_server(config, action, max_workers, max_rpc_workers):
+
+    logger.info("Starting server ...")
+
+    # setting spark configuration directory
+    os.environ["SPARK_CONF_DIR"] = os.path.join(
+        os.environ["SPARK_HOME"], "conf")
+    os.environ["YARN_CONF_DIR"] = os.environ["SPARK_CONF_DIR"]
+
+    params = read_file('engine.params')
+    metadata = read_file('engine.metadata')
+
+    default_actions = {action['name']: action
+                       for action in metadata['actions']}
+
+    if action == 'all':
+        actions = default_actions
+    else:
+        actions = {action: default_actions[action]}
+
+    servers = []
+    for action_name in actions.keys():
+        # initializing server configuration
+        server = MarvinEngineServer.create(
+            config=config,
+            action=action_name,
+            port=actions[action_name]["port"],
+            workers=max_workers,
+            rpc_workers=max_rpc_workers,
+            params=params,
+            pipeline=actions[action_name]["pipeline"]
+        )
+
+        servers.append(server)
+
+    return servers
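
A minimal sketch of driving the module above (the engine package name and the
paths are assumptions; engine_server additionally expects an engine.metadata
file in the working directory, while dryrun falls back to empty params):

    import os

    from marvin_python_daemon.management.engine import dryrun, engine_server

    # engine.py derives SPARK_CONF_DIR/YARN_CONF_DIR from SPARK_HOME.
    os.environ.setdefault('SPARK_HOME', '/opt/spark')
    os.environ.setdefault('MARVIN_DATA_PATH', '/tmp/marvin_data')

    config = {'marvin_package': 'marvin_iris_engine'}  # hypothetical engine

    # Run a single step locally, without starting gRPC servers...
    dryrun(config, action='trainer', profiling_enabled=False)

    # ...or serve every action declared in engine.metadata.
    servers = engine_server(config, action='all', max_workers=1, max_rpc_workers=1)
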
diff --git a/python-daemon/marvin_python_daemon/management/notebook.py b/python-daemon/marvin_python_daemon/management/notebook.py
new file mode 100644
index 0000000..73f9684
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/management/notebook.py
@@ -0,0 +1,67 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+import subprocess
+import sys
+
+from ..common.log import get_logger
+
+logger = get_logger('management.notebook')
+
+
+def notebook(config, enable_security, port):
+    notebookdir = os.path.join(config['base_path'], 'notebooks')
+    command = [
+        "SPARK_CONF_DIR={0} YARN_CONF_DIR={0}".format(
+            os.path.join(os.environ["SPARK_HOME"], "conf")),
+        'jupyter', 'notebook',
+        '--notebook-dir', notebookdir,
+        '--ip', '0.0.0.0',
+        '--port', port,
+        '--no-browser',
+        '--config', os.path.join(os.environ["MARVIN_DAEMON_PATH"],
+                                 'extras', 'notebook_extensions', 'jupyter_notebook_config.py')
+    ]
+
+    command.append("--NotebookApp.token=") if not enable_security else None
+    command.append("--allow-root")
+
+    return_code = os.system(' '.join(command))
+    logger.info("Notebook call returned {0}".format(str(return_code)))
+
+
+def lab(config, enable_security, port):
+    notebookdir = os.path.join(config['base_path'], 'notebooks')
+    command = [
+        "SPARK_CONF_DIR={0} YARN_CONF_DIR={0}".format(
+            os.path.join(os.environ["SPARK_HOME"], "conf")),
+        'jupyter-lab',
+        '--notebook-dir', notebookdir,
+        '--ip', '0.0.0.0',
+        '--port', port,
+        '--no-browser',
+    ]
+
+    command.append("--NotebookApp.token=") if not enable_security else None
+
+    return_code = os.system(' '.join(command))
+    logger.info("Lab call returned {0}".format(str(return_code)))
diff --git a/python-daemon/marvin_python_daemon/management/test.py b/python-daemon/marvin_python_daemon/management/test.py
new file mode 100644
index 0000000..a4c6f18
--- /dev/null
+++ b/python-daemon/marvin_python_daemon/management/test.py
@@ -0,0 +1,129 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import sys
+import os
+import os.path
+import subprocess
+import shutil
+import tempfile
+import errno
+
+from ..common.log import get_logger
+
+logger = get_logger('management.test')
+
+
+def _copy(src, dest, ignore=('.git', '.pyc', '__pycache__')):
+    try:
+        shutil.copytree(src, dest, ignore=shutil.ignore_patterns(*ignore))
+    except OSError as e:
+        if e.errno == errno.ENOTDIR:
+            shutil.copy(src, dest)
+        else:
+            logger.error('Directory not copied. Error: %s' % e)
+
+
+def test(config, cov, no_capture, pdb, args):
+    os.environ['TESTING'] = 'true'
+
+    if args:
+        args = args.split(' ')
+    else:
+        args = ['tests']
+
+    if no_capture:
+        args += ['--capture=no']
+
+    if pdb:
+        args += ['--pdb']
+
+    cov_args = []
+    if cov:
+        cov_args += ['--cov', os.path.relpath(config['marvin_package'],
+                                              start=config['base_path']),
+                     '--cov-report', 'html',
+                     '--cov-report', 'xml',
+                     '--cov-report', 'term-missing',
+                     ]
+
+    command = ['py.test'] + cov_args + args
+    logger.info(' '.join(command))
+    env = os.environ.copy()
+    subprocess.call(command, cwd=config['base_path'], env=env)
+
+
+def tox(config, args):
+    os.environ['TESTING'] = 'true'
+
+    if args:
+        args = ['-a'] + args.split(' ')
+    else:
+        args = []
+    # Copy the project to a tmp dir
+    tmp_dir = tempfile.mkdtemp()
+    tox_dir = os.path.join(tmp_dir, config['marvin_package'])
+    _copy(config['base_path'], tox_dir)
+    command = ['python', 'setup.py', 'test'] + args
+    env = os.environ.copy()
+    subprocess.call(command, cwd=tox_dir, env=env)
+    shutil.rmtree(tmp_dir)
+
+
+def tdd(config, cov, no_capture, pdb, partial, args):
+    os.environ['TESTING'] = 'true'
+
+    if args:
+        args = args.split(' ')
+    else:
+        args = [os.path.relpath(
+            os.path.join(config['base_path'], 'tests'))]
+
+    if no_capture:
+        args += ['--capture=no']
+
+    if pdb:
+        args += ['--pdb']
+
+    if partial:
+        args += ['--testmon']
+
+    cov_args = []
+    if cov:
+        cov_args += ['--cov', os.path.relpath(config['marvin_package'],
+                                              start=config['base_path']),
+                     '--cov-report', 'html',
+                     '--cov-report', 'xml',
+                     '--cov-report', 'term-missing',
+                     ]
+
+    command = ['ptw', '-p', '--'] + cov_args + args
+    logger.info(' '.join(command))
+    env = os.environ.copy()
+    ptw_process = subprocess.Popen(command, cwd=config['base_path'], env=env)
+    return ptw_process
+
+
+def pep8(config):
+    command = ['pep8', config['marvin_package']]
+    exitcode = subprocess.call(command, cwd=config['base_path'])
+    if exitcode == 0:
+        logger.info('Congratulations! Everything conforms to the PEP8 standard.')
+    else:
+        logger.error('PEP8 check failed.')
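
A sketch of invoking the runners above (the config keys mirror what test() and
pep8() read; the package name and path are illustrative):

    from marvin_python_daemon.management.test import test, pep8

    config = {
        'marvin_package': 'marvin_iris_engine',  # hypothetical engine package
        'base_path': '/tmp/marvin_iris_engine',
    }

    # Expands to: py.test --cov <package> --cov-report html ... tests
    test(config, cov=True, no_capture=False, pdb=False, args=None)

    # Style check over the engine package.
    pep8(config)
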
diff --git a/python-daemon/pytest.ini b/python-daemon/pytest.ini
new file mode 100644
index 0000000..e3f1f0e
--- /dev/null
+++ b/python-daemon/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+minversion    = 2.0
+norecursedirs = .git .tox .eggs .cache *.egg build dist tmp* marvin_python_daemon/management
+python_files  = test*.py
\ No newline at end of file
diff --git a/python-toolbox/setup.py b/python-daemon/setup.py
similarity index 63%
rename from python-toolbox/setup.py
rename to python-daemon/setup.py
index 305c510..02f4f5d 100644
--- a/python-toolbox/setup.py
+++ b/python-daemon/setup.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -24,20 +24,20 @@
 from setuptools.command.test import test as TestCommand
 
 # Package basic info
-PACKAGE_NAME = 'marvin_python_toolbox'
-PACKAGE_DESCRIPTION = 'Apache Marvin Python Toolbox'
+PACKAGE_NAME = 'marvin_python_daemon'
+PACKAGE_DESCRIPTION = 'Marvin Python Daemon'
 
-URL = 'https://github.com/apache/incubator-marvin'
+URL = ''
 
-AUTHOR_NAME = 'Daniel Takabayashi'
-AUTHOR_EMAIL = 'dev@marvin.apache.org'
+AUTHOR_NAME = 'Lucas Cardoso'
+AUTHOR_EMAIL = ''
 
-PYTHON_2 = True
+PYTHON_2 = False
 PYTHON_3 = True
 
 # Project status
 # (should be 'planning', 'pre-alpha', 'alpha', 'beta', 'stable', 'mature' or 'inactive').
-STATUS = 'stable'
+STATUS = 'planning'
 
 # Project topic
 # See https://pypi.python.org/pypi?%3Aaction=list_classifiers for a list
@@ -46,62 +46,40 @@
 # External dependencies
 # More info https://pythonhosted.org/setuptools/setuptools.html#declaring-dependencies
 REQUIREMENTS_EXTERNAL = [
-    'six>=1.14.0',
-    'bumpversion>=0.5.3',
-    'click>=3.3',
-    'jupyter>=1.0.0',
-    'jupyterlab>=0.32.1',
-    'pep8>=1.7.0',
-    'virtualenv>=15.0.1',
     'jsonschema>=2.5.1',
     'python-slugify>=0.1.0',
-    'paramiko>=2.1.2',
-    'PyHive>=0.3.0',
-    'thrift>=0.10.0',
-    'thrift-sasl==0.3.0',
-    'virtualenvwrapper>=4.7.1',
     'requests>=2.19.1',
     'python-dateutil>=2.7.3',
-    'python-slugify>=0.1.0',
     'path.py>=7.2',
     'httpretty>=0.9.5',
-    'tornado>=4.5.3',
     'jsonschema>=2.5.1',
     'gprof2dot',
     'ujsonpath>=0.0.2',
     'simplejson>=3.10.0',
     'configobj>=5.0.6',
     'findspark>=1.1.0',
+    'progressbar2>=3.34.3',
+    'urllib3==1.21.1',
+    'unidecode==1.0.23',
+    'configparser',
+    'jupyter>=1.0.0',
+    'jupyterlab>=0.32.1',
+    'pep8>=1.7.0',
+    'thrift>=0.10.0',
+    'thrift-sasl==0.3.0',
     'grpcio>=1.13.0',
     'grpcio-tools>=1.13.0',
     'joblib>=0.11',
     'autopep8>=1.3.3',
-    'progressbar2>=3.34.3',
-    'urllib3==1.21.1',
-    'unidecode==1.0.23',
     'idna>=2.5',
     'bleach>=1.5.0',
-    'numpy>=1.16.2',
-    'google-cloud-bigquery>=1.21',
-    'pandas>=0.24'
+    'pyspark',
 ]
 
-# Test dependencies
-REQUIREMENTS_TESTS = [
-    'tox>=2.2.0',
-    'mock>=2.0.0',
-    'pytest>=2.9.2',
-    'pytest-cov>=1.8.1,<2.6',
-    'pytest-watch>=4.1.0',
-    'pytest-testmon==0.8.2',
-    'Keras>=2.2.0',
-    'tensorflow==2.0',
-]
 # This is normally an empty list
 DEPENDENCY_LINKS_EXTERNAL = []
-
 # script to be used
-SCRIPTS = ['bin/marvin', 'marvin.ini']
+SCRIPTS = ['bin/marvin-daemon']
 
 
 def _get_version():
@@ -111,33 +90,6 @@
     return version
 
 
-class Tox(TestCommand):
-    """Run the test cases using TOX command."""
-    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.tox_args = None
-
-    def finalize_options(self):
-        TestCommand.finalize_options(self)
-        self.test_args = []
-        self.test_suite = True
-
-    def run_tests(self):
-        # Import here, cause outside the eggs aren't loaded
-        import tox
-        import shlex
-        args = self.tox_args
-        if args:
-            args = shlex.split(self.tox_args)
-        else:
-            # Run all tests by default
-            args = ['-c', os.path.join(os.path.dirname(__file__), 'tox.ini'), 'tests']
-        errno = tox.cmdline(args=args)
-        sys.exit(errno)
-
-
 DEVELOPMENT_STATUS = {
     'planning': '1 - Planning',
     'pre-alpha': '2 - Pre-Alpha',
@@ -165,7 +117,8 @@
     version=_get_version(),
     url=URL,
     description=PACKAGE_DESCRIPTION,
-    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
+    long_description=open(os.path.join(
+        os.path.dirname(__file__), 'README.md')).read(),
     author=AUTHOR_NAME,
     maintainer=AUTHOR_NAME,
     maintainer_email=AUTHOR_EMAIL,
@@ -174,11 +127,6 @@
     zip_safe=False,
     classifiers=CLASSIFIERS,
     install_requires=REQUIREMENTS_EXTERNAL,
-    tests_require=REQUIREMENTS_TESTS,
-    extras_require={
-        'testing': REQUIREMENTS_TESTS,
-    },
     dependency_links=DEPENDENCY_LINKS_EXTERNAL,
     scripts=SCRIPTS,
-    cmdclass={'test': Tox},
 )
diff --git a/python-toolbox/tests/common/test_config.py b/python-daemon/tests/common/test_config.py
similarity index 68%
rename from python-toolbox/tests/common/test_config.py
rename to python-daemon/tests/common/test_config.py
index 5baf001..2d0e01d 100644
--- a/python-toolbox/tests/common/test_config.py
+++ b/python-daemon/tests/common/test_config.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -23,15 +23,15 @@
 except ImportError:
     import unittest.mock as mock
 
-from marvin_python_toolbox.common.config import Config, load_conf_from_file
-from marvin_python_toolbox.common.exceptions import InvalidConfigException
+from marvin_python_daemon.common.config import Config, load_conf_from_file
+from marvin_python_daemon.common.exceptions import InvalidConfigException
 
 
 class TestConfig:
     def teardown_method(self, test_method):
         Config.reset()
 
-    @mock.patch('marvin_python_toolbox.common.config.ConfigObj')
+    @mock.patch('marvin_python_daemon.common.config.ConfigObj')
     def test_load_conf_from_file(self, ConfigParserMocked):
         filepath = '/path/to/config/file.ini'
 
@@ -39,8 +39,8 @@
 
         ConfigParserMocked.assert_called_once_with(filepath)
 
-    @mock.patch('marvin_python_toolbox.common.config.ConfigObj')
-    @mock.patch('marvin_python_toolbox.common.config.os.getenv')
+    @mock.patch('marvin_python_daemon.common.config.ConfigObj')
+    @mock.patch('marvin_python_daemon.common.config.os.getenv')
     def test_load_conf_from_env(self, getenv_mocked, ConfigParserMocked):
         filepath = '/path/to/config/file.ini'
 
@@ -49,61 +49,65 @@
 
         ConfigParserMocked.assert_called_once_with(filepath)
 
-    @mock.patch('marvin_python_toolbox.common.config.ConfigObj')
+    @mock.patch('marvin_python_daemon.common.config.ConfigObj')
     def test_load_conf_from_default_path(self, ConfigParserMocked):
+        os.environ["DEFAULT_CONFIG_PATH"] = '/path/to/config'
         load_conf_from_file()
 
-        ConfigParserMocked.assert_called_once_with(os.environ['DEFAULT_CONFIG_PATH'])
+        ConfigParserMocked.assert_called_once_with(
+            os.environ['DEFAULT_CONFIG_PATH'])
 
-    @mock.patch('marvin_python_toolbox.common.config.logger')
-    @mock.patch('marvin_python_toolbox.common.config.ConfigObj.__getitem__')
+    @mock.patch('marvin_python_daemon.common.config.logger')
+    @mock.patch('marvin_python_daemon.common.config.ConfigObj.__getitem__')
     def test_load_conf_from_default_path_with_invalid_section(self, ConfigParserGetItemMocked, logger_mocked):
         from configparser import NoSectionError
 
         filepath = '/path/to/config/file.ini'
 
         ConfigParserGetItemMocked.side_effect = NoSectionError('')
-        assert len(load_conf_from_file(filepath, section='invalidsection')) == 0
+        assert len(load_conf_from_file(
+            filepath, section='invalidsection')) == 0
         logger_mocked.warn.assert_called_once_with(
             "Couldn't find \"invalidsection\" section in \"/path/to/config/file.ini\""
         )
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_get(self, load_conf_from_file_mocked, config_fixture):
         load_conf_from_file_mocked.return_value = config_fixture
         assert Config.get('key') == config_fixture['key']
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_get_invalid_key(self, load_conf_from_file_mocked, config_fixture):
         load_conf_from_file_mocked.return_value = config_fixture
         assert 'invalidkey' not in config_fixture
         with pytest.raises(InvalidConfigException):
             Config.get('invalidkey')
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_get_invalid_key_with_default(self, load_conf_from_file_mocked, config_fixture):
         load_conf_from_file_mocked.return_value = config_fixture
         assert 'invalidkey' not in config_fixture
-        assert Config.get('invalidkey', default='default_value') == 'default_value'
+        assert Config.get(
+            'invalidkey', default='default_value') == 'default_value'
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_get_with_invalid_section(self, load_conf_from_file_mocked, config_fixture):
         load_conf_from_file_mocked.return_value = {}
         with pytest.raises(InvalidConfigException):
             Config.get('key', section='invalidsection')
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_keys_alread_loaded(self, load_conf_from_file_mocked, config_fixture):
         load_conf_from_file_mocked.return_value = config_fixture
         Config._load()
         assert Config.keys() == config_fixture.keys()
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_keys(self, load_conf_from_file_mocked, config_fixture):
         load_conf_from_file_mocked.return_value = config_fixture
         assert Config.keys() == config_fixture.keys()
 
-    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    @mock.patch('marvin_python_daemon.common.config.load_conf_from_file')
     def test_keys_with_invalid_section(self, load_conf_from_file_mocked):
         load_conf_from_file_mocked.return_value = {}
         assert not Config.keys(section='invalidsection')
@@ -112,6 +116,8 @@
     def test_read_with_real_file(self, env_read):
         env_read.return_value = 'tests/fixtures/config.sample'
         assert Config.get('models.default_context_name') == 'pdl'
-        assert Config.get('models.default_context_name', section='section') == 'pdl2'
+        assert Config.get('models.default_context_name',
+                          section='section') == 'pdl2'
         assert Config.get('models.default_type_name') == 'pdl'
-        assert Config.get('models.default_type_name') == Config.get('models.default_type_name', section='section')
+        assert Config.get('models.default_type_name') == Config.get(
+            'models.default_type_name', section='section')
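
For reference, the Config surface these tests pin down, in one place (the file
path is illustrative):

    from marvin_python_daemon.common.config import Config, load_conf_from_file

    # Explicit file + section load; an unknown section yields an empty dict.
    conf = load_conf_from_file('/path/to/config/file.ini', section='marvin')

    # Class-level lookup with a default; without one, a missing key raises
    # InvalidConfigException.
    name = Config.get('models.default_context_name', default='unknown')
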
diff --git a/python-toolbox/tests/common/test_data.py b/python-daemon/tests/common/test_data.py
similarity index 72%
rename from python-toolbox/tests/common/test_data.py
rename to python-daemon/tests/common/test_data.py
index 99b34ef..5cfbbb9 100644
--- a/python-toolbox/tests/common/test_data.py
+++ b/python-daemon/tests/common/test_data.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -22,8 +22,8 @@
 except ImportError:
     import unittest.mock as mock
 
-from marvin_python_toolbox.common.data import MarvinData
-from marvin_python_toolbox.common.exceptions import InvalidConfigException
+from marvin_python_daemon.common.data import MarvinData
+from marvin_python_daemon.common.exceptions import InvalidConfigException
 from io import IOBase
 
 
@@ -39,26 +39,26 @@
 
 def test_read_from_env(data_path_key, data_path):
     os.environ[data_path_key] = data_path
-    assert MarvinData.data_path == os.environ[data_path_key]
+    assert MarvinData.get_data_path() == os.environ[data_path_key]
 
 
 def test_path_not_set(data_path_key):
     del os.environ[data_path_key]
     path_ = None
     try:
-        path_ = MarvinData.data_path
+        path_ = MarvinData.get_data_path()
     except InvalidConfigException:
         assert not path_
 
 
-@mock.patch('marvin_python_toolbox.common.data.check_path')
+@mock.patch('marvin_python_daemon.common.data.check_path')
 def test_unable_to_create_path(check_path, data_path_key, data_path):
     os.environ[data_path_key] = data_path
     check_path.return_value = False
 
     path_ = None
     try:
-        path_ = MarvinData.data_path
+        path_ = MarvinData.get_data_path()
     except InvalidConfigException:
         assert not path_
 
@@ -67,18 +67,18 @@
     data = 'return value'
 
     # If the data was not found try to load from filesystem
-    with mock.patch('marvin_python_toolbox.common.data.open', create=True) as mock_open:
+    with mock.patch('marvin_python_daemon.common.data.open', create=True) as mock_open:
         mock_open.return_value = mock.MagicMock(spec=IOBase)
         mocked_fp = mock_open.return_value.__enter__.return_value
         mocked_fp.read.return_value = data
-        content = MarvinData.load_data(os.path.join('named_features', 'brands.json'))
+        content = MarvinData.load_data('brands.json')
 
     mocked_fp.read.assert_called_once()
     assert content == data
 
 
 def test_load_data_from_filesystem_exception(data_path_key, data_path):
-    with mock.patch('marvin_python_toolbox.common.data.open') as mock_open:
+    with mock.patch('marvin_python_daemon.common.data.open') as mock_open:
         mock_open.side_effect = IOError
 
         # load_data should propagate IOError
@@ -87,11 +87,12 @@
 
 
 def test_data_key_using_abspath(data_path_key, data_path):
-    assert MarvinData._convert_path_to_key(os.path.join(data_path, 'brands.json')) == 'brands.json'
+    assert MarvinData._convert_path_to_key(
+        os.path.join(data_path, 'brands.json')) == 'brands.json'
 
 
-@mock.patch('marvin_python_toolbox.common.data.progressbar')
-@mock.patch('marvin_python_toolbox.common.data.requests')
+@mock.patch('marvin_python_daemon.common.data.progressbar')
+@mock.patch('marvin_python_daemon.common.data.requests')
 def test_download_file(mocked_requests, mocked_progressbar):
     file_url = 'google.com/file.json'
     file_path = MarvinData.download_file(file_url)
@@ -100,21 +101,23 @@
     file_path = MarvinData.download_file(file_url, local_file_name='myfile')
     assert file_path == '/tmp/data/myfile'
 
-@mock.patch('marvin_python_toolbox.common.data.progressbar')
-@mock.patch('marvin_python_toolbox.common.data.requests')
+
+@mock.patch('marvin_python_daemon.common.data.progressbar')
+@mock.patch('marvin_python_daemon.common.data.requests')
 def test_download_file_delete_file_if_exception(mocked_requests, mocked_progressbar):
     mocked_requests.get.side_effect = Exception()
     with open('/tmp/data/error.json', 'w') as f:
         f.write('test')
-    
+
     file_url = 'google.com/error.json'
     with pytest.raises(Exception) as excinfo:
         file_path = MarvinData.download_file(file_url, force=True)
 
     assert os.path.exists('/tmp/data/error.json') is False
 
-@mock.patch('marvin_python_toolbox.common.data.progressbar.ProgressBar')
-@mock.patch('marvin_python_toolbox.common.data.requests')
+
+@mock.patch('marvin_python_daemon.common.data.progressbar.ProgressBar')
+@mock.patch('marvin_python_daemon.common.data.requests')
 def test_download_file_write_file_if_content(mocked_requests, mocked_progressbar):
     from requests import Response
     file_url = 'google.com/file.json'
@@ -122,17 +125,18 @@
     response = mock.Mock(spec=Response)
     response.iter_content.return_value = 'x'
     mocked_requests.get.return_value = response
-        
+
     mocked_open = mock.mock_open()
-    with mock.patch('marvin_python_toolbox.common.data.open', mocked_open, create=True):
+    with mock.patch('marvin_python_daemon.common.data.open', mocked_open, create=True):
         MarvinData.download_file(file_url, force=True)
 
     mocked_open.assert_called_once_with('/tmp/data/file.json', 'wb')
     handle = mocked_open()
     handle.write.assert_called_once_with('x')
 
-@mock.patch('marvin_python_toolbox.common.data.progressbar.ProgressBar')
-@mock.patch('marvin_python_toolbox.common.data.requests')
+
+@mock.patch('marvin_python_daemon.common.data.progressbar.ProgressBar')
+@mock.patch('marvin_python_daemon.common.data.requests')
 def test_download_file_dont_write_file_if_no_content(mocked_requests, mocked_progressbar):
     from requests import Response
     file_url = 'google.com/file.json'
@@ -140,11 +144,11 @@
     response = mock.Mock(spec=Response)
     response.iter_content.return_value = ''
     mocked_requests.get.return_value = response
-        
+
     mocked_open = mock.mock_open()
-    with mock.patch('marvin_python_toolbox.common.data.open', mocked_open, create=True):
+    with mock.patch('marvin_python_daemon.common.data.open', mocked_open, create=True):
         MarvinData.download_file(file_url, force=True)
 
     mocked_open.assert_called_once_with('/tmp/data/file.json', 'wb')
     handle = mocked_open()
-    assert handle.write.call_count == 0
\ No newline at end of file
+    assert handle.write.call_count == 0
diff --git a/python-toolbox/tests/common/test_data_source_provider.py b/python-daemon/tests/common/test_data_source_provider.py
similarity index 91%
rename from python-toolbox/tests/common/test_data_source_provider.py
rename to python-daemon/tests/common/test_data_source_provider.py
index 08a7633..b54c49d 100644
--- a/python-toolbox/tests/common/test_data_source_provider.py
+++ b/python-daemon/tests/common/test_data_source_provider.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -15,12 +15,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import os
 import findspark
+
 findspark.init()
 
 # is important to import these classes after findspark.init call
-from pyspark.tests import ReusedPySparkTestCase
-from marvin_python_toolbox.common.data_source_provider import get_spark_session
+from pyspark.tests import ReusedPySparkTestCase
+from marvin_python_daemon.common.data_source_provider import get_spark_session
 
 try:
     import mock
@@ -67,5 +69,6 @@
 class TestSparkDataSource(ReusedPySparkTestCase):
     def test_spark_initialization(self):
         rdd = self.sc.parallelize(['Hi there', 'Hi'])
-        counted = rdd.flatMap(lambda word: word.split(' ')).map(lambda word: (word, 1)).reduceByKey(lambda acc, n: acc + n)
+        counted = rdd.flatMap(lambda word: word.split(' ')).map(
+            lambda word: (word, 1)).reduceByKey(lambda acc, n: acc + n)
         assert counted.collectAsMap() == {'Hi': 2, 'there': 1}
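
The import ordering asserted above matters outside the tests too; a minimal
sketch of obtaining a session (assuming get_spark_session's no-argument
defaults, which this diff does not show):

    import findspark
    findspark.init()  # must run before any pyspark import

    from marvin_python_daemon.common.data_source_provider import get_spark_session

    spark = get_spark_session()
    df = spark.createDataFrame([(1, 'a'), (2, 'b')], ['id', 'label'])
    print(df.count())  # -> 2
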
diff --git a/python-toolbox/tests/common/test_http_client.py b/python-daemon/tests/common/test_http_client.py
similarity index 91%
rename from python-toolbox/tests/common/test_http_client.py
rename to python-daemon/tests/common/test_http_client.py
index 10971eb..2d4798b 100644
--- a/python-toolbox/tests/common/test_http_client.py
+++ b/python-daemon/tests/common/test_http_client.py
@@ -1,6 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
-# Copyright [2019] [Apache Software Foundation]
+
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -19,8 +20,8 @@
 import httpretty
 from httpretty import httpretty as httpretty_object
 
-from marvin_python_toolbox.common.http_client import ApiClient, ListResultSet
-from marvin_python_toolbox.common.exceptions import HTTPException
+from marvin_python_daemon.common.http_client import ApiClient, ListResultSet
+from marvin_python_daemon.common.exceptions import HTTPException
 
 
 class TestHttpClient:
@@ -146,7 +147,8 @@
                                content_type='text/json',
                                status=500)
 
-        response = ApiClient().post('/service1/', {"name": "americanas", "url": "www.americanas.com.br"})
+        response = ApiClient().post(
+            '/service1/', {"name": "americanas", "url": "www.americanas.com.br"})
         assert not response.ok
 
     @httpretty.activate
@@ -157,7 +159,8 @@
                                content_type='text/json',
                                status=201)
 
-        response = ApiClient().post('/service1/', {"name": "americanas", "url": "www.americanas.com.br"})
+        response = ApiClient().post(
+            '/service1/', {"name": "americanas", "url": "www.americanas.com.br"})
         assert response.ok
 
     @httpretty.activate
@@ -168,7 +171,8 @@
                                content_type="application/json",
                                status=500)
 
-        response = ApiClient().put('/service1/', {"id": "1", "url": "www.americanas.com.br"})
+        response = ApiClient().put(
+            '/service1/', {"id": "1", "url": "www.americanas.com.br"})
         assert not response.ok
 
     @httpretty.activate
@@ -179,7 +183,8 @@
                                content_type='text/json',
                                status=200)
 
-        response = ApiClient().put('/service1/', {"id": "1", "name": "americanas", "url": "www.americanas.com.br"})
+        response = ApiClient().put(
+            '/service1/', {"id": "1", "name": "americanas", "url": "www.americanas.com.br"})
         assert response.ok
 
     @httpretty.activate
diff --git a/python-toolbox/tests/common/test_profiling.py b/python-daemon/tests/common/test_profiling.py
similarity index 96%
rename from python-toolbox/tests/common/test_profiling.py
rename to python-daemon/tests/common/test_profiling.py
index 9ff5fc1..d9c8a48 100644
--- a/python-toolbox/tests/common/test_profiling.py
+++ b/python-daemon/tests/common/test_profiling.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -26,7 +26,7 @@
 except ImportError:
     import unittest.mock as mock
 
-from marvin_python_toolbox.common.profiling import profiling
+from marvin_python_daemon.common.profiling import profiling
 
 
 class TestProfiling:
@@ -167,7 +167,7 @@
 
         shutil.rmtree(output_path)
 
-    @mock.patch('marvin_python_toolbox.common.profiling.subprocess')
+    @mock.patch('marvin_python_daemon.common.profiling.subprocess')
     def test_subprocess_exception(self, subprocess_mock):
         subprocess_mock.call.side_effect = Exception()
 
@@ -203,7 +203,7 @@
 
         shutil.rmtree(output_path)
 
-    @mock.patch('marvin_python_toolbox.common.profiling.subprocess')
+    @mock.patch('marvin_python_daemon.common.profiling.subprocess')
     def test_subprocess_exception_jupyter_repr_html(self, subprocess_mock):
         subprocess_mock.call.side_effect = Exception()
 
diff --git a/python-toolbox/tests/common/test_utils.py b/python-daemon/tests/common/test_utils.py
similarity index 86%
rename from python-toolbox/tests/common/test_utils.py
rename to python-daemon/tests/common/test_utils.py
index 2bd0af4..178c180 100644
--- a/python-toolbox/tests/common/test_utils.py
+++ b/python-daemon/tests/common/test_utils.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -26,10 +26,10 @@
 except ImportError:
     import unittest.mock as mock
 
-from marvin_python_toolbox.common.utils import (class_property, memoized_class_property, get_datetime, deprecated,
-                                        to_json, from_json, is_valid_json, validate_json, generate_key, to_slug,
-                                        url_encode, getattr_qualified, chunks, check_path)
-from marvin_python_toolbox.common.exceptions import InvalidJsonException
+from marvin_python_daemon.common.utils import (class_property, memoized_class_property, get_datetime, deprecated,
+                                               to_json, from_json, is_valid_json, validate_json, generate_key, to_slug,
+                                               url_encode, getattr_qualified, chunks, check_path)
+from marvin_python_daemon.common.exceptions import InvalidJsonException
 
 instance_count = 0
 
@@ -120,7 +120,7 @@
 
 def test_validate_json():
     valid = {
-        'prop': ['a', 'b' , 'c']
+        'prop': ['a', 'b', 'c']
     }
     invalid = {
         'prop': 'a'
@@ -130,7 +130,7 @@
         'properties': {
             'prop': {
                 'type': 'array',
-                'items': { 'type': 'string' }
+                'items': {'type': 'string'}
             }
         }
     }
@@ -142,7 +142,7 @@
 
 def test_is_valid_json():
     valid = {
-        'prop': ['a', 'b' , 'c']
+        'prop': ['a', 'b', 'c']
     }
     invalid = {
         'prop': 'a'
@@ -152,7 +152,7 @@
         'properties': {
             'prop': {
                 'type': 'array',
-                'items': { 'type': 'string' }
+                'items': {'type': 'string'}
             }
         }
     }
@@ -160,7 +160,8 @@
     assert is_valid_json(invalid, schema=schema) is False
 
     assert is_valid_json(json.dumps(valid), schema=json.dumps(schema)) is True
-    assert is_valid_json(json.dumps(invalid), schema=json.dumps(schema)) is False
+    assert is_valid_json(json.dumps(invalid),
+                         schema=json.dumps(schema)) is False
 
 
 def test_generate_key():
@@ -209,14 +210,14 @@
         getattr_qualified(a, 'b', 'default', 'bla')
 
 
-@mock.patch('marvin_python_toolbox.common.utils.os.path.exists')
+@mock.patch('marvin_python_daemon.common.utils.os.path.exists')
 def test_path_not_exists(path_exists_mock):
     path_exists_mock.return_value = False
     assert not check_path('temp')
 
 
-@mock.patch('marvin_python_toolbox.common.utils.os.makedirs')
-@mock.patch('marvin_python_toolbox.common.utils.os.path.exists')
+@mock.patch('marvin_python_daemon.common.utils.os.makedirs')
+@mock.patch('marvin_python_daemon.common.utils.os.path.exists')
 def test_path_creation(path_exists_mock, makedirs_mock):
     path_exists_mock.side_effect = [False, True]
     makedirs_mock.return_value = None
diff --git a/python-toolbox/tests/conftest.py b/python-daemon/tests/conftest.py
similarity index 93%
rename from python-toolbox/tests/conftest.py
rename to python-daemon/tests/conftest.py
index ebed059..8c767eb 100644
--- a/python-toolbox/tests/conftest.py
+++ b/python-daemon/tests/conftest.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -25,4 +25,4 @@
         'section': {
             'section_key': 'section_value'
         }
-    }
+    }
\ No newline at end of file
diff --git a/python-toolbox/tests/engine_base/serializers/test_keras_serializer.py b/python-daemon/tests/engine_base/serializers/test_keras_serializer.py
similarity index 85%
rename from python-toolbox/tests/engine_base/serializers/test_keras_serializer.py
rename to python-daemon/tests/engine_base/serializers/test_keras_serializer.py
index 4b53983..dceb553 100644
--- a/python-toolbox/tests/engine_base/serializers/test_keras_serializer.py
+++ b/python-daemon/tests/engine_base/serializers/test_keras_serializer.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -18,8 +18,8 @@
 import mock
 import pytest
 
-from marvin_python_toolbox.engine_base import EngineBaseTraining
-from marvin_python_toolbox.engine_base import KerasSerializer
+from marvin_python_daemon.engine_base import EngineBaseTraining
+from marvin_python_daemon.engine_base.serializers.keras_serializer import KerasSerializer
 
 
 @pytest.fixture
@@ -31,7 +31,7 @@
 
 
 class TestKerasSerializer(object):
-    @mock.patch('keras.models.load_model')
+    @mock.patch('tensorflow.keras.models.load_model')
     def test__serializer_load_keras(self, mocked_load, engine):
         mocked_load.return_value = {"me": "here"}
         mocked_path = "/tmp/engine/model"
@@ -53,7 +53,7 @@
         engine._serializer_dump(mocked_obj, object_file_path=mocked_path)
         mocked_obj.save.assert_called_once_with(mocked_path)
 
-    @mock.patch('marvin_python_toolbox.engine_base.EngineBaseTraining._serializer_dump')
+    @mock.patch('marvin_python_daemon.engine_base.EngineBaseTraining._serializer_dump')
     def test__serializer_dump_not_keras(self, mocked_dump, engine):
         mocked_obj = mock.MagicMock()
         mocked_path = "/tmp/engine/dataset"
diff --git a/python-toolbox/tests/engine_base/test_engine_base_action.py b/python-daemon/tests/engine_base/test_engine_base_action.py
similarity index 85%
rename from python-toolbox/tests/engine_base/test_engine_base_action.py
rename to python-daemon/tests/engine_base/test_engine_base_action.py
index b4d2010..469c581 100644
--- a/python-toolbox/tests/engine_base/test_engine_base_action.py
+++ b/python-daemon/tests/engine_base/test_engine_base_action.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -27,10 +27,10 @@
 except ImportError:
     import unittest.mock as mock
 
-from marvin_python_toolbox.engine_base import EngineBaseBatchAction
-from marvin_python_toolbox.engine_base import EngineBaseAction, EngineBaseOnlineAction
-from marvin_python_toolbox.engine_base.stubs.actions_pb2 import HealthCheckResponse, HealthCheckRequest
-from marvin_python_toolbox.engine_base.stubs.actions_pb2 import OnlineActionRequest, ReloadRequest, BatchActionRequest
+from marvin_python_daemon.engine_base import EngineBaseBatchAction
+from marvin_python_daemon.engine_base import EngineBaseAction, EngineBaseOnlineAction
+from marvin_python_daemon.engine_base.stubs.actions_pb2 import HealthCheckResponse, HealthCheckRequest
+from marvin_python_daemon.engine_base.stubs.actions_pb2 import OnlineActionRequest, ReloadRequest, BatchActionRequest
 
 
 @pytest.fixture
@@ -72,7 +72,8 @@
         assert engine._persistence_mode == 'x'
 
     def test_get_object_file_path(self, engine_action):
-        assert engine_action._get_object_file_path(object_reference="xpath") == "/tmp/.marvin/test_base_action/xpath"
+        assert engine_action._get_object_file_path(
+            object_reference="xpath") == "/tmp/.marvin/test_base_action/xpath"
 
     def test_save_obj_memory_persistence(self, engine_action):
         obj = [6, 5, 4]
@@ -90,7 +91,8 @@
 
         assert obj == engine_action._params
         assert os.path.exists("/tmp/.marvin/test_base_action/params")
-        assert list(engine_action._local_saved_objects.keys()) == [object_reference]
+        assert list(engine_action._local_saved_objects.keys()) == [
+            object_reference]
 
     def test_release_saved_objects(self, engine_action):
         obj = [6, 5, 4]
@@ -98,7 +100,8 @@
         engine_action._persistence_mode = 'local'
         engine_action._save_obj(object_reference, obj)
 
-        assert list(engine_action._local_saved_objects.keys()) == [object_reference]
+        assert list(engine_action._local_saved_objects.keys()) == [
+            object_reference]
         engine_action._release_local_saved_objects()
         assert engine_action._params is None
 
@@ -195,7 +198,8 @@
             def execute(self, input_message, params, **kwargs):
                 return "message 1"
 
-        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        request = OnlineActionRequest(
+            message="{\"k\": 1}", params="{\"k\": 1}")
         engine_action = StringReturnedAction()
         response = engine_action._remote_execute(request=request, context=None)
 
@@ -206,7 +210,8 @@
             def execute(self, input_message, params, **kwargs):
                 return 1
 
-        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        request = OnlineActionRequest(
+            message="{\"k\": 1}", params="{\"k\": 1}")
         engine_action = StringReturnedAction()
         response = engine_action._remote_execute(request=request, context=None)
 
@@ -217,7 +222,8 @@
             def execute(self, input_message, params, **kwargs):
                 return {"r": 1}
 
-        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        request = OnlineActionRequest(
+            message="{\"k\": 1}", params="{\"k\": 1}")
         engine_action = StringReturnedAction()
         response = engine_action._remote_execute(request=request, context=None)
 
@@ -228,23 +234,25 @@
             def execute(self, input_message, params, **kwargs):
                 return [1, 2]
 
-        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        request = OnlineActionRequest(
+            message="{\"k\": 1}", params="{\"k\": 1}")
         engine_action = StringReturnedAction()
         response = engine_action._remote_execute(request=request, context=None)
 
         assert response.message == "[1, 2]"
 
-    @mock.patch('marvin_python_toolbox.engine_base.engine_base_action.EngineBaseAction._load_obj')
+    @mock.patch('marvin_python_daemon.engine_base.engine_base_action.EngineBaseAction._load_obj')
     def test_remote_reload_with_artifacts(self, load_obj_mocked, engine_action):
         objs_key = "obj1"
         engine_action._save_obj(objs_key, "check")
         request = ReloadRequest(artifacts=objs_key, protocol='xyz')
 
         response = engine_action._remote_reload(request, None)
-        load_obj_mocked.assert_called_once_with(force=True, object_reference=u'obj1')
+        load_obj_mocked.assert_called_once_with(
+            force=True, object_reference=u'obj1')
         assert response.message == "Reloaded"
 
-    @mock.patch('marvin_python_toolbox.engine_base.engine_base_action.EngineBaseAction._load_obj')
+    @mock.patch('marvin_python_daemon.engine_base.engine_base_action.EngineBaseAction._load_obj')
     def test_remote_reload_without_artifacts(self, load_obj_mocked, engine_action):
         request = ReloadRequest(artifacts=None, protocol='xyz')
 
@@ -279,7 +287,7 @@
     def test_pipeline_execute_without_previous_steps(self, batch_engine_action):
         batch_engine_action.execute = mock.MagicMock()
         batch_engine_action._pipeline_execute(params=123)
-        
+
         batch_engine_action.execute.assert_called_once_with(123)
 
     def test_pipeline_execute_with_previous_steps(self, batch_engine_action):
@@ -289,7 +297,7 @@
         batch_engine_action.execute = mock.MagicMock()
 
         batch_engine_action._pipeline_execute(params=123)
-        
+
         previous._pipeline_execute.assert_called_once_with(123)
         batch_engine_action.execute.assert_called_once_with(123)
 
@@ -300,7 +308,8 @@
         request = BatchActionRequest()
         batch_engine_action._remote_execute(request, None)
 
-        batch_engine_action._pipeline_execute.assert_called_once_with(params=123)
+        batch_engine_action._pipeline_execute.assert_called_once_with(
+            params=123)
 
     def test_remote_execute_with_request_params(self, batch_engine_action):
         batch_engine_action._params = 123
@@ -309,7 +318,8 @@
         request = BatchActionRequest(params='{"test": 123}')
         batch_engine_action._remote_execute(request, None)
 
-        batch_engine_action._pipeline_execute.assert_called_once_with(params={u"test": 123})
+        batch_engine_action._pipeline_execute.assert_called_once_with(params={
+                                                                      u"test": 123})
 
     @mock.patch("json.load")
     def test__serializer_load_metrics(self, mocked_load):
@@ -324,8 +334,9 @@
                 pass
 
         mocked_open = mock.mock_open()
-        with mock.patch('marvin_python_toolbox.engine_base.engine_base_action.open', mocked_open, create=False):
-            _metrics = _EAction(default_root_path="/tmp/.marvin", persistence_mode="local")._load_obj(object_reference)
+        with mock.patch('marvin_python_daemon.engine_base.engine_base_action.open', mocked_open, create=False):
+            _metrics = _EAction(default_root_path="/tmp/.marvin",
+                                persistence_mode="local")._load_obj(object_reference)
 
         mocked_load.assert_called_once_with(ANY)
         mocked_open.assert_called_once()
@@ -343,10 +354,10 @@
                 pass
 
         mocked_open = mock.mock_open()
-        with mock.patch('marvin_python_toolbox.engine_base.engine_base_action.open', mocked_open, create=False):
-            _EAction(default_root_path="/tmp/.marvin", persistence_mode="local")._save_obj(object_reference, obj)
+        with mock.patch('marvin_python_daemon.engine_base.engine_base_action.open', mocked_open, create=False):
+            _EAction(default_root_path="/tmp/.marvin",
+                     persistence_mode="local")._save_obj(object_reference, obj)
 
-        mocked_dump.assert_called_once_with(obj, ANY, indent=4, separators=(u',', u': '), sort_keys=True)
+        mocked_dump.assert_called_once_with(
+            obj, ANY, indent=4, separators=(u',', u': '), sort_keys=True)
         mocked_open.assert_called_once()
-
-
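
The metrics load/save tests above avoid real file I/O by patching the module-level open with mock.mock_open. A self-contained sketch of the technique, using a hypothetical save_params stand-in for _save_obj:

    import json
    import unittest.mock as mock

    def save_params(path, obj):  # hypothetical, for illustration only
        with open(path, 'w') as fp:
            json.dump(obj, fp, indent=4, separators=(',', ': '), sort_keys=True)

    mocked_open = mock.mock_open()
    with mock.patch('builtins.open', mocked_open):
        save_params('/tmp/.marvin/params', {'k': 1})

    mocked_open.assert_called_once_with('/tmp/.marvin/params', 'w')
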
diff --git a/python-toolbox/tests/engine_base/test_engine_base_data_handler.py b/python-daemon/tests/engine_base/test_engine_base_data_handler.py
similarity index 87%
rename from python-toolbox/tests/engine_base/test_engine_base_data_handler.py
rename to python-daemon/tests/engine_base/test_engine_base_data_handler.py
index 03c1648..8e93ccc 100644
--- a/python-toolbox/tests/engine_base/test_engine_base_data_handler.py
+++ b/python-daemon/tests/engine_base/test_engine_base_data_handler.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
 
 import pytest
 
-from marvin_python_toolbox.engine_base import EngineBaseDataHandler
+from marvin_python_daemon.engine_base import EngineBaseDataHandler
 
 
 @pytest.fixture
@@ -33,7 +33,8 @@
 
     def test_initial_dataset(self, engine_action):
         engine_action.marvin_initial_dataset = [1]
-        assert engine_action.marvin_initial_dataset == engine_action._initial_dataset == [1]
+        assert engine_action.marvin_initial_dataset == engine_action._initial_dataset == [
+            1]
 
     def test_dataset(self, engine_action):
         engine_action.marvin_dataset = [1]
diff --git a/python-toolbox/tests/engine_base/test_engine_base_prediction.py b/python-daemon/tests/engine_base/test_engine_base_prediction.py
similarity index 86%
rename from python-toolbox/tests/engine_base/test_engine_base_prediction.py
rename to python-daemon/tests/engine_base/test_engine_base_prediction.py
index 2a9d5e0..5143925 100644
--- a/python-toolbox/tests/engine_base/test_engine_base_prediction.py
+++ b/python-daemon/tests/engine_base/test_engine_base_prediction.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -21,7 +21,7 @@
 except ImportError:
     import unittest.mock as mock
 
-from marvin_python_toolbox.engine_base import EngineBasePrediction
+from marvin_python_daemon.engine_base import EngineBasePrediction
 
 
 @pytest.fixture
@@ -49,17 +49,17 @@
 
 class TestEnsureReloadActionReplaceObjectAttr:
 
-    @mock.patch('marvin_python_toolbox.engine_base.engine_base_action.EngineBaseAction._serializer_load')
+    @mock.patch('marvin_python_daemon.engine_base.engine_base_action.EngineBaseAction._serializer_load')
     def test_first_load_from_artifact_works(self, mock_serializer, engine_action):
         mock_serializer.return_value = "MOCKED"
 
         assert engine_action._model == None
 
         engine_action._load_obj(object_reference="model")
-        
+
         assert engine_action._model == engine_action.marvin_model == "MOCKED"
 
-    @mock.patch('marvin_python_toolbox.engine_base.engine_base_action.EngineBaseAction._serializer_load')
+    @mock.patch('marvin_python_daemon.engine_base.engine_base_action.EngineBaseAction._serializer_load')
     def test_reload_works_before_first_load(self, mock_serializer, engine_action):
         mock_serializer.return_value = "MOCKED"
 
@@ -73,4 +73,4 @@
 
         engine_action._load_obj(object_reference="model", force=True)
 
-        assert engine_action._model == engine_action.marvin_model == "NEW MOCKED"
\ No newline at end of file
+        assert engine_action._model == engine_action.marvin_model == "NEW MOCKED"
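
The two reload tests above pin down a caching contract: once the artifact attribute is set, _load_obj is a no-op unless force=True. An assumed reading of that contract as a sketch (not the daemon's implementation):

    class ActionSketch:
        def __init__(self):
            self._model = None

        def _serializer_load(self, object_reference):
            return "MOCKED"

        def _load_obj(self, object_reference, force=False):
            attr = '_' + object_reference
            if getattr(self, attr) is None or force:
                setattr(self, attr, self._serializer_load(object_reference))

    action = ActionSketch()
    action._load_obj(object_reference="model")
    action._serializer_load = lambda ref: "NEW MOCKED"
    action._load_obj(object_reference="model")              # cached, unchanged
    assert action._model == "MOCKED"
    action._load_obj(object_reference="model", force=True)  # forced reload
    assert action._model == "NEW MOCKED"
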
diff --git a/python-toolbox/tests/engine_base/test_engine_base_training.py b/python-daemon/tests/engine_base/test_engine_base_training.py
similarity index 91%
rename from python-toolbox/tests/engine_base/test_engine_base_training.py
rename to python-daemon/tests/engine_base/test_engine_base_training.py
index 6102b14..3142a6a 100644
--- a/python-toolbox/tests/engine_base/test_engine_base_training.py
+++ b/python-daemon/tests/engine_base/test_engine_base_training.py
@@ -1,7 +1,7 @@
 #!/usr/bin/env python
 # coding=utf-8
 
-# Copyright [2019] [Apache Software Foundation]
+# Copyright [2020] [Apache Software Foundation]
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -17,7 +17,7 @@
 
 import pytest
 
-from marvin_python_toolbox.engine_base import EngineBaseTraining
+from marvin_python_daemon.engine_base import EngineBaseTraining
 
 
 @pytest.fixture
diff --git a/python-toolbox/tests/fixtures/config.sample b/python-daemon/tests/fixtures/config.sample
similarity index 100%
rename from python-toolbox/tests/fixtures/config.sample
rename to python-daemon/tests/fixtures/config.sample
diff --git a/python-daemon/tests/management/test_engine.py b/python-daemon/tests/management/test_engine.py
new file mode 100644
index 0000000..bded069
--- /dev/null
+++ b/python-daemon/tests/management/test_engine.py
@@ -0,0 +1,114 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from mock import call
+from mock import ANY
+from marvin_python_daemon.management.engine import MarvinDryRun
+from marvin_python_daemon.management.engine import dryrun
+import os
+
+
+mocked_conf = {
+    'marvin_package': 'test_package',
+    'inidir': 'test_dir'
+}
+
+
+def mocked_sleep(value):
+    if value == 100:
+        raise KeyboardInterrupt()
+
+
+class AcquisitorAndCleaner():
+
+    def __init__(self, persistence_mode, is_remote_calling, default_root_path):
+        self.persistence_mode = persistence_mode
+        self.is_remote_calling = is_remote_calling
+        self.default_root_path = default_root_path
+
+    def execute(self, **kwargs):
+        print('test')
+
+
+@mock.patch('marvin_python_daemon.management.engine.time.time')
+@mock.patch('marvin_python_daemon.management.engine.MarvinDryRun')
+@mock.patch('marvin_python_daemon.management.engine.os.system')
+def test_dryrun(system_mocked, MarvinDryRun_mocked, time_mocked):
+    time_mocked.return_value = 555
+    action = 'all'
+
+    dryrun(config=mocked_conf, action=action, profiling=None)
+
+    time_mocked.assert_called()
+    MarvinDryRun_mocked.assert_called_with(config=mocked_conf, messages=[
+                                           {}, {}])
+
+    MarvinDryRun_mocked.return_value.execute.assert_called_with(clazz='Feedback',
+                                                                params={}, profiling_enabled=None)
+
+    action = 'acquisitor'
+
+    dryrun(config=mocked_conf, action=action, profiling=None)
+
+    time_mocked.assert_called()
+    MarvinDryRun_mocked.assert_called_with(config=mocked_conf, messages=[
+                                           {}, {}])
+
+
+@mock.patch('marvin_python_daemon.management.engine.dynamic_import')
+def test_marvindryrun(import_mocked):
+    messages = ['/tmp/messages', '/tmp/feedback']
+    response = 'response'
+    clazz = 'PredictionPreparator'
+    import_mocked.return_value = AcquisitorAndCleaner
+
+    test_dryrun = MarvinDryRun(
+        config=mocked_conf, messages=messages)
+    test_dryrun.execute(clazz=clazz, params=None, profiling_enabled=True)
+
+    import_mocked.assert_called_with("{}.{}".format(
+        'test_package', 'PredictionPreparator'))
+
+    clazz = 'Feedback'
+    test_dryrun.execute(clazz=clazz, params=None, profiling_enabled=False)
+
+    import_mocked.assert_called_with(
+        "{}.{}".format('test_package', 'Feedback'))
+
+    clazz = 'Predictor'
+    test_dryrun.execute(clazz=clazz, params=None, profiling_enabled=False)
+
+    import_mocked.assert_called_with("{}.{}".format(
+        'test_package', 'PredictionPreparator'))
+
+    clazz = 'test'
+    test_dryrun.execute(clazz=clazz, params=None, profiling_enabled=True)
+    test_dryrun.execute(clazz=clazz, params=None, profiling_enabled=False)
+
+    import_mocked.assert_called_with("{}.{}".format('test_package', 'test'))
+
+    response = False
+    clazz = 'PredictionPreparator'
+
+    MarvinDryRun(config=mocked_conf, messages=messages)
+    test_dryrun = MarvinDryRun(config=mocked_conf, messages=messages)
+    test_dryrun.execute(clazz=clazz, params=None, profiling_enabled=False)
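
test_marvindryrun asserts that dynamic_import receives dotted "package.Class" strings and that dry-running Predictor also imports PredictionPreparator. A common way such a helper is implemented (a sketch under that assumption, not necessarily the daemon's code):

    import importlib

    def dynamic_import(dotted_path):
        module_path, _, class_name = dotted_path.rpartition('.')
        return getattr(importlib.import_module(module_path), class_name)

    # e.g. resolving a class from the standard library:
    assert dynamic_import('collections.OrderedDict').__name__ == 'OrderedDict'
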
diff --git a/python-daemon/tests/management/test_notebook.py b/python-daemon/tests/management/test_notebook.py
new file mode 100644
index 0000000..0172578
--- /dev/null
+++ b/python-daemon/tests/management/test_notebook.py
@@ -0,0 +1,91 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2020] [Apache Software Foundation]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+import os
+from marvin_python_daemon.management.notebook import notebook, lab
+
+
+mocked_config = {
+    'base_path': '/tmp'
+}
+
+os.environ['MARVIN_DAEMON_PATH'] = '/tmp/marvin/marvin_python_daemon'
+os.environ['MARVIN_ENGINE_PATH'] = '/tmp/marvin'
+
+
+@mock.patch('marvin_python_daemon.management.notebook.sys')
+@mock.patch('marvin_python_daemon.management.notebook.os.system')
+def test_notebook(system_mocked, sys_mocked):
+    config = mocked_config
+    port = '8888'
+    enable_security = False
+    allow_root = False
+    spark_conf = '/opt/spark/conf'
+    system_mocked.return_value = 1
+
+    notebook(config, enable_security, port)
+
+    system_mocked.assert_called_once_with('SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter notebook --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser --config ' +
+                                          os.environ["MARVIN_ENGINE_PATH"] + '/marvin_python_daemon/extras/notebook_extensions/jupyter_notebook_config.py --NotebookApp.token= --allow-root')
+
+
+@mock.patch('marvin_python_daemon.management.notebook.sys')
+@mock.patch('marvin_python_daemon.management.notebook.os.system')
+def test_notebook_with_security(system_mocked, sys_mocked):
+    config = mocked_config
+    port = '8888'
+    enable_security = True
+    system_mocked.return_value = 1
+
+    notebook(config, enable_security, port)
+
+    system_mocked.assert_called_once_with('SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter notebook --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser --config ' +
+                                          os.environ["MARVIN_ENGINE_PATH"] + '/marvin_python_daemon/extras/notebook_extensions/jupyter_notebook_config.py --allow-root')
+
+
+@mock.patch('marvin_python_daemon.management.notebook.sys')
+@mock.patch('marvin_python_daemon.management.notebook.os.system')
+def test_jupyter_lab(system_mocked, sys_mocked):
+    config = mocked_config
+    port = '8888'
+    enable_security = False
+    spark_conf = '/opt/spark/conf'
+    system_mocked.return_value = 1
+
+    lab(config, enable_security, port)
+
+    system_mocked.assert_called_once_with(
+        'SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter-lab --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser --NotebookApp.token=')
+
+
+@mock.patch('marvin_python_daemon.management.notebook.sys')
+@mock.patch('marvin_python_daemon.management.notebook.os.system')
+def test_jupyter_lab_with_security(system_mocked, sys_mocked):
+    config = mocked_config
+    port = '8888'
+    enable_security = True
+    system_mocked.return_value = 1
+
+    lab(config, enable_security, port)
+
+    system_mocked.assert_called_once_with(
+        'SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter-lab --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser')
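
These notebook tests pin the exact shell command handed to os.system; the only difference security makes is whether --NotebookApp.token= (an empty token, which disables auth) is appended. A sketch of the command construction inferred from the expected strings (names here are illustrative, not the daemon's API):

    def build_notebook_cmd(notebook_dir, port, enable_security,
                           spark_conf='/opt/spark/conf'):
        cmd = ('SPARK_CONF_DIR={conf} YARN_CONF_DIR={conf} jupyter notebook'
               ' --notebook-dir {ndir} --ip 0.0.0.0 --port {port}'
               ' --no-browser').format(conf=spark_conf, ndir=notebook_dir,
                                       port=port)
        if not enable_security:
            cmd += ' --NotebookApp.token='  # empty token disables auth
        return cmd

    assert '--NotebookApp.token=' in build_notebook_cmd('/tmp/notebooks',
                                                        8888, False)
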
diff --git a/python-daemon/tox.ini b/python-daemon/tox.ini
new file mode 100644
index 0000000..8e918b7
--- /dev/null
+++ b/python-daemon/tox.ini
@@ -0,0 +1,10 @@
+[tox]
+envlist = py36
+
+[testenv]
+deps=pytest
+     pytest-cov
+     mock
+     tensorflow
+commands=py.test --cov={envsitepackagesdir}/marvin_python_daemon --cov-report html --cov-report xml {posargs}
+passenv=SPARK_HOME MARVIN_HOME MARVIN_DATA_PATH MARVIN_LOG
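
The new tox.ini runs the suite in an isolated py36 virtualenv and points coverage at the installed copy of marvin_python_daemon ({envsitepackagesdir}) rather than the working tree; only the variables listed under passenv are forwarded into the test environment. Arguments after -- are substituted for {posargs}, so a subset of tests can be run with, for example, tox -e py36 -- -k notebook.
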
diff --git a/python-toolbox/.bumpversion.cfg b/python-toolbox/.bumpversion.cfg
deleted file mode 100644
index 2938b3c..0000000
--- a/python-toolbox/.bumpversion.cfg
+++ /dev/null
@@ -1,7 +0,0 @@
-[bumpversion]
-current_version = 0.0.5
-
-[bumpversion:file:marvin_python_toolbox/VERSION]
-
-[bumpversion:file:README.md]
-
diff --git a/python-toolbox/.coveragerc b/python-toolbox/.coveragerc
deleted file mode 100644
index 9c77704..0000000
--- a/python-toolbox/.coveragerc
+++ /dev/null
@@ -1,22 +0,0 @@
-[run]
-omit = tests/*
-branch = True
-
-[report]
-exclude_lines =
-    pragma: no cover
-
-    def __repr__
-    if self\.debug
-
-    raise AssertionError
-    raise NotImplementedError
-
-    if 0:
-    if __name__ == .__main__.:
-
-[html]
-directory = coverage_report
-
-[xml]
-output = coverage.xml
\ No newline at end of file
diff --git a/python-toolbox/.github/ISSUE_TEMPLATE/bug_report.md b/python-toolbox/.github/ISSUE_TEMPLATE/bug_report.md
deleted file mode 100644
index 8e7cb86..0000000
--- a/python-toolbox/.github/ISSUE_TEMPLATE/bug_report.md
+++ /dev/null
@@ -1,30 +0,0 @@
----
-name: Bug report
-about: Create a report to help us improve
-
----
-
-**Describe the bug**
-A clear and concise description of what the bug is.
-
-**To Reproduce**
-Steps to reproduce the behavior:
-1. Go to '...'
-2. Click on '....'
-3. Scroll down to '....'
-4. See error
-
-**Expected behavior**
-A clear and concise description of what you expected to happen.
-
-**Screenshots**
-If applicable, add screenshots to help explain your problem.
-
-**Desktop (please complete the following information):**
- - OS: [e.g. iOS]
- - Browser [e.g. chrome, safari]
- - Version [e.g. 22]
- - Log [Warning, Error]
-
-**Additional context**
-Add any other context about the problem here.
diff --git a/python-toolbox/.github/ISSUE_TEMPLATE/custom.md b/python-toolbox/.github/ISSUE_TEMPLATE/custom.md
deleted file mode 100644
index 99bb9a0..0000000
--- a/python-toolbox/.github/ISSUE_TEMPLATE/custom.md
+++ /dev/null
@@ -1,7 +0,0 @@
----
-name: Custom issue template
-about: Describe this issue template's purpose here.
-
----
-
-
diff --git a/python-toolbox/.github/ISSUE_TEMPLATE/feature_request.md b/python-toolbox/.github/ISSUE_TEMPLATE/feature_request.md
deleted file mode 100644
index 066b2d9..0000000
--- a/python-toolbox/.github/ISSUE_TEMPLATE/feature_request.md
+++ /dev/null
@@ -1,17 +0,0 @@
----
-name: Feature request
-about: Suggest an idea for this project
-
----
-
-**Is your feature request related to a problem? Please describe.**
-A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
-
-**Describe the solution you'd like**
-A clear and concise description of what you want to happen.
-
-**Describe alternatives you've considered**
-A clear and concise description of any alternative solutions or features you've considered.
-
-**Additional context**
-Add any other context or screenshots about the feature request here.
diff --git a/python-toolbox/CHANGES.md b/python-toolbox/CHANGES.md
deleted file mode 100644
index 0cf8ca6..0000000
--- a/python-toolbox/CHANGES.md
+++ /dev/null
@@ -1,65 +0,0 @@
-## Changes log
-
-### 0.0.5
-
-   - Artifact reload bug fix #3 #5
-   - Dependency Links Deprecation & Remove #14
-   - Dependencies version fix for MacOS env #17
-   - General contribution file update #10
-   - CI update
-     - Move Travis CI out from sub-directory #1 #5
-     - Travis CI retry #6
-     - Travis CI sbt batch mode #7 #8
-     - Pytest-Cov Version Requirement Update #9
-   - Readme update
-     - Tutorial book link update #2 #4 #11
-     - Install links update to Apache Website #12
-   - License Update
-     - Move License out for Github #13
-
-### 0.0.4
-
-   - Docs.yaml file update for metrics api
-   - Some adjustments in toolbox and template makefiles #104
-   - Removing some commands by install mode (dev and prod) #104
-   - Moving autocomplete and notebook extension from toolbox setup to engine template setup. Close #107
-   - Separating test dependencies and creating a new make command. close #100
-   - Metrics as json and Keras serializer to Closes #86 and Closes #98
-   - Saving and loading metrics artifacts as json files to Fix #98
-   - Adding a symlink to the data path on engine generate. close #93
-   - Marvin is now installable with pip. fix #84
-   - ASCII encoding error fix for accented words in predict message
-   - Add Jupyter Lab command. Fix #85
-   - CLI parameter conflict fix
-   - New param to force reload #80
-   - Improving test coverage
-   - New python binary parameter to be used in the creation of virtual env
-   - Fix tornado 4.5.3 and pip 9.0.1
-
-### 0.0.3
-
-	- Python 3 support general compatibility refactoring (#68)
-    - Add marvin_ prefix to artifact getters and setters to avoid user code conflicts
-    - Fixing #66, a bug related to overriding the params default values
-    - Refactor artifact setters and getters in engine templates
-    - Making the toolbox marvin.ini be found by default
-    - Making "params" an execute method parameter so that default values can be overridden at runtime
-    - Enabling extra parameters to be passed for executor JVM customization. Fix #65
-    - Improve spark conf parameter usage in cli's commands to use SPARK_CONF_DIR and SPARK_HOME envs.
-    - Not using json dumps if the response type is string. Fixed #67
-    - Adding gitter tag to README file.
-    - Remove deploy to PyPI from build
-    - Install twine in distribution task
-    - Add --process-dependency-links in pip install command
-    - General bug fixes
-
-### 0.0.2
-
-    - change executor vm parameter from modelProtocol to protocol
-    - Generic Dockerfile template and make commands to be used to build, run and push containers    
-    - fix spark conf dir parameter bug
-    - create distribute task to simplify the pypi package distribution.
-
-### 0.0.1
-
- - initial version
diff --git a/python-toolbox/CODE_OF_CONDUCT.md b/python-toolbox/CODE_OF_CONDUCT.md
deleted file mode 100644
index d1ea5fa..0000000
--- a/python-toolbox/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,46 +0,0 @@
-# Contributor Covenant Code of Conduct
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy towards other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or advances
-* Trolling, insulting/derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or electronic address, without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a professional setting
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at dev@marvin.apache.org. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
-
-[homepage]: http://contributor-covenant.org
-[version]: http://contributor-covenant.org/version/1/4/
diff --git a/python-toolbox/INSTALL b/python-toolbox/INSTALL
deleted file mode 100644
index e69de29..0000000
--- a/python-toolbox/INSTALL
+++ /dev/null
diff --git a/python-toolbox/MANIFEST.in b/python-toolbox/MANIFEST.in
deleted file mode 100644
index b219cb6..0000000
--- a/python-toolbox/MANIFEST.in
+++ /dev/null
@@ -1,13 +0,0 @@
-include CHANGES.md
-include INSTALL
-include LICENSE
-include MANIFEST.in
-include README.md
-include marvin_python_toolbox/VERSION
-recursive-include marvin_python_toolbox/extras *
-recursive-include marvin_python_toolbox/management *
-recursive-include notebooks *
-recursive-include tests *
-global-exclude __pycache__
-global-exclude *.py[co]
-prune notebooks/build
diff --git a/python-toolbox/Makefile b/python-toolbox/Makefile
deleted file mode 100644
index d0417d4..0000000
--- a/python-toolbox/Makefile
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-.PHONY: help marvin marvin-prod update clean-pyc clean-build clean-reports clean-deps clean grpc distribution
-
-help:
-	@echo "    marvin"
-	@echo "        Prepare project to be used as a marvin package."
-	@echo "    marvin-prod"
-	@echo "        Prepare project to be used in production environment."
-	@echo "    update"
-	@echo "        Reinstall requirements and setup.py dependencies."
-	@echo "    clean-all"
-	@echo "        Remove all generated artifacts."
-	@echo "    clean-pyc"
-	@echo "        Remove python artifacts."
-	@echo "    clean-build"
-	@echo "        Remove build artifacts."
-	@echo "    clean-reports"
-	@echo "        Remove coverage reports."
-	@echo "    clean-deps"
-	@echo "        Remove marvin setup.py dependencies."
-	@echo "    grpc"
-	@echo "        Build grpc stubs."
-	@echo "    distribution"
-	@echo "        Build and upload the toolbox as a wheel package in pypi."
-
-marvin:
-	pip install -e ".[testing]"
-	touch .dev
-	marvin --help
-
-update:
-	pip install -e . -U
-
-marvin-prod:
-	pip install .
-	rm -f .dev
-	marvin --help
-
-clean-pyc:
-	find . -name '*.pyc' -exec rm -f {} +
-	find . -name '*.pyo' -exec rm -f {} +
-	find . -name '*~' -exec rm -f  {} +
-
-clean-build:
-	rm -f .prod
-	rm -rf *.egg-info
-	rm -rf .cache
-	rm -rf .eggs
-	rm -rf dist
-
-clean-reports:
-	rm -rf coverage_report/
-	rm -f coverage.xml
-	rm -f .coverage
-
-clean-deps:
-	pip freeze | grep -v "^-e" | xargs pip uninstall -y
-
-clean: clean-build clean-pyc clean-reports clean-deps
-
-grpc:
-	python -m grpc_tools.protoc --proto_path=marvin_python_toolbox/engine_base/protos --python_out=marvin_python_toolbox/engine_base/stubs --grpc_python_out=marvin_python_toolbox/engine_base/stubs marvin_python_toolbox/engine_base/protos/actions.proto
-	ls -la marvin_python_toolbox/engine_base/stubs/*.py
-
-distribution: clean-build
-	pip install twine
-	python setup.py bdist_wheel --universal
-	twine upload dist/marvin_python_toolbox*.whl
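
The removed grpc target ran python -m grpc_tools.protoc to regenerate the stubs. The same generation can be driven from Python via grpc_tools.protoc.main; the daemon-side paths below are assumed for illustration (the new package layout is not shown in this hunk):

    from grpc_tools import protoc

    protoc.main([
        'grpc_tools.protoc',
        '--proto_path=marvin_python_daemon/engine_base/protos',
        '--python_out=marvin_python_daemon/engine_base/stubs',
        '--grpc_python_out=marvin_python_daemon/engine_base/stubs',
        'marvin_python_daemon/engine_base/protos/actions.proto',
    ])
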
diff --git a/python-toolbox/NOTICE b/python-toolbox/NOTICE
deleted file mode 100644
index 45cc502..0000000
--- a/python-toolbox/NOTICE
+++ /dev/null
@@ -1,5 +0,0 @@
-Marvin Artificial Intelligence Platform
-Copyright 2019 Apache Software Foundation
-
-This product includes software developed at
-Apache Software Foundation https://www.apache.org/
\ No newline at end of file
diff --git a/python-toolbox/PULL_REQUEST_TEMPLATE.md b/python-toolbox/PULL_REQUEST_TEMPLATE.md
deleted file mode 100644
index 82eda38..0000000
--- a/python-toolbox/PULL_REQUEST_TEMPLATE.md
+++ /dev/null
@@ -1,17 +0,0 @@
-Fixes # .
-
-Changes proposed in this pull request:
--
--
--
--
--
-
-How to test new changes:
--
--
--
--
--
-
-dev@marvin.apache.org
diff --git a/python-toolbox/README.md b/python-toolbox/README.md
deleted file mode 100644
index ef81a72..0000000
--- a/python-toolbox/README.md
+++ /dev/null
@@ -1,112 +0,0 @@
-[![Build Status](https://travis-ci.org/apache/incubator-marvin.svg)](https://travis-ci.org/apache/incubator-marvin) [![codecov](https://codecov.io/gh/apache/incubator-marvin/branch/develop/graph/badge.svg)](https://codecov.io/gh/apache/incubator-marvin/branch/develop)
-
-# Marvin Toolbox v0.0.5
-
-![](https://github.com/apache/incubator-marvin-website/blob/master/site/assets/themes/apache/img/logo.png?raw=true)
-
-# Quick Start
-
-## Review
-
-**Marvin** is an open-source Artificial Intelligence platform that focuses on helping data scientists deliver meaningful solutions to complex problems. Supported by a standardized large-scale, language-agnostic architecture, Marvin simplifies the process of exploration and modeling.
-
-## Getting Started
-* [Installing Marvin (Ubuntu)](https://marvin.apache.org/marvin-platform-book/ch2_toolbox_installation/ubuntu/)
-* [Installing Marvin (MacOS)](https://marvin.apache.org/marvin-platform-book/ch2_toolbox_installation/mac/)
-* [Installing Marvin (Other OS) Vagrant](https://marvin.apache.org/marvin-platform-book/ch2_toolbox_installation/vagrant/)
-* [Creating a new engine](#creating-a-new-engine)
-* [Working in an existing engine](#working-in-an-existing-engine)
-* [Command line interface](#command-line-interface)
-* [Running an example engine](#running-an-example-engine)
-
-
-### Creating a new engine
-1. To create a new engine
-```
-workon python-toolbox-env
-marvin engine-generate
-```
-Respond to the prompt and wait for the engine environment preparation to complete. Don't forget to start the dev box first if you are using Vagrant.
-
-2. Test the new engine
-```
-workon <new_engine_name>-env
-marvin test
-```
-
-3. For more information
-```
-marvin --help
-```
-
-### Working in an existing engine
-
-1. Activate the engine's virtualenv
-```
-workon <engine_name>-env
-```
-
-2. Test your engine
-```
-marvin test
-```
-
-3. Bring up the notebook and access it from your browser
-```
-marvin notebook
-```
-
-### Command line interface
-Usage: marvin [OPTIONS] COMMAND [ARGS]
-
-Options:
-```
-  --debug       #Enable debug mode.
-  --version     #Show the version and exit.
-  --help        #Show this command line interface and exit.
-```
-
-Commands:
-```
-  engine-generate     #Generate a new marvin engine project.
-  engine-generateenv  #Generate a new marvin engine environment.
-  engine-grpcserver   #Marvin gRPC engine action server starts.
-  engine-httpserver   #Marvin http api server starts.
-  hive-dataimport     #Import data samples from a hive database to the hive running in this toolbox.
-  hive-generateconf   #Generate default configuration file.
-  hive-resetremote    #Drop all remote tables from the informed engine on the host.
-  notebook            #Start the Jupyter notebook server.
-  pkg-bumpversion     #Bump the package version.
-  pkg-createtag       #Create git tag using the package version.
-  pkg-showchanges     #Show the package changelog.
-  pkg-showinfo        #Show information about the package.
-  pkg-showversion     #Show the package version.
-  pkg-updatedeps      #Update requirements.txt.
-  test                #Run tests.
-  test-checkpep8      #Check python code style.
-  test-tdd            #Watch for changes to run tests automatically.
-  test-tox            #Run tests using a new virtualenv.
-```
-
-### Running an example engine
-
-1. Clone the example engine from the repository
-```
-git clone https://github.com/apache/incubator-marvin.git
-cd public-engines
-```
-
-2. Generate a new Marvin engine environment for the Iris species engine
-```
-workon python-toolbox-env
-marvin engine-generateenv ../engines/iris-species-engine/
-```
-
-3. Run the Iris species engine
-```
-workon iris-species-engine-env
-marvin engine-dryrun 
-```
-
-> Marvin is a project started at B2W Digital offices and released open source in September 2017.
-> The project was donated to the Apache Software Foundation in August 2018.
diff --git a/python-toolbox/marvin.ini b/python-toolbox/marvin.ini
deleted file mode 100644
index bf06992..0000000
--- a/python-toolbox/marvin.ini
+++ /dev/null
@@ -1,3 +0,0 @@
-[marvin]
-package = marvin_python_toolbox
-type = tool
diff --git a/python-toolbox/marvin_python_toolbox/_compatibility.py b/python-toolbox/marvin_python_toolbox/_compatibility.py
deleted file mode 100644
index 0e4e321..0000000
--- a/python-toolbox/marvin_python_toolbox/_compatibility.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Compatibility module.
-
-Import this module to help write code compatible with Python 2 and 3.
-"""
-
-from __future__ import print_function
-from __future__ import division
-from __future__ import absolute_import
-from __future__ import unicode_literals
-from six.moves.urllib.parse import urlparse, quote
-from six import StringIO, iteritems, text_type
-from six.moves import xrange
-import six
-
-__all__ = ['six']
-
-# Add here any code that have to differentiate between python 2 and 3.
diff --git a/python-toolbox/marvin_python_toolbox/config.py b/python-toolbox/marvin_python_toolbox/config.py
deleted file mode 100644
index 025ad3d..0000000
--- a/python-toolbox/marvin_python_toolbox/config.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import os
-import os.path
-import copy
-
-import configparser
-
-from ._compatibility import six
-from ._logging import get_logger
-
-
-__all__ = ['find_inidir', 'parse_ini']
-
-
-logger = get_logger('config')
-
-
-def find_inidir(inifilename='marvin.ini'):
-    inidir = None
-    currentdir = os.getcwd()
-
-    while True:
-        logger.debug('Looking for marvinini in {}'.format(currentdir))
-        if os.path.exists(os.path.join(currentdir, inifilename)):
-            inidir = currentdir
-            logger.debug('marvinini found {}'.format(inidir))
-            break
-
-        parentdir = os.path.abspath(os.path.join(currentdir, os.pardir))
-        if currentdir == parentdir:
-            # currentdir is '/'
-            logger.debug('marvinini not found')
-            break
-
-        currentdir = parentdir
-
-    return inidir
-
-
-def parse_ini(inipath, defaults=None):
-    if defaults is None:
-        defaults = {}
-
-    logger.debug("Parsing marvinini '{}' with defaults '{}'".format(inipath, defaults))
-
-    config_raw = configparser.ConfigParser()
-    config_raw.read(inipath)
-
-    config = copy.deepcopy(defaults)
-
-    for section in config_raw.sections():
-        # First pass
-        for key, value in config_raw.items(section):
-            key = '_'.join((section, key)).lower()
-            logger.debug('Processing {}: {}'.format(key, value))
-            processed_value = value.format(**config)
-            config[key] = processed_value
-
-    # Second pass
-    for key, value in config.items():
-        processed_value = value.format(**config)
-        if ',' in processed_value:
-            processed_value = processed_value.split(',')
-        config[key] = processed_value
-
-    logger.debug('marvinini loaded: {}'.format(config))
-
-    return config
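
For context on the removed parse_ini: the first pass flattens [section] keys into section_key entries and interpolates {placeholders} against values parsed so far; the second pass re-interpolates and splits comma-separated values into lists. A rough illustration with hypothetical keys:

    # Given an ini file containing:
    #   [marvin]
    #   package = my_engine
    #   data = {marvin_package}/data,{marvin_package}/models
    # parse_ini(path) would yield approximately:
    expected = {
        'marvin_package': 'my_engine',
        'marvin_data': ['my_engine/data', 'my_engine/models'],
    }
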
diff --git a/python-toolbox/marvin_python_toolbox/decorators.py b/python-toolbox/marvin_python_toolbox/decorators.py
deleted file mode 100644
index fab885e..0000000
--- a/python-toolbox/marvin_python_toolbox/decorators.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from functools import update_wrapper
-from click.decorators import pass_context
-from click.decorators import command as click_command
-
-
-def command(*args, **kwargs):
-    def wrapper(func):
-        @pass_context
-        def new_func(ctx, *args, **kwargs):
-            return ctx.invoke(func, ctx, *args, **kwargs)
-        return click_command(*args, **kwargs)(update_wrapper(new_func, func))
-    return wrapper
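
The removed command helper wraps click.command so that the click Context is always injected as a command's first argument. A compact, runnable restatement with a hypothetical command to show the calling convention:

    from functools import update_wrapper
    import click

    def command(*args, **kwargs):
        def wrapper(func):
            @click.pass_context
            def new_func(ctx, *a, **kw):
                return ctx.invoke(func, ctx, *a, **kw)
            return click.command(*args, **kwargs)(update_wrapper(new_func, func))
        return wrapper

    @command('hello')  # hypothetical command name
    def hello(ctx):
        click.echo('running {}'.format(ctx.info_name))
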
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/stubs/actions_pb2.py b/python-toolbox/marvin_python_toolbox/engine_base/stubs/actions_pb2.py
deleted file mode 100644
index a05f55d..0000000
--- a/python-toolbox/marvin_python_toolbox/engine_base/stubs/actions_pb2.py
+++ /dev/null
@@ -1,821 +0,0 @@
-# Generated by the protocol buffer compiler.  DO NOT EDIT!
-# source: actions.proto
-
-import sys
-_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-from google.protobuf import descriptor_pb2
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
-  name='actions.proto',
-  package='',
-  syntax='proto3',
-  serialized_pb=_b('\n\ractions.proto\"6\n\x13OnlineActionRequest\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x0e\n\x06params\x18\x02 \x01(\t\"\'\n\x14OnlineActionResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"$\n\x12\x42\x61tchActionRequest\x12\x0e\n\x06params\x18\x01 \x01(\t\"&\n\x13\x42\x61tchActionResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"4\n\rReloadRequest\x12\x10\n\x08protocol\x18\x01 \x01(\t\x12\x11\n\tartifacts\x18\x02 \x01(\t\"!\n\x0eReloadResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"\'\n\x12HealthCheckRequest\x12\x11\n\tartifacts\x18\x02 \x01(\t\"]\n\x13HealthCheckResponse\x12+\n\x06status\x18\x01 \x01(\x0e\x32\x1b.HealthCheckResponse.Status\"\x19\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\x07\n\x03NOK\x10\x01\x32\xca\x01\n\x13OnlineActionHandler\x12@\n\x0f_remote_execute\x12\x14.OnlineActionRequest\x1a\x15.OnlineActionResponse\"\x00\x12\x33\n\x0e_remote_reload\x12\x0e.ReloadRequest\x1a\x0f.ReloadResponse\"\x00\x12<\n\r_health_check\x12\x13.HealthCheckRequest\x1a\x14.HealthCheckResponse\"\x00\x32\xc7\x01\n\x12\x42\x61tchActionHandler\x12>\n\x0f_remote_execute\x12\x13.BatchActionRequest\x1a\x14.BatchActionResponse\"\x00\x12\x33\n\x0e_remote_reload\x12\x0e.ReloadRequest\x1a\x0f.ReloadResponse\"\x00\x12<\n\r_health_check\x12\x13.HealthCheckRequest\x1a\x14.HealthCheckResponse\"\x00\x62\x06proto3')
-)
-
-
-
-_HEALTHCHECKRESPONSE_STATUS = _descriptor.EnumDescriptor(
-  name='Status',
-  full_name='HealthCheckResponse.Status',
-  filename=None,
-  file=DESCRIPTOR,
-  values=[
-    _descriptor.EnumValueDescriptor(
-      name='OK', index=0, number=0,
-      options=None,
-      type=None),
-    _descriptor.EnumValueDescriptor(
-      name='NOK', index=1, number=1,
-      options=None,
-      type=None),
-  ],
-  containing_type=None,
-  options=None,
-  serialized_start=390,
-  serialized_end=415,
-)
-_sym_db.RegisterEnumDescriptor(_HEALTHCHECKRESPONSE_STATUS)
-
-
-_ONLINEACTIONREQUEST = _descriptor.Descriptor(
-  name='OnlineActionRequest',
-  full_name='OnlineActionRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message', full_name='OnlineActionRequest.message', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='params', full_name='OnlineActionRequest.params', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=17,
-  serialized_end=71,
-)
-
-
-_ONLINEACTIONRESPONSE = _descriptor.Descriptor(
-  name='OnlineActionResponse',
-  full_name='OnlineActionResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message', full_name='OnlineActionResponse.message', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=73,
-  serialized_end=112,
-)
-
-
-_BATCHACTIONREQUEST = _descriptor.Descriptor(
-  name='BatchActionRequest',
-  full_name='BatchActionRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='params', full_name='BatchActionRequest.params', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=114,
-  serialized_end=150,
-)
-
-
-_BATCHACTIONRESPONSE = _descriptor.Descriptor(
-  name='BatchActionResponse',
-  full_name='BatchActionResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message', full_name='BatchActionResponse.message', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=152,
-  serialized_end=190,
-)
-
-
-_RELOADREQUEST = _descriptor.Descriptor(
-  name='ReloadRequest',
-  full_name='ReloadRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='protocol', full_name='ReloadRequest.protocol', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-    _descriptor.FieldDescriptor(
-      name='artifacts', full_name='ReloadRequest.artifacts', index=1,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=192,
-  serialized_end=244,
-)
-
-
-_RELOADRESPONSE = _descriptor.Descriptor(
-  name='ReloadResponse',
-  full_name='ReloadResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='message', full_name='ReloadResponse.message', index=0,
-      number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=246,
-  serialized_end=279,
-)
-
-
-_HEALTHCHECKREQUEST = _descriptor.Descriptor(
-  name='HealthCheckRequest',
-  full_name='HealthCheckRequest',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='artifacts', full_name='HealthCheckRequest.artifacts', index=0,
-      number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=_b("").decode('utf-8'),
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=281,
-  serialized_end=320,
-)
-
-
-_HEALTHCHECKRESPONSE = _descriptor.Descriptor(
-  name='HealthCheckResponse',
-  full_name='HealthCheckResponse',
-  filename=None,
-  file=DESCRIPTOR,
-  containing_type=None,
-  fields=[
-    _descriptor.FieldDescriptor(
-      name='status', full_name='HealthCheckResponse.status', index=0,
-      number=1, type=14, cpp_type=8, label=1,
-      has_default_value=False, default_value=0,
-      message_type=None, enum_type=None, containing_type=None,
-      is_extension=False, extension_scope=None,
-      options=None),
-  ],
-  extensions=[
-  ],
-  nested_types=[],
-  enum_types=[
-    _HEALTHCHECKRESPONSE_STATUS,
-  ],
-  options=None,
-  is_extendable=False,
-  syntax='proto3',
-  extension_ranges=[],
-  oneofs=[
-  ],
-  serialized_start=322,
-  serialized_end=415,
-)
-
-_HEALTHCHECKRESPONSE.fields_by_name['status'].enum_type = _HEALTHCHECKRESPONSE_STATUS
-_HEALTHCHECKRESPONSE_STATUS.containing_type = _HEALTHCHECKRESPONSE
-DESCRIPTOR.message_types_by_name['OnlineActionRequest'] = _ONLINEACTIONREQUEST
-DESCRIPTOR.message_types_by_name['OnlineActionResponse'] = _ONLINEACTIONRESPONSE
-DESCRIPTOR.message_types_by_name['BatchActionRequest'] = _BATCHACTIONREQUEST
-DESCRIPTOR.message_types_by_name['BatchActionResponse'] = _BATCHACTIONRESPONSE
-DESCRIPTOR.message_types_by_name['ReloadRequest'] = _RELOADREQUEST
-DESCRIPTOR.message_types_by_name['ReloadResponse'] = _RELOADRESPONSE
-DESCRIPTOR.message_types_by_name['HealthCheckRequest'] = _HEALTHCHECKREQUEST
-DESCRIPTOR.message_types_by_name['HealthCheckResponse'] = _HEALTHCHECKRESPONSE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-OnlineActionRequest = _reflection.GeneratedProtocolMessageType('OnlineActionRequest', (_message.Message,), dict(
-  DESCRIPTOR = _ONLINEACTIONREQUEST,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:OnlineActionRequest)
-  ))
-_sym_db.RegisterMessage(OnlineActionRequest)
-
-OnlineActionResponse = _reflection.GeneratedProtocolMessageType('OnlineActionResponse', (_message.Message,), dict(
-  DESCRIPTOR = _ONLINEACTIONRESPONSE,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:OnlineActionResponse)
-  ))
-_sym_db.RegisterMessage(OnlineActionResponse)
-
-BatchActionRequest = _reflection.GeneratedProtocolMessageType('BatchActionRequest', (_message.Message,), dict(
-  DESCRIPTOR = _BATCHACTIONREQUEST,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:BatchActionRequest)
-  ))
-_sym_db.RegisterMessage(BatchActionRequest)
-
-BatchActionResponse = _reflection.GeneratedProtocolMessageType('BatchActionResponse', (_message.Message,), dict(
-  DESCRIPTOR = _BATCHACTIONRESPONSE,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:BatchActionResponse)
-  ))
-_sym_db.RegisterMessage(BatchActionResponse)
-
-ReloadRequest = _reflection.GeneratedProtocolMessageType('ReloadRequest', (_message.Message,), dict(
-  DESCRIPTOR = _RELOADREQUEST,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:ReloadRequest)
-  ))
-_sym_db.RegisterMessage(ReloadRequest)
-
-ReloadResponse = _reflection.GeneratedProtocolMessageType('ReloadResponse', (_message.Message,), dict(
-  DESCRIPTOR = _RELOADRESPONSE,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:ReloadResponse)
-  ))
-_sym_db.RegisterMessage(ReloadResponse)
-
-HealthCheckRequest = _reflection.GeneratedProtocolMessageType('HealthCheckRequest', (_message.Message,), dict(
-  DESCRIPTOR = _HEALTHCHECKREQUEST,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:HealthCheckRequest)
-  ))
-_sym_db.RegisterMessage(HealthCheckRequest)
-
-HealthCheckResponse = _reflection.GeneratedProtocolMessageType('HealthCheckResponse', (_message.Message,), dict(
-  DESCRIPTOR = _HEALTHCHECKRESPONSE,
-  __module__ = 'actions_pb2'
-  # @@protoc_insertion_point(class_scope:HealthCheckResponse)
-  ))
-_sym_db.RegisterMessage(HealthCheckResponse)
-
-
-
-_ONLINEACTIONHANDLER = _descriptor.ServiceDescriptor(
-  name='OnlineActionHandler',
-  full_name='OnlineActionHandler',
-  file=DESCRIPTOR,
-  index=0,
-  options=None,
-  serialized_start=418,
-  serialized_end=620,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='_remote_execute',
-    full_name='OnlineActionHandler._remote_execute',
-    index=0,
-    containing_service=None,
-    input_type=_ONLINEACTIONREQUEST,
-    output_type=_ONLINEACTIONRESPONSE,
-    options=None,
-  ),
-  _descriptor.MethodDescriptor(
-    name='_remote_reload',
-    full_name='OnlineActionHandler._remote_reload',
-    index=1,
-    containing_service=None,
-    input_type=_RELOADREQUEST,
-    output_type=_RELOADRESPONSE,
-    options=None,
-  ),
-  _descriptor.MethodDescriptor(
-    name='_health_check',
-    full_name='OnlineActionHandler._health_check',
-    index=2,
-    containing_service=None,
-    input_type=_HEALTHCHECKREQUEST,
-    output_type=_HEALTHCHECKRESPONSE,
-    options=None,
-  ),
-])
-_sym_db.RegisterServiceDescriptor(_ONLINEACTIONHANDLER)
-
-DESCRIPTOR.services_by_name['OnlineActionHandler'] = _ONLINEACTIONHANDLER
-
-
-_BATCHACTIONHANDLER = _descriptor.ServiceDescriptor(
-  name='BatchActionHandler',
-  full_name='BatchActionHandler',
-  file=DESCRIPTOR,
-  index=1,
-  options=None,
-  serialized_start=623,
-  serialized_end=822,
-  methods=[
-  _descriptor.MethodDescriptor(
-    name='_remote_execute',
-    full_name='BatchActionHandler._remote_execute',
-    index=0,
-    containing_service=None,
-    input_type=_BATCHACTIONREQUEST,
-    output_type=_BATCHACTIONRESPONSE,
-    options=None,
-  ),
-  _descriptor.MethodDescriptor(
-    name='_remote_reload',
-    full_name='BatchActionHandler._remote_reload',
-    index=1,
-    containing_service=None,
-    input_type=_RELOADREQUEST,
-    output_type=_RELOADRESPONSE,
-    options=None,
-  ),
-  _descriptor.MethodDescriptor(
-    name='_health_check',
-    full_name='BatchActionHandler._health_check',
-    index=2,
-    containing_service=None,
-    input_type=_HEALTHCHECKREQUEST,
-    output_type=_HEALTHCHECKRESPONSE,
-    options=None,
-  ),
-])
-_sym_db.RegisterServiceDescriptor(_BATCHACTIONHANDLER)
-
-DESCRIPTOR.services_by_name['BatchActionHandler'] = _BATCHACTIONHANDLER
-
-try:
-  # THESE ELEMENTS WILL BE DEPRECATED.
-  # Please use the generated *_pb2_grpc.py files instead.
-  import grpc
-  from grpc.beta import implementations as beta_implementations
-  from grpc.beta import interfaces as beta_interfaces
-  from grpc.framework.common import cardinality
-  from grpc.framework.interfaces.face import utilities as face_utilities
-
-
-  class OnlineActionHandlerStub(object):
-    # missing associated documentation comment in .proto file
-    pass
-
-    def __init__(self, channel):
-      """Constructor.
-
-      Args:
-        channel: A grpc.Channel.
-      """
-      self._remote_execute = channel.unary_unary(
-          '/OnlineActionHandler/_remote_execute',
-          request_serializer=OnlineActionRequest.SerializeToString,
-          response_deserializer=OnlineActionResponse.FromString,
-          )
-      self._remote_reload = channel.unary_unary(
-          '/OnlineActionHandler/_remote_reload',
-          request_serializer=ReloadRequest.SerializeToString,
-          response_deserializer=ReloadResponse.FromString,
-          )
-      self._health_check = channel.unary_unary(
-          '/OnlineActionHandler/_health_check',
-          request_serializer=HealthCheckRequest.SerializeToString,
-          response_deserializer=HealthCheckResponse.FromString,
-          )
-
-
-  class OnlineActionHandlerServicer(object):
-    # missing associated documentation comment in .proto file
-    pass
-
-    def _remote_execute(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-      context.set_details('Method not implemented!')
-      raise NotImplementedError('Method not implemented!')
-
-    def _remote_reload(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-      context.set_details('Method not implemented!')
-      raise NotImplementedError('Method not implemented!')
-
-    def _health_check(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-      context.set_details('Method not implemented!')
-      raise NotImplementedError('Method not implemented!')
-
-
-  def add_OnlineActionHandlerServicer_to_server(servicer, server):
-    rpc_method_handlers = {
-        '_remote_execute': grpc.unary_unary_rpc_method_handler(
-            servicer._remote_execute,
-            request_deserializer=OnlineActionRequest.FromString,
-            response_serializer=OnlineActionResponse.SerializeToString,
-        ),
-        '_remote_reload': grpc.unary_unary_rpc_method_handler(
-            servicer._remote_reload,
-            request_deserializer=ReloadRequest.FromString,
-            response_serializer=ReloadResponse.SerializeToString,
-        ),
-        '_health_check': grpc.unary_unary_rpc_method_handler(
-            servicer._health_check,
-            request_deserializer=HealthCheckRequest.FromString,
-            response_serializer=HealthCheckResponse.SerializeToString,
-        ),
-    }
-    generic_handler = grpc.method_handlers_generic_handler(
-        'OnlineActionHandler', rpc_method_handlers)
-    server.add_generic_rpc_handlers((generic_handler,))
-
-
-  class BatchActionHandlerStub(object):
-    # missing associated documentation comment in .proto file
-    pass
-
-    def __init__(self, channel):
-      """Constructor.
-
-      Args:
-        channel: A grpc.Channel.
-      """
-      self._remote_execute = channel.unary_unary(
-          '/BatchActionHandler/_remote_execute',
-          request_serializer=BatchActionRequest.SerializeToString,
-          response_deserializer=BatchActionResponse.FromString,
-          )
-      self._remote_reload = channel.unary_unary(
-          '/BatchActionHandler/_remote_reload',
-          request_serializer=ReloadRequest.SerializeToString,
-          response_deserializer=ReloadResponse.FromString,
-          )
-      self._health_check = channel.unary_unary(
-          '/BatchActionHandler/_health_check',
-          request_serializer=HealthCheckRequest.SerializeToString,
-          response_deserializer=HealthCheckResponse.FromString,
-          )
-
-
-  class BatchActionHandlerServicer(object):
-    # missing associated documentation comment in .proto file
-    pass
-
-    def _remote_execute(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-      context.set_details('Method not implemented!')
-      raise NotImplementedError('Method not implemented!')
-
-    def _remote_reload(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-      context.set_details('Method not implemented!')
-      raise NotImplementedError('Method not implemented!')
-
-    def _health_check(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-      context.set_details('Method not implemented!')
-      raise NotImplementedError('Method not implemented!')
-
-
-  def add_BatchActionHandlerServicer_to_server(servicer, server):
-    rpc_method_handlers = {
-        '_remote_execute': grpc.unary_unary_rpc_method_handler(
-            servicer._remote_execute,
-            request_deserializer=BatchActionRequest.FromString,
-            response_serializer=BatchActionResponse.SerializeToString,
-        ),
-        '_remote_reload': grpc.unary_unary_rpc_method_handler(
-            servicer._remote_reload,
-            request_deserializer=ReloadRequest.FromString,
-            response_serializer=ReloadResponse.SerializeToString,
-        ),
-        '_health_check': grpc.unary_unary_rpc_method_handler(
-            servicer._health_check,
-            request_deserializer=HealthCheckRequest.FromString,
-            response_serializer=HealthCheckResponse.SerializeToString,
-        ),
-    }
-    generic_handler = grpc.method_handlers_generic_handler(
-        'BatchActionHandler', rpc_method_handlers)
-    server.add_generic_rpc_handlers((generic_handler,))
-
-
-  class BetaOnlineActionHandlerServicer(object):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This class was generated
-    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-    # missing associated documentation comment in .proto file
-    pass
-    def _remote_execute(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-    def _remote_reload(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-    def _health_check(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-
-
-  class BetaOnlineActionHandlerStub(object):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This class was generated
-    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-    # missing associated documentation comment in .proto file
-    pass
-    def _remote_execute(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
-      # missing associated documentation comment in .proto file
-      pass
-      raise NotImplementedError()
-    _remote_execute.future = None
-    def _remote_reload(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
-      # missing associated documentation comment in .proto file
-      pass
-      raise NotImplementedError()
-    _remote_reload.future = None
-    def _health_check(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
-      # missing associated documentation comment in .proto file
-      pass
-      raise NotImplementedError()
-    _health_check.future = None
-
-
-  def beta_create_OnlineActionHandler_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This function was
-    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
-    request_deserializers = {
-      ('OnlineActionHandler', '_health_check'): HealthCheckRequest.FromString,
-      ('OnlineActionHandler', '_remote_execute'): OnlineActionRequest.FromString,
-      ('OnlineActionHandler', '_remote_reload'): ReloadRequest.FromString,
-    }
-    response_serializers = {
-      ('OnlineActionHandler', '_health_check'): HealthCheckResponse.SerializeToString,
-      ('OnlineActionHandler', '_remote_execute'): OnlineActionResponse.SerializeToString,
-      ('OnlineActionHandler', '_remote_reload'): ReloadResponse.SerializeToString,
-    }
-    method_implementations = {
-      ('OnlineActionHandler', '_health_check'): face_utilities.unary_unary_inline(servicer._health_check),
-      ('OnlineActionHandler', '_remote_execute'): face_utilities.unary_unary_inline(servicer._remote_execute),
-      ('OnlineActionHandler', '_remote_reload'): face_utilities.unary_unary_inline(servicer._remote_reload),
-    }
-    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
-    return beta_implementations.server(method_implementations, options=server_options)
-
-
-  def beta_create_OnlineActionHandler_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This function was
-    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
-    request_serializers = {
-      ('OnlineActionHandler', '_health_check'): HealthCheckRequest.SerializeToString,
-      ('OnlineActionHandler', '_remote_execute'): OnlineActionRequest.SerializeToString,
-      ('OnlineActionHandler', '_remote_reload'): ReloadRequest.SerializeToString,
-    }
-    response_deserializers = {
-      ('OnlineActionHandler', '_health_check'): HealthCheckResponse.FromString,
-      ('OnlineActionHandler', '_remote_execute'): OnlineActionResponse.FromString,
-      ('OnlineActionHandler', '_remote_reload'): ReloadResponse.FromString,
-    }
-    cardinalities = {
-      '_health_check': cardinality.Cardinality.UNARY_UNARY,
-      '_remote_execute': cardinality.Cardinality.UNARY_UNARY,
-      '_remote_reload': cardinality.Cardinality.UNARY_UNARY,
-    }
-    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
-    return beta_implementations.dynamic_stub(channel, 'OnlineActionHandler', cardinalities, options=stub_options)
-
-
-  class BetaBatchActionHandlerServicer(object):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This class was generated
-    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-    # missing associated documentation comment in .proto file
-    pass
-    def _remote_execute(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-    def _remote_reload(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-    def _health_check(self, request, context):
-      # missing associated documentation comment in .proto file
-      pass
-      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
-
-
-  class BetaBatchActionHandlerStub(object):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This class was generated
-    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
-    # missing associated documentation comment in .proto file
-    pass
-    def _remote_execute(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
-      # missing associated documentation comment in .proto file
-      pass
-      raise NotImplementedError()
-    _remote_execute.future = None
-    def _remote_reload(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
-      # missing associated documentation comment in .proto file
-      pass
-      raise NotImplementedError()
-    _remote_reload.future = None
-    def _health_check(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
-      # missing associated documentation comment in .proto file
-      pass
-      raise NotImplementedError()
-    _health_check.future = None
-
-
-  def beta_create_BatchActionHandler_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This function was
-    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
-    request_deserializers = {
-      ('BatchActionHandler', '_health_check'): HealthCheckRequest.FromString,
-      ('BatchActionHandler', '_remote_execute'): BatchActionRequest.FromString,
-      ('BatchActionHandler', '_remote_reload'): ReloadRequest.FromString,
-    }
-    response_serializers = {
-      ('BatchActionHandler', '_health_check'): HealthCheckResponse.SerializeToString,
-      ('BatchActionHandler', '_remote_execute'): BatchActionResponse.SerializeToString,
-      ('BatchActionHandler', '_remote_reload'): ReloadResponse.SerializeToString,
-    }
-    method_implementations = {
-      ('BatchActionHandler', '_health_check'): face_utilities.unary_unary_inline(servicer._health_check),
-      ('BatchActionHandler', '_remote_execute'): face_utilities.unary_unary_inline(servicer._remote_execute),
-      ('BatchActionHandler', '_remote_reload'): face_utilities.unary_unary_inline(servicer._remote_reload),
-    }
-    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
-    return beta_implementations.server(method_implementations, options=server_options)
-
-
-  def beta_create_BatchActionHandler_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
-    """The Beta API is deprecated for 0.15.0 and later.
-
-    It is recommended to use the GA API (classes and functions in this
-    file not marked beta) for all further purposes. This function was
-    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
-    request_serializers = {
-      ('BatchActionHandler', '_health_check'): HealthCheckRequest.SerializeToString,
-      ('BatchActionHandler', '_remote_execute'): BatchActionRequest.SerializeToString,
-      ('BatchActionHandler', '_remote_reload'): ReloadRequest.SerializeToString,
-    }
-    response_deserializers = {
-      ('BatchActionHandler', '_health_check'): HealthCheckResponse.FromString,
-      ('BatchActionHandler', '_remote_execute'): BatchActionResponse.FromString,
-      ('BatchActionHandler', '_remote_reload'): ReloadResponse.FromString,
-    }
-    cardinalities = {
-      '_health_check': cardinality.Cardinality.UNARY_UNARY,
-      '_remote_execute': cardinality.Cardinality.UNARY_UNARY,
-      '_remote_reload': cardinality.Cardinality.UNARY_UNARY,
-    }
-    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
-    return beta_implementations.dynamic_stub(channel, 'BatchActionHandler', cardinalities, options=stub_options)
-except ImportError:
-  pass
-# @@protoc_insertion_point(module_scope)
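
Although generated, the public surface of the deleted actions_pb2 module is just ordinary protobuf message classes. A minimal round-trip sketch (import path taken from the file header of the next hunk; the status field comes from the descriptors above, and 0 is the proto3 default enum value):

```python
# Sketch only: round-tripping the generated message classes from the deleted
# actions_pb2 module. Message names come from the descriptors above.
from marvin_python_toolbox.engine_base.stubs import actions_pb2

request = actions_pb2.HealthCheckRequest()
wire_bytes = request.SerializeToString()              # protobuf wire format
parsed = actions_pb2.HealthCheckRequest.FromString(wire_bytes)

response = actions_pb2.HealthCheckResponse(status=0)  # first Status enum value
```
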
diff --git a/python-toolbox/marvin_python_toolbox/engine_base/stubs/actions_pb2_grpc.py b/python-toolbox/marvin_python_toolbox/engine_base/stubs/actions_pb2_grpc.py
deleted file mode 100644
index 1821456..0000000
--- a/python-toolbox/marvin_python_toolbox/engine_base/stubs/actions_pb2_grpc.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-# Python 3 variant: from .actions_pb2 import *
-# Python 2 variant: import actions_pb2 as actions__pb2
-from ..stubs import actions_pb2 as actions__pb2
-
-
-class OnlineActionHandlerStub(object):
-  # missing associated documentation comment in .proto file
-  pass
-
-  def __init__(self, channel):
-    """Constructor.
-
-    Args:
-      channel: A grpc.Channel.
-    """
-    self._remote_execute = channel.unary_unary(
-        '/OnlineActionHandler/_remote_execute',
-        request_serializer=actions__pb2.OnlineActionRequest.SerializeToString,
-        response_deserializer=actions__pb2.OnlineActionResponse.FromString,
-        )
-    self._remote_reload = channel.unary_unary(
-        '/OnlineActionHandler/_remote_reload',
-        request_serializer=actions__pb2.ReloadRequest.SerializeToString,
-        response_deserializer=actions__pb2.ReloadResponse.FromString,
-        )
-    self._health_check = channel.unary_unary(
-        '/OnlineActionHandler/_health_check',
-        request_serializer=actions__pb2.HealthCheckRequest.SerializeToString,
-        response_deserializer=actions__pb2.HealthCheckResponse.FromString,
-        )
-
-
-class OnlineActionHandlerServicer(object):
-  # missing associated documentation comment in .proto file
-  pass
-
-  def _remote_execute(self, request, context):
-    # missing associated documentation comment in .proto file
-    pass
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-  def _remote_reload(self, request, context):
-    # missing associated documentation comment in .proto file
-    pass
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-  def _health_check(self, request, context):
-    # missing associated documentation comment in .proto file
-    pass
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-
-def add_OnlineActionHandlerServicer_to_server(servicer, server):
-  rpc_method_handlers = {
-      '_remote_execute': grpc.unary_unary_rpc_method_handler(
-          servicer._remote_execute,
-          request_deserializer=actions__pb2.OnlineActionRequest.FromString,
-          response_serializer=actions__pb2.OnlineActionResponse.SerializeToString,
-      ),
-      '_remote_reload': grpc.unary_unary_rpc_method_handler(
-          servicer._remote_reload,
-          request_deserializer=actions__pb2.ReloadRequest.FromString,
-          response_serializer=actions__pb2.ReloadResponse.SerializeToString,
-      ),
-      '_health_check': grpc.unary_unary_rpc_method_handler(
-          servicer._health_check,
-          request_deserializer=actions__pb2.HealthCheckRequest.FromString,
-          response_serializer=actions__pb2.HealthCheckResponse.SerializeToString,
-      ),
-  }
-  generic_handler = grpc.method_handlers_generic_handler(
-      'OnlineActionHandler', rpc_method_handlers)
-  server.add_generic_rpc_handlers((generic_handler,))
-
-
-class BatchActionHandlerStub(object):
-  # missing associated documentation comment in .proto file
-  pass
-
-  def __init__(self, channel):
-    """Constructor.
-
-    Args:
-      channel: A grpc.Channel.
-    """
-    self._remote_execute = channel.unary_unary(
-        '/BatchActionHandler/_remote_execute',
-        request_serializer=actions__pb2.BatchActionRequest.SerializeToString,
-        response_deserializer=actions__pb2.BatchActionResponse.FromString,
-        )
-    self._remote_reload = channel.unary_unary(
-        '/BatchActionHandler/_remote_reload',
-        request_serializer=actions__pb2.ReloadRequest.SerializeToString,
-        response_deserializer=actions__pb2.ReloadResponse.FromString,
-        )
-    self._health_check = channel.unary_unary(
-        '/BatchActionHandler/_health_check',
-        request_serializer=actions__pb2.HealthCheckRequest.SerializeToString,
-        response_deserializer=actions__pb2.HealthCheckResponse.FromString,
-        )
-
-
-class BatchActionHandlerServicer(object):
-  # missing associated documentation comment in .proto file
-  pass
-
-  def _remote_execute(self, request, context):
-    # missing associated documentation comment in .proto file
-    pass
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-  def _remote_reload(self, request, context):
-    # missing associated documentation comment in .proto file
-    pass
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-  def _health_check(self, request, context):
-    # missing associated documentation comment in .proto file
-    pass
-    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
-    context.set_details('Method not implemented!')
-    raise NotImplementedError('Method not implemented!')
-
-
-def add_BatchActionHandlerServicer_to_server(servicer, server):
-  rpc_method_handlers = {
-      '_remote_execute': grpc.unary_unary_rpc_method_handler(
-          servicer._remote_execute,
-          request_deserializer=actions__pb2.BatchActionRequest.FromString,
-          response_serializer=actions__pb2.BatchActionResponse.SerializeToString,
-      ),
-      '_remote_reload': grpc.unary_unary_rpc_method_handler(
-          servicer._remote_reload,
-          request_deserializer=actions__pb2.ReloadRequest.FromString,
-          response_serializer=actions__pb2.ReloadResponse.SerializeToString,
-      ),
-      '_health_check': grpc.unary_unary_rpc_method_handler(
-          servicer._health_check,
-          request_deserializer=actions__pb2.HealthCheckRequest.FromString,
-          response_serializer=actions__pb2.HealthCheckResponse.SerializeToString,
-      ),
-  }
-  generic_handler = grpc.method_handlers_generic_handler(
-      'BatchActionHandler', rpc_method_handlers)
-  server.add_generic_rpc_handlers((generic_handler,))
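
For context, the deleted stub classes were thin wrappers over a grpc.Channel. A minimal client sketch (the address and port are hypothetical; method and message names come from the module above):

```python
# Sketch only: driving the deleted OnlineActionHandlerStub as a client.
# 'localhost:50051' is a hypothetical address; the server side binds the
# port configured in engine.metadata.
import grpc

from marvin_python_toolbox.engine_base.stubs import actions_pb2
from marvin_python_toolbox.engine_base.stubs import actions_pb2_grpc

channel = grpc.insecure_channel('localhost:50051')
stub = actions_pb2_grpc.OnlineActionHandlerStub(channel)

# Unary-unary call registered in __init__ above.
response = stub._health_check(actions_pb2.HealthCheckRequest())
print(response.status)
```
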
diff --git a/python-toolbox/marvin_python_toolbox/extras/marvin_bash_completion b/python-toolbox/marvin_python_toolbox/extras/marvin_bash_completion
deleted file mode 100644
index 4ab247b..0000000
--- a/python-toolbox/marvin_python_toolbox/extras/marvin_bash_completion
+++ /dev/null
@@ -1,8 +0,0 @@
-_marvin_completion() {
-    COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \
-                   COMP_CWORD=$COMP_CWORD \
-                   _MARVIN_COMPLETE=complete $1 ) )
-    return 0
-}
-
-complete -F _marvin_completion -o default marvin;
diff --git a/python-toolbox/marvin_python_toolbox/loader.py b/python-toolbox/marvin_python_toolbox/loader.py
deleted file mode 100644
index e82be7a..0000000
--- a/python-toolbox/marvin_python_toolbox/loader.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import imp
-from inspect import getmembers
-import click
-
-
-def load_commands_from_file(path):
-    module = imp.load_source('custom_commands', path)
-    commands = [obj for name, obj in getmembers(module) if isinstance(obj, click.core.Command)]
-    return commands
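
This loader backs the custom-command hook in management/__init__.py below; a sketch of a commands file it could pick up (file name and command body are illustrative):

```python
# Sketch of a custom commands file (e.g. marvin_commands.py at the engine
# root) that load_commands_from_file() would discover: any module-level
# click.core.Command instance is collected by getmembers().
import click


@click.command('hello', help='Example custom engine command.')
def hello():
    click.echo('Hello from a custom command!')
```
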
diff --git a/python-toolbox/marvin_python_toolbox/manage.py b/python-toolbox/marvin_python_toolbox/manage.py
deleted file mode 100644
index 753794f..0000000
--- a/python-toolbox/marvin_python_toolbox/manage.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from .management import *
diff --git a/python-toolbox/marvin_python_toolbox/management/__init__.py b/python-toolbox/marvin_python_toolbox/management/__init__.py
deleted file mode 100644
index 3b80667..0000000
--- a/python-toolbox/marvin_python_toolbox/management/__init__.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import os
-import click
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from .pkg import cli as cli_pkg
-from .test import cli as cli_test
-from .notebook import cli as cli_notebook
-from .hive import cli as cli_hive
-from .engine import cli as cli_engine
-from .bigquery import cli as cli_bigquery
-
-from ..config import parse_ini
-from ..loader import load_commands_from_file
-
-
-__all__ = ['create_cli']
-
-
-logger = get_logger('management')
-
-TOOL_EXCLUDE = ['engine-server', 'engine-dryrun', 'engine-httpserver', 'engine-grpcserver', 'engine-deploy', 'engine-httpserver-remote', 'pkg-showversion', 'bigquery-dataimport']
-PROD_EXCLUDE = ['test', 'test-tdd', 'test-tox', 'test-checkpep8', 'lab', 'notebook', 'pkg-bumpversion', 'pkg-createtag', 'pkg-showchanges', 'pkg-showinfo', 'pkg-updatedeps']
-
-EXCLUDE_BY_TYPE = {
-    'python-engine': ['engine-generate', 'engine-generateenv'],
-    'tool': TOOL_EXCLUDE
-}
-
-
-VERSION_MSG = '''
-  __  __            _____ __      __ _____  _   _       
- |  \/  |    /\    |  __ \\\ \    / /|_   _|| \ | |
- | \  / |   /  \   | |__) |\ \  / /   | |  |  \| | 
- | |\/| |  / /\ \  |  _  /  \ \/ /    | |  | . ` | 
- | |  | | / ____ \ | | \ \   \  /    _| |_ | |\  | 
- |_|  |_|/_/    \_\|_|  \_\   \/    |_____||_| \_| 
-            _    _             _                 _  _                                                              
-           | |  | |           | |               | || |                                                             
-           | |_ | |__    ___  | |_  ___    ___  | || |__    ___ __  __                                             
-           | __|| '_ \  / _ \ | __|/ _ \  / _ \ | || '_ \  / _ \\ \/ /                                             
-  _  _  _  | |_ | | | ||  __/ | |_| (_) || (_) || || |_) || (_) |>  <                                              
- (_)(_)(_)  \__||_| |_| \___|  \__|\___/  \___/ |_||_.__/  \___//_/\_\ v%(version)s
-'''
-
-
-def create_cli(package_name, package_path, type_=None, exclude=None, config=None):
-    base_path = os.path.abspath(os.path.join(package_path, '..'))
-
-    if exclude is None:
-        exclude = EXCLUDE_BY_TYPE.get(type_, [])
-
-    mode_file = os.path.join(base_path, '.dev')
-    if type_ == 'tool' and not os.path.exists(mode_file):
-        exclude = exclude + PROD_EXCLUDE
-
-    if config is None:
-        # Find the ini directory
-        inifilename = 'marvin.ini'
-        inidir = base_path
-
-        # Load the ini file
-        inipath = os.path.join(inidir, inifilename)
-        config_defaults = {
-            'inidir': inidir,
-            'marvin_packagedir': '{inidir}/{marvin_package}',
-        }
-        if os.path.exists(inipath):
-            config = parse_ini(inipath, config_defaults)
-        else:
-            config = {}
-
-    exclude = config.get('marvin_exclude', ','.join(exclude))
-    if isinstance(exclude, str):
-        exclude = exclude.split(',')
-
-    @click.group('custom')
-    @click.option('--debug', is_flag=True, help='Enable debug mode.')
-    @click.pass_context
-    def cli(ctx, debug):
-        ctx.obj = {
-            'debug': debug,
-            'package_name': package_name,
-            'package_path': package_path,
-            'base_path': base_path,
-            'type': type_,
-            'config': config,
-        }
-
-    # Load internal commands
-    commands = {}
-    commands.update(cli_bigquery.commands)
-    commands.update(cli_pkg.commands)
-    commands.update(cli_test.commands)
-    commands.update(cli_notebook.commands)
-    commands.update(cli_engine.commands)
-    commands.update(cli_hive.commands)
-
-    for name, command in commands.items():
-        if name not in exclude:
-            cli.add_command(command, name=name)
-
-    # Load custom commands from the project being managed
-    commands_file_paths = [
-        config.get('marvin_commandsfile'),
-        os.path.join(base_path, 'marvin_commands.py'),
-        os.path.join(base_path, 'commands.py')
-    ]
-
-    for commands_file_path in commands_file_paths:
-        if commands_file_path and os.path.exists(commands_file_path):
-            commands = load_commands_from_file(commands_file_path)
-            for command in commands:
-                cli.add_command(command)
-            break
-
-    # Add version and help messages
-    from .. import __version__
-    cli = click.version_option(version=__version__,
-                               message=VERSION_MSG.replace('\n', '\n  '))(cli)
-
-    cli.help = '\b{}\n'.format(VERSION_MSG % {'version': __version__})
-
-    return cli
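
For context, a hypothetical engine entry point would build its CLI roughly like this (the package name is illustrative; create_cli is the only name exported via __all__ above):

```python
# Sketch only: how a generated engine's manage entry point might have used
# the deleted create_cli(). 'marvin_titanic_engine' is a hypothetical package.
import os

from marvin_python_toolbox.management import create_cli

cli = create_cli('marvin_titanic_engine',
                 os.path.dirname(os.path.abspath(__file__)),
                 type_='python-engine')

if __name__ == '__main__':
    cli()
```
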
diff --git a/python-toolbox/marvin_python_toolbox/management/bigquery.py b/python-toolbox/marvin_python_toolbox/management/bigquery.py
deleted file mode 100644
index b18a7b8..0000000
--- a/python-toolbox/marvin_python_toolbox/management/bigquery.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-import click
-import time
-import os
-import json
-import pandas as pd
-from google.cloud import bigquery
-import hashlib
-
-from .._logging import get_logger
-
-from .._compatibility import six
-
-
-logger = get_logger('management.bigquery')
-
-
-@click.group('bigquery')
-def cli():
-    pass
-
-def read_file(filename):
-    fname = os.path.join("", filename)
-    if os.path.exists(fname):
-
-        print("Engine file {} loaded!".format(filename))
-
-        with open(fname, 'r') as fp:
-            return json.load(fp)
-    else:
-        print("Engine file {} doesn't exists...".format(filename))
-        return {}
-
-@cli.command(
-    'bigquery-dataimport',
-    help='Import data samples from a BigQuery database.')
-@click.option('--dryrun', '-d', is_flag=True, help='Run as a dry run, only estimating query costs')
-@click.option('--max_billed', '-m', default=10, help='Maximum gigabytes to be billed per query')
-@click.option('--metadata-file', '-mf', default='engine.metadata', help='Marvin engine metadata file path', type=click.Path(exists=True))
-@click.pass_context
-def bigquery_dataimport_cli(ctx, dryrun, max_billed, metadata_file):
-    bigquery_dataimport(ctx, metadata_file, dryrun, max_billed)
-
-def bigquery_dataimport(ctx, metadata_file, dryrun, max_billed):
-
-    initial_start_time = time.time()
-
-    metadata = read_file(metadata_file)
-
-    if metadata:
-        print(chr(27) + "[2J")  # clear the terminal screen
-        
-        data_path = os.environ['MARVIN_DATA_PATH']
-        path_csvs = data_path + '/bigquery-' + metadata['bigquery_project']
-        
-        if not dryrun:
-            os.mkdir(path_csvs)
-        
-        for query, file in zip(metadata['bigquery_queries'], metadata['bigquery_csvfiles']):
-            
-            print("project: {} query: {} file: {}".format(metadata['bigquery_project'], query, file))
-            
-            bdi = BigQueryImporter(
-                project=metadata['bigquery_project'],
-                sql=query,
-                file=file,
-                max_billed=max_billed * 1073741824,  # convert GB to bytes
-                path_csv=path_csvs,
-            )
-            if dryrun:
-                bdi.dryrun()
-            else:
-                bdi.query()
-                
-        print("Total Time : {:.2f}s".format(time.time() - initial_start_time))
-
-        print("\n")
-
-
-def read_config(filename):
-    fname = os.path.join("", filename)
-    if os.path.exists(fname):
-        with open(fname, 'r') as fp:
-            return json.load(fp)[0]
-    else:
-        print("Configuration file {} doesn't exists...".format(filename))
-        return {}
-
-
-class BigQueryImporter():
-    def __init__(self, project, sql, file, max_billed, path_csv):
-        self.project = project
-        self.sql = sql
-        self.file = file
-        self.max_billed = max_billed
-        self.path_csv = path_csv
-
-    def query(self):
-        job_config = bigquery.QueryJobConfig()
-        job_config.use_query_cache = False
-        job_config.maximum_bytes_billed = self.max_billed
-        client = bigquery.Client(project=self.project)
-        query_job = client.query(self.sql,
-                                 job_config=job_config)
-        dataframe = query_job.to_dataframe()
-        dataframe.to_csv(self.path_csv + '/' + self.file, index=False)
-        
-    def dryrun(self):
-        job_config = bigquery.QueryJobConfig()
-        job_config.use_query_cache = False
-        job_config.dry_run = True
-        job_config.maximum_bytes_billed = self.max_billed
-        client = bigquery.Client(project=self.project)
-        query_job = client.query(self.sql,
-                                 job_config=job_config)
-        
-        assert query_job.state == "DONE"
-        assert query_job.dry_run
-        
-        print("The query: {}\nWill process {} Gb.\n".format(self.sql, query_job.total_bytes_processed / 1073741824))
diff --git a/python-toolbox/marvin_python_toolbox/management/engine.py b/python-toolbox/marvin_python_toolbox/management/engine.py
deleted file mode 100644
index 9dec2cc..0000000
--- a/python-toolbox/marvin_python_toolbox/management/engine.py
+++ /dev/null
@@ -1,798 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import click
-import json
-import os
-import sys
-import time
-import os.path
-import re
-import shutil
-import subprocess
-import jinja2
-import six
-from unidecode import unidecode
-import multiprocessing
-from marvin_python_toolbox.common.profiling import profiling
-from marvin_python_toolbox.common.data import MarvinData
-from marvin_python_toolbox.common.config import Config, load_conf_from_file
-from .._compatibility import iteritems
-from .._logging import get_logger
-
-
-logger = get_logger('management.engine')
-
-
-@click.group('engine')
-def cli():
-    pass
-
-
-@cli.command('engine-dryrun', help='Marvin Dryrun Utility - Run marvin engines in a standalone way')
-@click.option(
-    '--action',
-    '-a',
-    default='all',
-    type=click.Choice(['all', 'acquisitor', 'tpreparator', 'trainer', 'evaluator', 'ppreparator', 'predictor', 'feedback']),
-    help='Marvin engine action name')
-@click.option('--initial-dataset', '-id', help='Initial dataset file path', type=click.Path(exists=True))
-@click.option('--dataset', '-d', help='Dataset file path', type=click.Path(exists=True))
-@click.option('--model', '-m', help='Engine model file path', type=click.Path(exists=True))
-@click.option('--metrics', '-me', help='Engine Metrics file path', type=click.Path(exists=True))
-@click.option('--params-file', '-pf', default='engine.params', help='Marvin engine params file path', type=click.Path(exists=True))
-@click.option('--messages-file', '-mf', default='engine.messages', help='Marvin engine predictor input messages file path', type=click.Path(exists=True))
-@click.option('--feedback-file', '-ff', default='feedback.messages', help='Marvin engine feedback input messages file path', type=click.Path(exists=True))
-@click.option('--response', '-r', default=True, is_flag=True, help='If enabled, print responses from engine online actions (ppreparator and predictor)')
-@click.option('--profiling', default=False, is_flag=True, help='Enable execute method profiling')
-@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
-@click.pass_context
-def dryrun_cli(ctx, action, params_file, messages_file, feedback_file, initial_dataset, dataset, model, metrics, response, spark_conf, profiling):
-    dryrun(ctx, action, params_file, messages_file, feedback_file, initial_dataset, dataset, model, metrics, response, spark_conf, profiling)
-
-
-def dryrun(ctx, action, params_file, messages_file, feedback_file, initial_dataset, dataset, model, metrics, response, spark_conf, profiling):
-
-    print(chr(27) + "[2J")  # clear the terminal screen
-
-    # setting spark configuration directory
-    os.environ["SPARK_CONF_DIR"] = spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")
-    os.environ["YARN_CONF_DIR"] = os.environ["SPARK_CONF_DIR"]
-
-    params = read_file(params_file)
-    messages_file = read_file(messages_file)
-    feedback_file = read_file(feedback_file)
-
-    if action in ['all', 'ppreparator', 'predictor'] and not messages_file:
-        print('Please set the input message to be used by the dry run process. Use the --messages-file flag to provide it as valid JSON.')
-        sys.exit("Stopping process!")
-
-    if action in ['all', 'feedback'] and not feedback_file:
-        print('Please set the feedback input message to be used by the dry run process. Use the --feedback-file flag to provide it as valid JSON.')
-        sys.exit("Stopping process!")
-
-    if action == 'all':
-        pipeline = ['acquisitor', 'tpreparator', 'trainer', 'evaluator', 'ppreparator', 'predictor', 'feedback']
-    else:
-        pipeline = [action]
-
-    _dryrun = MarvinDryRun(ctx=ctx, messages=[messages_file, feedback_file], print_response=response)
-
-    initial_start_time = time.time()
-
-    for step in pipeline:
-        _dryrun.execute(clazz=CLAZZES[step], params=params, initial_dataset=initial_dataset, dataset=dataset, model=model, metrics=metrics,
-                        profiling_enabled=profiling)
-
-    print("Total Time : {:.2f}s".format(time.time() - initial_start_time))
-
-    print("\n")
-
-
-CLAZZES = {
-    "acquisitor": "AcquisitorAndCleaner",
-    "tpreparator": "TrainingPreparator",
-    "trainer": "Trainer",
-    "evaluator": "MetricsEvaluator",
-    "ppreparator": "PredictionPreparator",
-    "predictor": "Predictor",
-    "feedback": "Feedback"
-}
-
-
-class MarvinDryRun(object):
-    def __init__(self, ctx, messages, print_response):
-        self.predictor_messages = messages[0]
-        self.feedback_messages = messages[1]
-        self.pmessages = []
-        self.package_name = ctx.obj['package_name']
-        self.kwargs = None
-        self.print_response = print_response
-
-    def execute(self, clazz, params, initial_dataset, dataset, model, metrics, profiling_enabled=False):
-        self.print_start_step(clazz)
-
-        _Step = dynamic_import("{}.{}".format(self.package_name, clazz))
-
-        if not self.kwargs:
-            self.kwargs = generate_kwargs(_Step, params, initial_dataset, dataset, model, metrics)
-
-        step = _Step(**self.kwargs)
-
-        def call_online_actions(step, msg, msg_idx):
-            def print_message(result):
-                try:
-                    print(json.dumps(result, indent=4, sort_keys=True))
-                except TypeError:
-                    print("Unable to serialize the object returned!")
-
-            if self.print_response:
-                print("\nMessage {} :\n".format(msg_idx))
-                print_message(msg)
-
-            if profiling_enabled:
-                with profiling(output_path=".profiling", uid=clazz) as prof:
-                    result = step.execute(input_message=msg, params=params)
-
-                prof.disable()
-                print("\nProfile images created in {}\n".format(prof.image_path))
-
-            else:
-                result = step.execute(input_message=msg, params=params)
-
-            if self.print_response:
-                print("\nResult for Message {} :\n".format(msg_idx))
-                print_message(result)
-
-            return result
-
-        if clazz == 'PredictionPreparator':
-            for idx, msg in enumerate(self.predictor_messages):
-                self.pmessages.append(call_online_actions(step, msg, idx))
-
-        elif clazz == 'Feedback':
-            for idx, msg in enumerate(self.feedback_messages):
-                self.pmessages.append(call_online_actions(step, msg, idx))
-
-        elif clazz == 'Predictor':
-
-            self.execute("PredictionPreparator", params, initial_dataset, dataset, model, metrics)
-
-            self.pmessages = self.predictor_messages if not self.pmessages else self.pmessages
-
-            for idx, msg in enumerate(self.pmessages):
-                call_online_actions(step, msg, idx)
-
-        else:
-            if profiling_enabled:
-                with profiling(output_path=".profiling", uid=clazz) as prof:
-                    step.execute(params=params)
-
-                prof.disable()
-
-                print("\nProfile images created in {}\n".format(prof.image_path))
-
-            else:
-                step.execute(params=params)
-
-        self.print_finish_step()
-
-    def print_finish_step(self):
-        print("\n                                               STEP TAKES {:.4f} (seconds) ".format((time.time() - self.start_time)))
-
-    def print_start_step(self, name):
-        print("\n------------------------------------------------------------------------------")
-        print("MARVIN DRYRUN - STEP [{}]".format(name))
-        print("------------------------------------------------------------------------------\n")
-        self.start_time = time.time()
-
-
-def dynamic_import(clazz):
-    components = clazz.split('.')
-    mod = __import__(components[0])
-    for comp in components[1:]:
-        mod = getattr(mod, comp)
-    return mod
-
-
-def read_file(filename):
-    fname = os.path.join("", filename)
-    if os.path.exists(fname):
-
-        print("Engine file {} loaded!".format(filename))
-
-        with open(fname, 'r') as fp:
-            return json.load(fp)
-    else:
-        print("Engine file {} doesn't exists...".format(filename))
-        return {}
-
-
-def generate_kwargs(clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None):
-    kwargs = {}
-
-    if params:
-        kwargs["params"] = params
-    if dataset:
-        kwargs["dataset"] = clazz.retrieve_obj(dataset)
-    if initial_dataset:
-        kwargs["initial_dataset"] = clazz.retrieve_obj(initial_dataset)
-    if model:
-        kwargs["model"] = clazz.retrieve_obj(model)
-    if metrics:
-        kwargs["metrics"] = clazz.retrieve_obj(metrics)
-
-    kwargs["persistence_mode"] = 'local'
-    kwargs["default_root_path"] = os.path.join(os.getenv('MARVIN_DATA_PATH'), '.artifacts')
-    kwargs["is_remote_calling"] = True
-
-    return kwargs
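
Together, dynamic_import and generate_kwargs are how the dry run resolves and instantiates each action class; a minimal sketch (the engine package and params are hypothetical, and MARVIN_DATA_PATH must be set, as in the CI config):

```python
# Sketch only: the resolution path MarvinDryRun.execute() follows.
# 'marvin_titanic_engine' and the params dict are hypothetical.
_Step = dynamic_import('marvin_titanic_engine.Trainer')
kwargs = generate_kwargs(_Step, params={'epochs': 10})  # no artifacts loaded
step = _Step(**kwargs)
step.execute(params={'epochs': 10})
```
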
-
-
-class MarvinEngineServer(object):
-    @classmethod
-    def create(self, ctx, action, port, workers, rpc_workers, params, initial_dataset, dataset, model, metrics, pipeline):
-        package_name = ctx.obj['package_name']
-
-        def create_object(act):
-            clazz = CLAZZES[act]
-            _Action = dynamic_import("{}.{}".format(package_name, clazz))
-            kwargs = generate_kwargs(_Action, params, initial_dataset, dataset, model, metrics)
-            return _Action(**kwargs)
-
-        root_obj = create_object(action)
-        previous_object = root_obj
-
-        if pipeline:
-            for step in list(reversed(pipeline)):
-                previous_object._previous_step = create_object(step)
-                previous_object = previous_object._previous_step
-
-        server = root_obj._prepare_remote_server(port=port, workers=workers, rpc_workers=rpc_workers)
-
-        print("Starting GRPC server [{}] for {} Action".format(port, action))
-        server.start()
-
-        return server
-
-
-@cli.command('engine-grpcserver', help='Start the Marvin gRPC engine action servers')
-@click.option(
-    '--action',
-    '-a',
-    default='all',
-    type=click.Choice(['all', 'acquisitor', 'tpreparator', 'trainer', 'evaluator', 'predictor', 'feedback']),
-    help='Marvin engine action name')
-@click.option('--initial-dataset', '-id', help='Initial dataset file path', type=click.Path(exists=True))
-@click.option('--dataset', '-d', help='Dataset file path', type=click.Path(exists=True))
-@click.option('--model', '-m', help='Engine model file path', type=click.Path(exists=True))
-@click.option('--metrics', '-me', help='Engine Metrics file path', type=click.Path(exists=True))
-@click.option('--params-file', '-pf', default='engine.params', help='Marvin engine params file path', type=click.Path(exists=True))
-@click.option('--metadata-file', '-mf', default='engine.metadata', help='Marvin engine metadata file path', type=click.Path(exists=True))
-@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration path to be used')
-@click.option('--max-workers', '-w', default=multiprocessing.cpu_count(), help='Max number of gRPC thread workers per action')
-@click.option('--max-rpc-workers', '-rw', default=multiprocessing.cpu_count(), help='Max number of gRPC workers per action')
-@click.pass_context
-def engine_server(ctx, action, params_file, metadata_file, initial_dataset, dataset, model, metrics, spark_conf, max_workers, max_rpc_workers):
-
-    print("Starting server ...")
-
-    # setting spark configuration directory
-    os.environ["SPARK_CONF_DIR"] = spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")
-    os.environ["YARN_CONF_DIR"] = os.environ["SPARK_CONF_DIR"]
-
-    params = read_file(params_file)
-    metadata = read_file(metadata_file)
-    default_actions = {action['name']: action for action in metadata['actions']}
-
-    if action == 'all':
-        action = default_actions
-    else:
-        action = {action: default_actions[action]}
-
-    servers = []
-    for action_name in action.keys():
-        # initializing server configuration
-        engine_server = MarvinEngineServer.create(
-            ctx=ctx,
-            action=action_name,
-            port=action[action_name]["port"],
-            workers=max_workers,
-            rpc_workers=max_rpc_workers,
-            params=params,
-            initial_dataset=initial_dataset,
-            dataset=dataset,
-            model=model,
-            metrics=metrics,
-            pipeline=action[action_name]["pipeline"]
-        )
-
-        servers.append(engine_server)
-
-    try:
-        while True:
-            time.sleep(100)
-
-    except KeyboardInterrupt:
-        print("Terminating server ...")
-        for server in servers:
-            server.stop(0)
-
-
-TEMPLATE_BASES = {
-    'python-engine': os.path.join(os.path.dirname(__file__), 'templates', 'python-engine'),
-    'automl-engine': os.path.join(os.path.dirname(__file__), 'templates', 'python-engine'),
-}
-
-RENAME_DIRS = [
-    ('project_package', '{{project.package}}'),
-]
-
-IGNORE_DIRS = [
-    # Ignore service internal templates
-    'templates'
-]
-
-
-def _get_package_name(package, type_):
-    # Make sure package name starts with "marvin"
-    if not package.startswith('marvin'):
-        package = 'marvin_{}'.format(package)
-
-    # Remove "lib" prefix from package name
-    if type_ == 'lib' and package.endswith('lib'):
-        package = package[:-3]
-    # Custom strip to remove underscores
-    package = package.strip('_')
-
-    # Append project type to services
-
-    if type_ in TEMPLATE_BASES and not package.endswith('engine'):
-        package = '{}_engine'.format(package)
-
-    return package
-
-def _get_dir(name, package, type_):
-    # Process directory/virtualenv name
-
-    # Directory name should use '-' instead of '_'
-    dir_ = package.replace('_', '-')
-
-    # Remove "marvin" prefix from directory
-    if dir_.startswith('marvin'):
-        dir_ = dir_[6:]
-    dir_ = dir_.strip('-')
-
-    # Append "lib" to directory name if creating a lib
-    if type_ == 'lib' and not dir_.endswith('lib'):
-        dir_ = '{}-lib'.format(dir_)
-    
-    return dir_
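
Tracing the two helpers above by hand (results derived from the code):

```python
# Worked examples of the naming helpers above.
_get_package_name('titanic', 'python-engine')      # -> 'marvin_titanic_engine'
_get_package_name('marvin_iris', 'python-engine')  # -> 'marvin_iris_engine'
_get_dir('titanic', 'marvin_titanic_engine', 'python-engine')  # -> 'titanic-engine'
_get_dir('common', 'marvin_common', 'lib')         # -> 'common-lib'
```
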
-
-@cli.command('engine-generateenv', help='Generate a new marvin engine environment and install default requirements.')
-@click.argument('engine-path', type=click.Path(exists=True))
-@click.option('--python', '-p', default='python', help='The Python interpreter to use to create the new environment')
-def generate_env(engine_path, python):
-    engine_type = load_conf_from_file(engine_path + '/marvin.ini').get('type')
-    dir_ = os.path.basename(os.path.abspath(engine_path))
-    venv_name = _create_virtual_env(dir_, engine_path, python)
-    _call_make_env(venv_name, engine_type)
-
-    print('\nDone!!!!')
-    print('Now, to work on the new engine project, use: workon {}'.format(venv_name))
-
-
-@cli.command('engine-generate', help='Generate a new marvin engine project and install default requirements.')
-@click.option('--name', '-n', prompt='Project name', help='Project name')
-@click.option('--description', '-d', prompt='Short description', default='Marvin engine', help='Engine short description')
-@click.option('--mantainer', '-m', prompt='Maintainer name', default='Marvin AI Community', help='Maintainer name')
-@click.option('--email', '-e', prompt='Maintainer email', default='dev@marvin.apache.org', help='Maintainer email')
-@click.option('--package', '-p', default='', help='Package name')
-@click.option('--dest', '-d', envvar='MARVIN_HOME', type=click.Path(exists=True), help='Root folder path for the creation')
-@click.option('--no-env', is_flag=True, default=False, help='Don\'t create the virtual environment')
-@click.option('--no-git', is_flag=True, default=False, help='Don\'t initialize the git repository')
-@click.option('--automl', '-aml', default='n', prompt='Use AutoML?', type=click.Choice(['y', 'n']))
-@click.option('--python', '-py', default='python', help='The Python interpreter to use to create the new environment')
-def generate(name, description, mantainer, email, package, dest, no_env, no_git, automl, python):
-    type_ = 'python-engine'
-
-    # Check if package should be automl
-    if automl == 'y':
-        type_ = 'automl-engine'
-        
-    # Process package name
-    package = _slugify(package or name)
-    package = _get_package_name(package, type_)
-
-    # Process dir name
-    dir_ = _get_dir(name, package, type_)
-
-    # Get dest name
-    dest = os.path.join(dest, dir_)
-
-    if type_ not in TEMPLATE_BASES:
-        print('[ERROR] Could not find template files for "{type}".'.format(type=type_))
-        sys.exit(1)
-
-    project = {
-        'name': _slugify(name),
-        'description': description,
-        'package': package,
-        'toolbox_version': os.getenv('TOOLBOX_VERSION'),
-        'type': type_
-    }
-
-    mantainer = {
-        'name': mantainer,
-        'email': email,
-    }
-
-    context = {
-        'project': project,
-        'mantainer': mantainer,
-    }
-
-    folder_created = False
-
-    try:
-        _copy_scaffold_structure(TEMPLATE_BASES[type_], dest)
-
-        folder_created = True
-
-        _copy_processed_files(TEMPLATE_BASES[type_], dest, context)
-        _rename_dirs(dest, RENAME_DIRS, context)
-        _make_data_link(dest)
-
-        venv_name = None
-        if not no_env:
-            venv_name = _create_virtual_env(dir_, dest, python)
-            _call_make_env(venv_name, type_)
-
-        if not no_git:
-            _call_git_init(dest)
-
-        print('\nDone!!!!')
-
-        if not no_env:
-            print('To start working on the new engine project, run: workon {}'.format(venv_name))
-
-    except Exception as e:
-        logger.info(e)
-        print("\nAn engine project with this name already exists!")
-        # remove project if created
-        if os.path.exists(dest) and folder_created:
-            shutil.rmtree(dest)
-
-
-
-@cli.command('engine-delete', help='Delete an existing marvin engine project.')
-@click.option('--name', '-n', prompt='Project name', help='Project name')
-@click.option('--package', '-p', default='', help='Package name')
-@click.option('--dest', '-d', envvar='MARVIN_HOME', type=click.Path(exists=True), help='Root folder path for the creation')
-def delete(name, dest, package):
-    type_ = 'python-engine'
-   
-    # Process package name
-    package = _slugify(package or name)
-    package = _get_package_name(package, type_)
-
-    # Process dir name
-    dir_ = _get_dir(name, package, type_)
-
-    # Get dest name
-    dest = os.path.join(dest, dir_)
-
-    # Delete the virtualenv
-    _delete_virtual_env(dir_)
-
-    try:
-        shutil.rmtree(dest)
-        print('\nDone!!!!')
-    except Exception as e:
-        logger.info(e)
-        print("\nCan't find an existing engine project with this name!")
-
-    
-
-_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
-
-
-def _slugify(text, delim='_'):
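-    # Illustrative example (assumed input): _slugify('My Engine!') -> 'my_engine'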
-    result = []
-    for word in _punct_re.split(text.lower()):
-        result.extend(unidecode(word).split())
-    return six.u(delim.join(result))
-
-
-def _copy_scaffold_structure(src, dest):
-    os.mkdir(dest)
-
-    for root, dirs, files in os.walk(src):
-        for dir_ in dirs:
-            dirname = os.path.join(root, dir_)
-            dirname = '{dest}{dirname}'.format(dest=dest, dirname=dirname.replace(src, ''))  # get dirname without source path
-
-            os.mkdir(dirname)
-
-
-def _copy_processed_files(src, dest, context):
-    env = jinja2.Environment(loader=jinja2.FileSystemLoader(src))
-
-    print('Processing template files...')
-
-    for root, dirs, files in os.walk(src):
-
-        dirname = root.replace(src, '')[1:]  # get dirname without source path
-        to_dirname = os.path.join(dest, dirname)
-
-        should_process = not any(dirname.startswith(dir_) for dir_ in IGNORE_DIRS)
-
-        for file in files:
-
-            # Ignore trash
-            if file == '.DS_Store' or file.endswith('.pyc'):
-                continue
-
-            from_ = os.path.join(dirname, file)
-            to_ = os.path.join(to_dirname, file)
-
-            print('Copying "{0}" to "{1}"...'.format(from_, to_))
-
-            if not should_process:
-                shutil.copy(os.path.join(src, from_), to_)
-            else:
-                template = env.get_template(from_)
-                output = template.render(**context)
-
-                with open(to_, 'w') as fp:
-                    fp.write(output)
-
-
-def _rename_dirs(base, dirs, context):
-    for dir_ in dirs:
-        dirname, template = dir_
-        oldname = os.path.join(base, dirname)
-
-        processed = jinja2.Template(template).render(**context)
-        newname = os.path.join(base, processed)
-
-        shutil.move(oldname, newname)
-
-        print('Renaming {0} as {1}'.format(oldname, newname))
-
-
-def _create_virtual_env(name, dest, python):
-    venv_name = '{}-env'.format(name).replace('_', '-')
-    print('Creating virtualenv: {0}...'.format(venv_name))
-
-    command = ['bash', '-c', '. virtualenvwrapper.sh; mkvirtualenv -p {0} -a {1} {2};'.format(python, dest, venv_name)]
-
-    try:
-        result = subprocess.Popen(command, env=os.environ).wait()
-
-        if result > 0:
-            sys.exit(1)
-
-    except Exception:
-        logger.exception('Could not create the virtualenv!')
-        sys.exit(1)
-
-    return venv_name
-
-
-def _delete_virtual_env(name):
-    venv_name = '{}-env'.format(name).replace('_', '-')
-    print('Deleting virtualenv: {0}...'.format(venv_name))
-
-    command = ['bash', '-c', '. virtualenvwrapper.sh; rmvirtualenv {0};'.format(venv_name)]
-
-    try:
-        result = subprocess.Popen(command, env=os.environ).wait()
-
-        if result > 0:
-            sys.exit(1)
-
-    except Exception:
-        logger.exception('Could not delete the virtualenv!')
-        sys.exit(1)
-
-    return venv_name
-
-
-def _call_make_env(venv_name, engine):
-    if engine == 'automl-engine':
-        command = ['bash', '-c', '. virtualenvwrapper.sh; workon {}; make marvin-automl'.format(venv_name)]
-    else:
-        command = ['bash', '-c', '. virtualenvwrapper.sh; workon {}; make marvin'.format(venv_name)]
-
-    try:
-        subprocess.Popen(command, env=os.environ).wait()
-    except Exception:
-        logger.exception('Could not call make marvin!')
-        sys.exit(1)
-
-
-def _call_git_init(dest):
-    command = ['bash', '-c', '/usr/bin/git init {0}'.format(dest)]
-    print('Initializing git repository...')
-    try:
-        subprocess.Popen(command, env=os.environ).wait()
-    except OSError:
-        print('WARNING: Could not initialize repository!')
-
-
-def _make_data_link(dest):
-    data_path = os.environ['MARVIN_DATA_PATH']
-    data_link = os.path.join(dest, 'notebooks/data')
-    os.symlink(data_path, data_link)
-
-
-@cli.command('engine-httpserver', help='Start the Marvin HTTP API server')
-@click.option(
-    '--action',
-    '-a',
-    default='all',
-    type=click.Choice(['all', 'acquisitor', 'tpreparator', 'trainer', 'evaluator', 'ppreparator', 'predictor', 'feedback']),
-    help='Marvin engine action name')
-@click.option('--initial-dataset', '-id', help='Initial dataset file path', type=click.Path(exists=True))
-@click.option('--dataset', '-d', help='Dataset file path', type=click.Path(exists=True))
-@click.option('--model', '-m', help='Engine model file path', type=click.Path(exists=True))
-@click.option('--metrics', '-me', help='Engine Metrics file path', type=click.Path(exists=True))
-@click.option('--protocol', '-pr', default='', help='Marvin protocol to be loaded during initialization.')
-@click.option('--params-file', '-pf', default='engine.params', help='Marvin engine params file path', type=click.Path(exists=True))
-@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
-@click.option('--http-host', '-h', default='localhost', help='Engine executor http bind host')
-@click.option('--http-port', '-p', default=8000, help='Engine executor http port')
-@click.option('--executor-path', '-e', help='Marvin engine executor jar path', type=click.Path(exists=True))
-@click.option('--max-workers', '-w', default=multiprocessing.cpu_count(), help='Max number of grpc worker threads per action')
-@click.option('--max-rpc-workers', '-rw', default=multiprocessing.cpu_count(), help='Max number of grpc workers per action')
-@click.option('--extra-executor-parameters', '-jvm', help='Use to send extra JVM parameters to engine executor process')
-@click.pass_context
-def engine_httpserver_cli(ctx, action, params_file, initial_dataset, dataset,
-                          model, metrics, protocol, spark_conf, http_host, http_port,
-                          executor_path, max_workers, max_rpc_workers, extra_executor_parameters):
-    engine_httpserver(
-        ctx, action, params_file, initial_dataset, dataset,
-        model, metrics, protocol, spark_conf, http_host, http_port,
-        executor_path, max_workers, max_rpc_workers, extra_executor_parameters
-    )
-
-
-def engine_httpserver(ctx, action, params_file, initial_dataset, dataset, model, metrics, protocol, spark_conf, http_host,
-                      http_port, executor_path, max_workers, max_rpc_workers, extra_executor_parameters):
-    logger.info("Starting http and grpc servers ...")
-
-    grpcserver = None
-    httpserver = None
-
-    def _params(**kwargs):
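-        # Illustrative example (assumed kwargs): _params(id='data.csv', m=None) -> ['-id', 'data.csv']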
-        params = []
-        if kwargs:
-            for key, value in iteritems(kwargs):
-                if value is not None:
-                    params.append("-{0}".format(str(key)))
-                    params.append(str(value))
-        return params
-
-    try:
-        optional_args = _params(id=initial_dataset, d=dataset, m=model, me=metrics, pf=params_file, c=spark_conf)
-        grpcserver = subprocess.Popen(['marvin', 'engine-grpcserver', '-a', action, '-w', str(max_workers), '-rw', str(max_rpc_workers)] + optional_args)
-
-        time.sleep(3)
-
-    except Exception:
-        logger.exception("Could not start grpc server!")
-        sys.exit(1)
-
-    try:
-        if not (executor_path and os.path.exists(executor_path)):
-            executor_url = Config.get("executor_url", section="marvin")
-            executor_path = MarvinData.download_file(executor_url, force=False)
-
-        command_list = ['java']
-        command_list.append('-DmarvinConfig.engineHome={}'.format(ctx.obj['config']['inidir']))
-        command_list.append('-DmarvinConfig.ipAddress={}'.format(http_host))
-        command_list.append('-DmarvinConfig.port={}'.format(http_port))
-        command_list.append('-DmarvinConfig.protocol={}'.format(protocol))
-
-        if extra_executor_parameters:
-            command_list.append(extra_executor_parameters)
-
-        command_list.append('-jar')
-        command_list.append(executor_path)
-
-        httpserver = subprocess.Popen(command_list)
-
-    except Exception:
-        logger.exception("Could not start http server!")
-        if grpcserver:
-            grpcserver.terminate()
-        sys.exit(1)
-
-    try:
-        while True:
-            time.sleep(100)
-
-    except KeyboardInterrupt:
-        logger.info("Terminating http and grpc servers...")
-        if grpcserver:
-            grpcserver.terminate()
-        if httpserver:
-            httpserver.terminate()
-        logger.info("Http and grpc servers terminated!")
-        sys.exit(0)
-
-
-@cli.command('engine-deploy', help='Engine provisioning and deployment command')
-@click.option('--provision', is_flag=True, default=False, help='Forces provisioning')
-@click.option('--package', is_flag=True, default=False, help='Creates engine package')
-@click.option('--skip-clean', is_flag=True, default=False, help='Skips make clean')
-def engine_deploy(provision, package, skip_clean):
-
-    TOOLBOX_VERSION = os.getenv('TOOLBOX_VERSION')
-
-    if provision:
-        subprocess.Popen([
-            "fab",
-            "provision",
-        ], env=os.environ).wait()
-        subprocess.Popen([
-            "fab",
-            "deploy:version={version}".format(version=TOOLBOX_VERSION),
-        ], env=os.environ).wait()
-    elif package:
-        subprocess.Popen([
-            "fab",
-            "package:version={version}".format(version=TOOLBOX_VERSION),
-        ], env=os.environ).wait()
-    elif skip_clean:
-        subprocess.Popen([
-            "fab",
-            "deploy:version={version},skip_clean=True".format(version=TOOLBOX_VERSION),
-        ], env=os.environ).wait()
-    else:
-        subprocess.Popen([
-            "fab",
-            "deploy:version={version}".format(version=TOOLBOX_VERSION),
-        ], env=os.environ).wait()
-
-
-@cli.command('engine-httpserver-remote', help='Remote HTTP server control command')
-@click.option('--http_host', '-h', default='0.0.0.0', help='Engine executor http bind host')
-@click.option('--http_port', '-p', default=8000, help='Engine executor http port')
-@click.argument('command', type=click.Choice(['start', 'stop', 'status']))
-def engine_httpserver_remote(command, http_host, http_port):
-    if command == "start":
-        subprocess.Popen([
-            "fab",
-            "engine_start:{host},{port}".format(host=http_host, port=http_port)
-        ], env=os.environ).wait()
-    elif command == "stop":
-        subprocess.Popen([
-            "fab",
-            "engine_stop",
-        ], env=os.environ).wait()
-    elif command == "status":
-        subprocess.Popen([
-            "fab",
-            "engine_status",
-        ], env=os.environ).wait()
-    else:
-        print("Usage: marvin engine-httpserver-remote [ start | stop | status ]")
diff --git a/python-toolbox/marvin_python_toolbox/management/hive.py b/python-toolbox/marvin_python_toolbox/management/hive.py
deleted file mode 100644
index 3d828b2..0000000
--- a/python-toolbox/marvin_python_toolbox/management/hive.py
+++ /dev/null
@@ -1,709 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-import click
-import time
-import os
-import re
-import sys
-import json
-from paramiko import SSHClient, AutoAddPolicy
-from pyhive import hive
-from slugify import slugify
-import hashlib
-
-from .._logging import get_logger
-
-from .._compatibility import six
-
-
-logger = get_logger('management.hive')
-
-
-@click.group('hive')
-def cli():
-    pass
-
-
-@cli.command('hive-generateconf', help='Generate default configuration file')
-@click.pass_context
-def hive_generateconf_cli(ctx):
-    hive_generateconf(ctx)
-
-
-def hive_generateconf(ctx):
-    default_conf = [{
-        "origin_host": "xxx_host_name",
-        "origin_db": "xxx_db_name",
-        "origin_queue": "marvin",
-        "target_table_name": "xxx_table_name",
-        "sample_sql": "SELECT * FROM XXX",
-        "sql_id": "1"
-    }]
-
-    with open('hive_dataimport.conf', 'w') as outfile:
-        json.dump(default_conf, outfile, indent=2)
-
-    print("Done!!!")
-
-
-@cli.command('hive-resetremote', help='Drop all remote temp tables created by the informed engine on the host')
-@click.option('--host', '-h', default='marvin-hadoop')
-@click.option('--queue', '-q', default='default')
-@click.option('--engine', default=(os.path.relpath(".", "..")), help='Marvin engine name (default is the current folder)')
-@click.pass_context
-def hive_resetremote_cli(ctx, host, engine, queue):
-    hive_resetremote(ctx, host, engine, queue)
-
-
-def hive_resetremote(ctx, host, engine, queue):
-    hdi = HiveDataImporter(
-        engine=engine,
-        origin_host=host,
-        origin_queue=queue,
-        origin_db=None,
-        target_table_name=None,
-        sample_sql=None,
-        max_query_size=None,
-        destination_host=None,
-        destination_port=22,
-        destination_host_username='vagrant',
-        destination_host_password='vagrant',
-        destination_hdfs_root_path='/user/hive/warehouse/',
-        sql_id=None
-    )
-    hdi.reset_remote_tables()
-
-
-@cli.command(
-    'hive-dataimport',
-    help='Export and import data samples from a Hive database to the Hive instance running in this toolbox, cloning the same data structure (db and table).')
-@click.option('--destination-hdfs-root-path', '-hdfs', default='/user/hive/warehouse/')
-@click.option('--destination-host-password', '-p', default='vagrant')
-@click.option('--destination-host-username', '-u', default='vagrant')
-@click.option('--destination-host', '-dh', default='marvin-hadoop')
-@click.option('--destination-port', '-dp', default=22)
-@click.option('--max-query-size', '-s', default=(50 * 1024 * 1024), help='Max query size in bytes')
-@click.option('--force', is_flag=True, help='Force table creation even if the table already exists in the destination')
-@click.option('--force-remote', is_flag=True, help='Force remote temp table creation even if the table already exists in the origin')
-@click.option('--validate', is_flag=True, help='Validate the query sample')
-@click.option('--force-copy-files', is_flag=True, help='Force the hdfs files copy procedure')
-@click.option('--skip-remote-preparation', is_flag=True, help='Skip the creation of remote temp table')
-@click.option('--engine', default=(os.path.relpath(".", "..")), help='Marvin engine name (default is the current folder)')
-@click.option('--sql-id', '-q', help='If informed, the process will be applied exclusively to this sample sql')
-@click.option('--conf', '-c', default='hive_dataimport.conf', help='Hive data import configuration file')
-@click.pass_context
-def hive_dataimport_cli(
-    ctx, conf, sql_id, engine, skip_remote_preparation, force_copy_files, validate, force,
-    force_remote, max_query_size, destination_host, destination_port, destination_host_username,
-    destination_host_password, destination_hdfs_root_path
-):
-    hive_dataimport(
-        ctx, conf, sql_id, engine, skip_remote_preparation, force_copy_files, validate, force,
-        force_remote, max_query_size, destination_host, destination_port, destination_host_username,
-        destination_host_password, destination_hdfs_root_path
-    )
-
-def hive_dataimport(
-    ctx, conf, sql_id, engine, skip_remote_preparation, force_copy_files, validate, force,
-    force_remote, max_query_size, destination_host, destination_port, destination_host_username,
-    destination_host_password, destination_hdfs_root_path
-):
-
-    initial_start_time = time.time()
-
-    confs = read_config(filename=conf)
-
-    if confs:
-        print(chr(27) + "[2J")
-
-        if sql_id:
-            confs = [x for x in confs if x['sql_id'] == sql_id]
-
-        for conf in confs:
-            hdi = HiveDataImporter(
-                max_query_size=max_query_size,
-                destination_host=destination_host,
-                destination_port=destination_port,
-                destination_host_username=destination_host_username,
-                destination_host_password=destination_host_password,
-                destination_hdfs_root_path=destination_hdfs_root_path,
-                engine=engine,
-                **conf)
-
-            if force:
-                table_exists = False
-
-            else:
-                table_exists = hdi.table_exists(host=hdi.destination_host, db=hdi.origin_db, table=hdi.target_table_name)
-
-            if not table_exists:
-                hdi.import_sample(
-                    create_temp_table=(not skip_remote_preparation),
-                    copy_files=force_copy_files,
-                    validate_query=validate,
-                    force_create_remote_table=force_remote,
-                )
-
-            else:
-                print ("Table {} already exists, skiping data import. Use --force flag to force data importation".format(hdi.full_table_name))
-
-        print("Total Time : {:.2f}s".format(time.time() - initial_start_time))
-
-        print("\n")
-
-
-def read_config(filename):
-    fname = os.path.join("", filename)
-    if os.path.exists(fname):
-        with open(fname, 'r') as fp:
-            return json.load(fp)
-    else:
-        print("Configuration file {} doesn't exists...".format(filename))
-        return {}
-
-
-class HiveDataImporter():
-    def __init__(
-        self, origin_host, origin_db, origin_queue, target_table_name, sample_sql, engine,
-        max_query_size, destination_host, destination_port, destination_host_username, destination_host_password,
-        destination_hdfs_root_path, sql_id
-    ):
-
-        self.sql_id = sql_id
-        self.origin_host = origin_host
-        self.origin_db = origin_db
-        self.origin_queue = origin_queue
-        self.target_table_name = target_table_name
-        self.sample_sql = sample_sql
-        self.engine = engine
-        self.destination_host = destination_host
-        self.destination_port = destination_port
-        self.destination_host_username = destination_host_username
-        self.destination_host_password = destination_host_password
-        self.destination_hdfs_root_path = destination_hdfs_root_path
-
-        self.temp_db_name = 'marvin'
-        self.max_query_size = max_query_size
-
-        self.supported_format_types = {
-            'TextInputFormat': 'TEXTFILE',
-            'SequenceFileInputFormat': 'SEQUENCEFILE',
-            'OrcInputFormat': 'ORC',
-            'MapredParquetInputFormat': 'PARQUET',
-            'AvroContainerInputFormat': 'AVRO',
-            'RCFileInputFormat': 'RCFILE'
-        }
-
-        print("\n------------------------------------------------------------------------------")
-        print("Initializing process for sql_id [{}]:".format(self.sql_id))
-        print("     Origin -->")
-        print("         Host:       [{}]".format(self.origin_host))
-        print("         DataBase:   [{}]".format(self.origin_db))
-        print("         Table Name: [{}]".format(self.target_table_name))
-        print("         Sample SQL: [{}]".format(self.sample_sql))
-        print("\n")
-        print("     Destination -->")
-        print("         Host:       [{}]".format(self.destination_host))
-        print("         DataBase:   [{}]".format(self.origin_db))
-        print("         Table Name: [{}]".format(self.target_table_name))
-        print("\n")
-
-    def validade_query(self):
-        # creating connections
-        print("Connecting with {} database on {} .. ".format(self.origin_db, self.origin_host))
-        conn_origin = self.get_connection(host=self.origin_host, db=self.origin_db, queue=self.origin_queue)
-
-        print("Counting sample sql ...")
-        total_rows = self.count_rows(conn=conn_origin, sql=self.sample_sql)
-        print("Found [{}] rows!".format(total_rows))
-
-        print("Retrieve data sample for query estimation reasons...")
-        data_sample = self.retrieve_data_sample(conn=conn_origin, full_table_name=self.full_table_name)
-        print("Calculated [{}] bytes per row!".format(data_sample['estimate_query_mean_per_line']))
-
-        estimated_size = data_sample['estimate_query_mean_per_line'] * total_rows
-
-        print ("Estimated query size is : {} bytes".format(estimated_size))
-        print ("Max permited query size is: {} bytes".format(self.max_query_size))
-
-        return estimated_size <= self.max_query_size
-
-    def table_exists(self, host, db, table):
-        print("Verifiying if table {}.{} exists on {} ...".format(db, table, host))
-        local_conn = self.get_connection(host=host)
-        cursor = local_conn.cursor()
-
-        cursor.execute("SHOW DATABASES LIKE '{}'".format(db))
-        dbs = cursor.fetchall()
-        self.show_log(cursor)
-
-        if not len(dbs) == 1:
-            table_exists = False
-        else:
-            cursor.execute("USE {} ".format(db))
-
-            cursor.execute("SHOW TABLES LIKE '{}'".format(table))
-            tbs = cursor.fetchall()
-            self.show_log(cursor)
-
-            if not len(tbs) == 1:
-                table_exists = False
-            else:
-                table_exists = True
-
-        cursor.close()
-        return table_exists
-
-    def reset_remote_tables(self):
-        self.print_start_step(name="Reset Remote Tables for {}".format(self.temp_table_prefix), step_number=1, total_steps=1)
-
-        print("Connecting with {} database on {} .. ".format(self.temp_db_name, self.origin_host))
-        remote_temp_db_conn = self.get_connection(host=self.origin_host, db=self.temp_db_name, queue=self.origin_queue)
-
-        cursor = remote_temp_db_conn.cursor()
-        cursor.execute("SHOW TABLES LIKE '{}*'".format(self.temp_table_prefix))
-        tbs = cursor.fetchall()
-        self.show_log(cursor)
-        cursor.close()
-
-        valid_tbs = [tb[0] for tb in tbs]
-
-        if valid_tbs:
-            print("Found {} tables for deletion....".format(len(tbs)))
-
-            for tb in valid_tbs:
-                table_name = "{}.{}".format(self.temp_db_name, tb)
-                print("Dropping table {} on {} .. ".format(table_name, self.origin_host))
-                self.drop_table(conn=remote_temp_db_conn, table_name=table_name)
-
-                hdfs_location = self.generate_table_location(self.destination_hdfs_root_path, self.origin_host, self.temp_db_name + '.db', tb)
-                print("Removing hdfs files from {} .. ".format(hdfs_location))
-
-                ssh = self._get_ssh_client(self.origin_host, self.destination_port, self.destination_host_username, self.destination_host_password)
-                self.delete_files(ssh, hdfs_location)
-
-        else:
-            print("No table found! Skiping reset remote tables process!!")
-
-        self.print_finish_step()
-
-    def print_finish_step(self):
-        print("\n                                               STEP TAKES {:.4f} (seconds) ".format((time.time() - self.start_time)))
-
-    def print_start_step(self, name, step_number, total_steps):
-        print("\n------------------------------------------------------------------------------")
-        print("MARVIN DATA IMPORT - STEP ({}) of ({}) - [{}]".format(step_number, total_steps, name))
-        print("------------------------------------------------------------------------------\n")
-        self.start_time = time.time()
-
-    def import_sample(self, create_temp_table=True, copy_files=True, validate_query=True, force_create_remote_table=False):
-        #
-        #################################################################################
-        # Step 1 - Query validation
-        self.print_start_step(name="Query Validation", step_number=1, total_steps=6)
-
-        is_valid = self.validade_query() if validate_query else True
-
-        if not is_valid:
-            print("Informed sample query is not valid!")
-            self.print_finish_step()
-            return
-
-        self.print_finish_step()
-
-        #
-        ##################################################################################
-        # Step 2 - Testing remote connections and getting table schema
-        self.print_start_step(name="Table Schema Achievement", step_number=2, total_steps=6)
-
-        # creating connections
-        print("Connecting with {} database on {} .. ".format(self.origin_db, self.origin_host))
-        conn_origin = self.get_connection(host=self.origin_host, db=self.origin_db, queue=self.origin_queue)
-
-        print("Connecting with {} database on {} .. ".format(self.temp_db_name, self.origin_host))
-        remote_temp_db_conn = self.get_connection(host=self.origin_host, db=self.temp_db_name, queue=self.origin_queue)
-
-        # getting ddl from real table
-        print("Getting DDL from {} table ".format(self.target_table_name))
-        ddl = self.get_createtable_ddl(conn=conn_origin, origin_table_name=self.target_table_name, dest_table_name=self.temp_table_name)
-
-        # validating whether partitions are used in the query statement
-        partitions = self.get_partitions(ddl)
-
-        if validate_query and not self.has_partitions(self.sample_sql, [p['col'] for p in partitions]):
-            print("Informed sample query doesn't filter by any partition column in the WHERE clause! Inform at least one partition.")
-            print("To skip this validation, omit the --validate flag.")
-            self.print_finish_step()
-            return
-
-        print("Connecting with DEFAULT database on {} .. ".format(self.destination_host))
-        local_conn = self.get_connection(host=self.destination_host)
-
-        # creating databases if not exists
-        print("Creating database {} ...".format(self.origin_db))
-        self.create_database(conn=local_conn, db=self.origin_db)
-
-        print("Connecting with {} database on {} .. ".format(self.origin_db, self.destination_host))
-        local_conn = self.get_connection(host=self.destination_host, db=self.origin_db)
-
-        # creating databases if not exists
-        print("Creating database {} ...".format(self.temp_db_name))
-        self.create_database(conn=local_conn, db=self.temp_db_name)
-
-        print("Connecting with {} database on {} .. ".format(self.temp_db_name, self.destination_host))
-        local_temp_db_conn = self.get_connection(host=self.destination_host, db=self.temp_db_name)
-
-        self.print_finish_step()
-
-        #
-        ##################################################################################
-        # Step 3 - Remote Table Preparation
-        self.print_start_step(name="Remote Table Preparation", step_number=3, total_steps=6)
-
-        if create_temp_table:
-
-            if force_create_remote_table:
-                remote_table_exists = False
-
-            else:
-                remote_table_exists = self.table_exists(host=self.origin_host, db=self.temp_db_name, table=self.temp_table_name)
-
-            # verify if the remote table already exists
-            if not remote_table_exists:
-                print("Dropping table {} on {} .. ".format(self.full_temp_table_name, self.origin_host))
-                self.drop_table(conn=remote_temp_db_conn, table_name=self.full_temp_table_name)
-
-                print("Creating table {} on {} .. ".format(self.full_temp_table_name, self.origin_host))
-                self.create_table(conn=remote_temp_db_conn, table_name=self.full_temp_table_name, ddl=ddl)
-
-                # insert from select
-                print("Populating table {} on {} using informed sample sql.. ".format(self.full_temp_table_name, self.origin_host))
-                self.populate_table(conn=conn_origin, table_name=self.full_temp_table_name, partitions=partitions, sql=self.sample_sql)
-
-            else:
-                print("Table {} on {} already exists ...".format(self.full_temp_table_name, self.origin_host))
-
-        self.print_finish_step()
-
-        #
-        ##################################################################################
-        # Step 4 - Copying remote hdfs files
-        self.print_start_step(name="Copying HDFS Files", step_number=4, total_steps=6)
-
-        # get temp location
-        print("Getting hdfs files location from {} table ...".format(self.full_temp_table_name))
-        temp_table_location = self.get_table_location(conn=remote_temp_db_conn, table_name=self.full_temp_table_name)
-
-        # copy hdfs files for local hdfs
-        external_table_location = self.generate_table_location(
-            host=self.destination_host,
-            root_path=self.destination_hdfs_root_path,
-            db_name=self.temp_db_name, table_name=self.temp_table_name)
-
-        print("Copying files from [{}] to [{}]".format(temp_table_location, external_table_location))
-        self.hdfs_dist_copy(force=copy_files,
-                            hdfs_host=self.destination_host,
-                            hdfs_port=self.destination_port,
-                            origin=temp_table_location,
-                            dest=external_table_location,
-                            password=self.destination_host_password,
-                            username=self.destination_host_username)
-
-        self.print_finish_step()
-        #
-        ##################################################################################
-        # Step 5 - External table creation using hdfs files
-        self.print_start_step(name="Local Temporary Table Creation", step_number=5, total_steps=6)
-
-        # creating external table using parquet files in hdfs
-        print("Dropping temp table {} on {} .. ".format(self.full_temp_table_name, self.destination_host))
-        self.drop_table(conn=local_temp_db_conn, table_name=self.full_temp_table_name)
-
-        # create temp table
-        print("Creating temp table {} using imported hdfs files from [{}] ...".format(self.full_temp_table_name, external_table_location))
-        self.create_external_table(conn=local_temp_db_conn,
-                                   temp_table_name=self.full_temp_table_name,
-                                   ddl=ddl,
-                                   parquet_file_location=external_table_location)
-
-        print("Refreshing table {} partitions on {} ..".format(self.full_temp_table_name, self.destination_host))
-        self.refresh_partitions(conn=local_temp_db_conn, table_name=self.full_temp_table_name)
-
-        self.print_finish_step()
-
-        #
-        ##################################################################################
-        # Step 6 - Destination table creation from external table
-        self.print_start_step(name="Table population", step_number=6, total_steps=6)
-
-        # create view
-        print("Dropping table view {} on {} .. ".format(self.full_table_name, self.destination_host))
-        self.drop_view(conn=local_conn, view_name=self.full_table_name)
-
-        print("Creating table view {} ... ".format(self.full_table_name, self.destination_host))
-        self.create_view(conn=local_conn, view_name=self.full_table_name, table_name=self.full_temp_table_name)
-
-        self.print_finish_step()
-
-        print("Procedure done!!!!")
-
-    @property
-    def temp_table_prefix(self):
-        return "{}".format(slugify(self.engine).replace('-', '_'))
-
-    @property
-    def temp_table_name(self):
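-        # Illustrative naming (assumed values): 'titanic_engine_mydb_users_<sha1 hexdigest of the slugified sample sql>'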
-        return "{}_{}_{}_{}".format(
-            self.temp_table_prefix,
-            self.origin_db,
-            self.target_table_name,
-            hashlib.sha1(slugify(self.sample_sql).encode('utf-8')).hexdigest()
-        )
-
-    @property
-    def full_table_name(self):
-        return "{}.{}".format(self.origin_db, self.target_table_name)
-
-    @property
-    def full_temp_table_name(self):
-        return "{}.{}".format(self.temp_db_name, self.temp_table_name)
-
-    def generate_table_location(self, root_path, host, db_name, table_name):
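-        # Illustrative example (assumed values):
-        #   generate_table_location('/user/hive/warehouse/', 'marvin-hadoop', 'marvin.db', 'users')
-        #     -> 'hdfs://marvin-hadoop:8020/user/hive/warehouse/marvin.db/users'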
-        return "hdfs://{}:8020{}".format(host, os.path.join(root_path, db_name, table_name))
-
-    def clean_ddl(self, ddl, remove_formats=True, remove_general=True):
-        if remove_general:
-            # Removing LOCATION statement
-            regex = "(LOCATION\s+'(.*?)')"
-            result = re.search(regex, ddl)
-            ddl = ddl.replace(result.group(1), " ") if result else ddl
-
-            # Removing TBLPROPERTIES statement
-            regex = "(TBLPROPERTIES\s+(.*?)\))"
-            result = re.search(regex, ddl)
-            ddl = ddl.replace(result.group(1), " ") if result else ddl
-
-            # Removing WITH SERDEPROPERTIES statement
-            regex = "(WITH SERDEPROPERTIES\s+(.*?)\))"
-            result = re.search(regex, ddl)
-            ddl = ddl.replace(result.group(1), " ") if result else ddl
-
-        if remove_formats:
-            # Removing STORED AS INPUTFORMAT statement
-            regex = "(STORED AS INPUTFORMAT\s+'(.*?)')"
-            result = re.search(regex, ddl)
-            ddl = ddl.replace(result.group(1), " ") if result else ddl
-
-            # Removing OUTPUTFORMAT statement
-            regex = "(OUTPUTFORMAT\s+'(.*?)')"
-            result = re.search(regex, ddl)
-            ddl = ddl.replace(result.group(1), " ") if result else ddl
-
-        return ddl
-
-    def get_table_format(self, ddl):
-        regex = "(STORED AS INPUTFORMAT\s+'(.*?)')"
-        result = re.search(regex, ddl)
-        input_format = result.group(2)
-        return self.supported_format_types[input_format.split(".")[-1]]
-
-    def get_database_info(self, ddl):
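-        # Illustrative example (assumed DDL): "CREATE TABLE `db.tbl`(" -> {'db': 'db', 'table': 'tbl'}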
-        regex = "CREATE TABLE `((.*?)\.)?(.*?)`\("
-        result = re.search(regex, ddl)
-        if result:
-            groups = result.groups()
-            if groups[0]:
-                # found db name
-                return {'db': groups[1], 'table': groups[2]}
-            else:
-                return {'db': None, 'table': groups[2]}
-        return {'db': None, 'table': None}
-
-    def get_createtable_ddl(self, conn, origin_table_name, dest_table_name):
-        cursor = conn.cursor()
-        cursor.execute("SHOW CREATE TABLE " + origin_table_name)
-        _lines = [_line[0] for _line in cursor.fetchall()]
-        ddl = ''.join(_lines)
-        ddl = self.clean_ddl(ddl, remove_formats=False, remove_general=True)
-        ddl = ddl.replace(origin_table_name, dest_table_name)
-        cursor.close()
-        return ddl
-
-    def create_database(self, conn, db):
-        self._execute_db_command(conn, "CREATE DATABASE IF NOT EXISTS " + db)
-
-    def drop_table(self, conn, table_name):
-        self._execute_db_command(conn, 'DROP TABLE IF EXISTS ' + table_name)
-
-    def drop_view(self, conn, view_name):
-        self._execute_db_command(conn, 'DROP VIEW ' + view_name)
-
-    def create_table(self, conn, table_name, ddl):
-        self._execute_db_command(conn, ddl)
-
-    def _execute_db_command(self, conn, command):
-        cursor = conn.cursor()
-        cursor.execute(command)
-        self.show_log(cursor)
-        cursor.close()
-
-    def get_connection(self, host, db='DEFAULT', queue='default'):
-        return hive.connect(host=host,
-                            database=db,
-                            configuration={'mapred.job.queue.name': queue,
-                                           'hive.exec.dynamic.partition.mode': 'nonstrict'})
-
-    def retrieve_data_sample(self, conn, full_table_name, sample_limit=100):
-        cursor = conn.cursor()
-
-        sql = "SELECT * FROM {} TABLESAMPLE ({} ROWS)".format(full_table_name, sample_limit)
-
-        cursor.execute(sql)
-        data_header = [{'col': line[0].split('.')[1], 'table': line[0].split('.')[0], 'type': line[1]} for line in cursor.description]
-        data = [row for row in cursor.fetchall()]
-        self.show_log(cursor)
-        cursor.close()
-        return {'data_header': data_header,
-                'total_lines': len(data),
-                'data': data,
-                'estimate_query_size': sys.getsizeof(data),
-                'estimate_query_mean_per_line': sys.getsizeof(data) / len(data)}
-
-    def count_rows(self, conn, sql):
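-        # Illustrative rewrite (assumed sql): "SELECT a, b FROM t WHERE dt='2019'" is counted as
-        # "SELECT COUNT(1) FROM t WHERE dt='2019'"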
-        cursor = conn.cursor()
-        cursor.execute("SELECT COUNT(1) " + sql[sql.upper().rfind("FROM"):])
-        size = cursor.fetchone()[0]
-        self.show_log(cursor)
-        cursor.close()
-        return size
-
-    def show_log(self, cursor):
-        for l in cursor.fetch_logs():
-            logger.debug(l)
-
-    def save_data(self, conn, table, data):
-        cursor = conn.cursor()
-        print('Inserting {} rows in {} table...'.format(data['total_lines'], table))
-        cols = [v['col'] for v in data['data_header']]
-        dml = "INSERT INTO {0} ({1}) VALUES ({2})".format(table, ", ".join(cols), ", ".join(['%s' for col in cols]))
-        cursor.executemany(dml, [tuple(row) for row in data['data'][1:10]])
-        self.show_log(cursor)
-        cursor.close()
-
-    def get_partitions(self, ddl):
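-        # Illustrative example (assumed DDL fragment): "PARTITIONED BY (`dt` string)" -> [{'col': 'dt', 'type': 'string'}]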
-        regex = "(PARTITIONED BY\s+\((.*?)\))"
-        result = re.search(regex, ddl)
-        if result:
-            p_cols = result.group(2).strip().replace('`', '').split(",")
-            return [{'col': p_col.split()[0], 'type': p_col.split()[1]} for p_col in p_cols]
-        else:
-            return []
-
-    def has_partitions(self, sql, partitions):
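-        # Illustrative example (assumed inputs): has_partitions("SELECT * FROM t WHERE dt = '2019-01-01'", ['dt']) -> True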
-        regex = "WHERE(.*?)(" + "|".join(partitions).upper() + ")"
-        result = re.search(regex, sql.upper())
-
-        if result:
-            return True
-        else:
-            return False
-
-    def populate_table(self, conn, table_name, partitions, sql):
-        partitions = [p['col'] for p in partitions]
-        partitions_statement = "PARTITION ({})".format(", ".join(partitions)) if partitions else ""
-        dml = "INSERT OVERWRITE TABLE {0} {1} {2}".format(table_name, partitions_statement, sql)
-        self._execute_db_command(conn, dml)
-
-    def create_view(self, conn, view_name, table_name):
-        dml = "CREATE VIEW {0} AS SELECT * FROM {1}".format(view_name, table_name)
-        self._execute_db_command(conn, dml)
-
-    def refresh_partitions(self, conn, table_name):
-        refresh_statement = "MSCK REPAIR TABLE {0}".format(table_name)
-        self._execute_db_command(conn, refresh_statement)
-
-    def get_table_location(self, conn, table_name):
-        cursor = conn.cursor()
-        cursor.execute("DESCRIBE FORMATTED {}".format(table_name))
-        location = [key[1].strip() for key in cursor.fetchall() if key[0] and key[0].strip().upper() == 'LOCATION:']
-        location = location[0].replace('hdfs://', 'hftp://')
-        cursor.close()
-        return location
-
-    def delete_files(self, ssh, url):
-        cmd = "hdfs dfs -rm -R '{}'".format(url)
-        self._hdfs_commands(ssh, cmd)
-
-    def copy_files(self, ssh, origin, dest):
-        cmd = "hadoop distcp --update '{}' '{}'".format(origin, dest)
-        return self._hdfs_commands(ssh, cmd)
-
-    def _hdfs_commands(self, ssh, cmd):
-        logger.debug("Executing remote command: {}".format(cmd))
-        i, o, e = ssh.exec_command(cmd)
-        errors = e.readlines()
-        output = o.readlines()
-        logger.debug(output)
-        logger.debug(errors)
-        return output, errors
-
-    def _get_ssh_client(self, hdfs_host, hdfs_port, username, password):
-        ssh = SSHClient()
-        ssh.set_missing_host_key_policy(AutoAddPolicy())
-        ssh.connect(hostname=hdfs_host, port=hdfs_port, username=username, password=password)
-        return ssh
-
-    def hdfs_dist_copy(self, force, hdfs_host, hdfs_port, origin, dest, username=None, password=None):
-        # connecting with hdfs host
-        ssh = self._get_ssh_client(hdfs_host, hdfs_port, username, password)
-
-        if force:
-            print("Removing old hdfs files if necessary. To force copy remote files use --force-copy-files flag.")
-
-            # delete files from dest
-            self.delete_files(ssh, dest)
-
-        else:
-            print("Using old hdfs files to complete the procedure. If necessary to copy files again use --force-copy-files flag.")
-
-        # copy files from origin to destination
-        _, copy_errors = self.copy_files(ssh, origin, dest)
-
-        # validate copy
-        cmd_template = "hdfs dfs -ls -R '{}' | grep -E '^-' | wc -l"
-        cmd = cmd_template.format(origin)
-        result1, _ = self._hdfs_commands(ssh, cmd)
-
-        cmd = cmd_template.format(dest)
-        result2, _ = self._hdfs_commands(ssh, cmd)
-
-        if result1 == result2:
-            print("Files {} successfully transferred!!".format(result1))
-        else:
-            print("Errors during hdfs files copy process!!")
-            for e_l in copy_errors:
-                logger.debug(e_l)
-            sys.exit("Stoping process!")
-
-    def create_external_table(self, conn, temp_table_name, ddl, parquet_file_location):
-        format_type = self.get_table_format(ddl)
-        ddl = self.clean_ddl(ddl, remove_formats=True, remove_general=False)
-        ddl = ddl.replace("CREATE TABLE", "CREATE EXTERNAL TABLE")
-        ddl = "{} STORED AS {} LOCATION '{}'".format(ddl, format_type, parquet_file_location)
-        self.create_table(conn=conn, table_name=temp_table_name, ddl=ddl)
diff --git a/python-toolbox/marvin_python_toolbox/management/notebook.py b/python-toolbox/marvin_python_toolbox/management/notebook.py
deleted file mode 100644
index 3e748d2..0000000
--- a/python-toolbox/marvin_python_toolbox/management/notebook.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import os
-import sys
-import click
-
-
-@click.group('notebook')
-def cli():
-    pass
-
-
-@cli.command('notebook', help='Start the Jupyter notebook server.')
-@click.option('--port', '-p', default=8888, help='Jupyter server port')
-@click.option('--enable-security', is_flag=True, help='Enable jupyter notebook token security.')
-@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
-@click.option('--allow-root', is_flag=True, help='Run notebook from root user.')
-@click.pass_context
-def notebook_cli(ctx, port, enable_security, spark_conf, allow_root):
-    notebook(ctx, port, enable_security, spark_conf, allow_root)
-
-
-def notebook(ctx, port, enable_security, spark_conf, allow_root):
-    notebookdir = os.path.join(ctx.obj['base_path'], 'notebooks')
-    command = [
-        "SPARK_CONF_DIR={0} YARN_CONF_DIR={0}".format(spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")),
-        'jupyter', 'notebook',
-        '--notebook-dir', notebookdir,
-        '--ip', '0.0.0.0',
-        '--port', str(port),
-        '--no-browser',
-        '--config', os.path.join(os.environ["MARVIN_TOOLBOX_PATH"], 'extras', 'notebook_extensions', 'jupyter_notebook_config.py')
-    ]
-
-    command.append("--NotebookApp.token=") if not enable_security else None
-    command.append("--allow-root") if allow_root else None
-
-    ret = os.system(' '.join(command))
-    sys.exit(ret)
-
-
-@cli.command('lab', help='Start the JupyterLab server.')
-@click.option('--port', '-p', default=8888, help='JupyterLab server port')
-@click.option('--enable-security', is_flag=True, help='Enable jupyterlab token security.')
-@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
-@click.pass_context
-def lab_cli(ctx, port, enable_security, spark_conf):
-    lab(ctx, port, enable_security, spark_conf)
-
-
-def lab(ctx, port, enable_security, spark_conf):
-    notebookdir = os.path.join(ctx.obj['base_path'], 'notebooks')
-    command = [
-        "SPARK_CONF_DIR={0} YARN_CONF_DIR={0}".format(spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")),
-        'jupyter-lab',
-        '--notebook-dir', notebookdir,
-        '--ip', '0.0.0.0',
-        '--port', str(port),
-        '--no-browser',
-    ]
-
-    command.append("--NotebookApp.token=") if not enable_security else None
-
-    ret = os.system(' '.join(command))
-    sys.exit(ret)
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/pkg.py b/python-toolbox/marvin_python_toolbox/management/pkg.py
deleted file mode 100644
index a283390..0000000
--- a/python-toolbox/marvin_python_toolbox/management/pkg.py
+++ /dev/null
@@ -1,417 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import os
-import pip
-from distutils.version import LooseVersion
-import sys
-import subprocess
-import click
-import re
-import os.path
-import errno
-import shutil
-from .._compatibility import urlparse
-import multiprocessing
-
-__all__ = ['copy']
-
-
-@click.group('pkg')
-def cli():
-    pass
-
-
-@cli.command('pkg-showversion', help='Show the package version.')
-@click.pass_context
-def version(ctx):
-    print(get_version(ctx.obj['package_path']))
-
-
-@cli.command('pkg-showchanges', help='Show the package changelog.')
-@click.pass_context
-def log(ctx):
-    os.system('less {}'.format(os.path.join(ctx.obj['base_path'], 'CHANGES.md')))
-
-
-@cli.command('pkg-showinfo', help='Show information about the package.')
-@click.pass_context
-def info(ctx):
-    version = get_version(ctx.obj['package_path'])
-    repo = get_git_repository_url(ctx.obj['base_path'])
-    branch = get_git_branch(ctx.obj['base_path'])
-    commit = get_git_commit(ctx.obj['base_path'])
-    tag = get_git_tag(ctx.obj['base_path'])
-    tag_commit = get_git_commit(ctx.obj['base_path'], tag=tag)
-    # tags = utils.get_git_tags(ctx.obj['base_path'])
-    tagged = 'yes' if (tag[1:] == version) else 'no'
-    clean = is_git_clean(ctx.obj['base_path'])
-    status = 'clean' if clean else 'dirty'
-    updated = ('' if (commit == tag_commit and clean) else
-               '(dev)' if (not tag[1:] == version) else
-               '(should be bumped)')
-    pip = 'git+ssh://{repo}@{tag}#egg={pkg}'.format(
-        repo=repo[:-4].replace(':', '/'), tag=tag,
-        pkg=ctx.obj['package_name'])
-
-    print('')
-    print('package: {name}'.format(name=ctx.obj['package_name']))
-    print('type:    {type_}'.format(type_=(ctx.obj['type'] or 'unknown')))
-    print('version: {version} {updated}'.format(version=version,
-                                                updated=updated))
-    print('')
-    print('branch:  {branch}'.format(branch=branch))
-    print('status:  {status}'.format(status=status))
-    print('commit:  {commit}'.format(commit=commit))
-    print('repo:    {repo}'.format(repo=repo))
-    print('')
-    print('tagged:  {tagged}'.format(tagged=tagged))
-    print('current: {tag} ({tag_commit})'.format(tag=tag,
-                                                 tag_commit=tag_commit))
-    print('pip url: {pip}'.format(pip=pip))
-    print('')
-
-
-@cli.command('pkg-checkdeps', help='Show the status of the package dependencies.')
-@click.pass_context
-def deps(ctx):
-    repos = get_repos_from_requirements(ctx.obj['base_path'])
-    required_versions = get_tag_from_repo_url(repos)
-    latest_versions = get_latest_tags_from_repos(repos)
-    installed_pkgs = pip.get_installed_distributions()
-    click.echo('')
-    for repo in repos:
-        status = 'outdated'
-        required = required_versions[repo]
-        latest = latest_versions[repo]
-        try:
-            repo_small = repo.split('@')[1]
-            pkg_name = repo.split('egg=')[1]
-        except IndexError:
-            continue
-        pkg_name_normalized = pkg_name.lower().replace('_', '-')
-        installed = 'unknown'
-        installed_list = [
-            pkg.version
-            for pkg in installed_pkgs
-            if pkg.key in [pkg_name_normalized, pkg_name_normalized + '-lib']
-        ]
-        if installed_list:
-            installed = 'v{}'.format(installed_list[0])
-
-        if latest is None or installed is None:
-            continue
-
-        if LooseVersion(installed) > LooseVersion(latest):
-            status = 'develop'
-        elif LooseVersion(installed) < LooseVersion(required):
-            status = 'up-to-date (old version installed)'
-        elif required == latest:
-            status = 'up-to-date'
-        msg = '{pkg_name}: {status} (required: {required}, installed: {installed}, latest: {latest})'.format(
-            repo=repo_small, pkg_name=pkg_name_normalized, status=status, required=required, installed=installed, latest=latest)
-        if status == 'up-to-date' or (status == 'develop' and installed == required):
-            color = 'green'
-        elif status in ('develop', 'up-to-date (old version installed)') or installed == latest:
-            color = 'yellow'
-        else:
-            color = 'red'
-        click.echo(click.style(msg, fg=color))
-
-
-@cli.command('pkg-bumpversion', help='Bump the package version.')
-@click.argument('part', default='patch')
-@click.option('--allow-dirty', is_flag=True,
-              help='Allow dirty')
-@click.option('--force', '-f', is_flag=True,
-              help='Alias for --allow-dirty')
-@click.option('--yes', '-y', is_flag=True,
-              help='Answer yes to all prompts')
-@click.pass_context
-def bumpversion(ctx, part, allow_dirty, force, yes):
-    args = [part]
-    allow_dirty = allow_dirty or force
-
-    is_clean = is_git_clean(ctx.obj['base_path'])
-    if not is_clean and not allow_dirty:
-        print('')
-        print('ERROR: Git working directory is not clean.')
-        print('')
-        print('You can use --allow-dirty or --force if you know what '
-              'you\'re doing.')
-        exitcode = 1
-    else:
-        if allow_dirty:
-            args.append('--allow-dirty')
-        command = ['bumpversion'] + args
-
-        old_version = get_version(ctx.obj['package_path'])
-        exitcode = subprocess.call(command, cwd=ctx.obj['base_path'])
-        new_version = get_version(ctx.obj['package_path'])
-
-        if exitcode == 0:
-            print('Bump version from {old} to {new}'.format(
-                old=old_version, new=new_version))
-        if yes or click.confirm('Do you want to edit CHANGES.md?'):
-            click.edit(filename=os.path.join(ctx.obj['base_path'], 'CHANGES.md'))
-    sys.exit(exitcode)
-
-
-@cli.command('pkg-createtag', help='Create git tag using the package version.')
-@click.pass_context
-def tag(ctx):
-    tag = 'v{}'.format(get_version(ctx.obj['package_path']))
-    print('Creating git tag {}'.format(tag))
-    command = ['git', 'tag', '-m', '"version {}"'.format(tag), tag]
-    sys.exit(subprocess.call(command))
-
-
-@cli.command('pkg-updatedeps', help='Update requirements.txt.')
-@click.option('--install', '-i', is_flag=True)
-@click.option('--install-all', '-a', is_flag=True)
-@click.pass_context
-def update(ctx, install, install_all):
-    base_path = ctx.obj['base_path']
-    repos = get_repos_from_requirements(base_path)
-    required_versions = get_tag_from_repo_url(repos)
-    latest_versions = get_latest_tags_from_repos(repos)
-    installed_pkgs = pip.get_installed_distributions()
-    install_list = ['-e .']
-    click.echo('')
-    for repo in repos:
-        latest = latest_versions[repo]
-        required = required_versions[repo]
-        try:
-            pkg_name = repo.split('egg=')[1]
-        except IndexError:
-            continue
-        pkg_name_normalized = pkg_name.lower().replace('_', '-')
-        installed = 'unknown'
-        installed_list = [
-            pkg.version
-            for pkg in installed_pkgs
-            if pkg.key in [pkg_name_normalized, pkg_name_normalized + '-lib']
-        ]
-        if installed_list:
-            installed = 'v{}'.format(installed_list[0])
-
-        if LooseVersion(required) < LooseVersion(latest):
-            click.echo('Updating {} from {} to {}...'.format(pkg_name, required, latest))
-            new_repo = update_repo_tag(repo, latest, path=base_path)
-            if LooseVersion(installed) < LooseVersion(latest):
-                install_list.append(new_repo)
-        elif LooseVersion(installed) < LooseVersion(required):
-            install_list.append(repo)
-    if install_all:
-        install = True
-        install_list = ['-r requirements.txt']
-    if install:
-        for new_repo in install_list:
-            new_repo = new_repo.strip()
-            click.echo('')
-            click.echo('Running `pip install -U {}` ...'.format(new_repo))
-            command = ['pip', 'install', '-U'] + new_repo.split(' ')
-            exitcode = subprocess.call(command, cwd=base_path)
-            if exitcode == 0:
-                click.echo('Done.')
-            else:
-                click.echo('Failed.')
-                sys.exit(exitcode)
-
-
-def copy(src, dest, ignore=('.git', '.pyc', '__pycache__')):
-    try:
-        shutil.copytree(src, dest, ignore=shutil.ignore_patterns(*ignore))
-    except OSError as e:
-        if e.errno == errno.ENOTDIR:
-            shutil.copy(src, dest)
-        else:
-            print('Directory not copied. Error: %s' % e)
-
-
-def get_version(path):
-    """Return the project version from VERSION file."""
-
-    with open(os.path.join(path, 'VERSION'), 'rb') as f:
-        version = f.read().decode('ascii').strip()
-    return version
-
-
-def get_repos_from_requirements(path):
-    if path is None:
-        path = os.path.curdir
-    with open(os.path.join(path, 'requirements.txt'), 'r') as fp:
-        repos = [line.strip() for line in fp if 'git@' in line and not line.strip().startswith('#')]
-    return repos
-
-
-def get_tag_from_repo_url(repos):
-    tags = {}
-    for repo in repos:
-        if '@' in repo:
-            repo_parsed = urlparse(repo)
-            repo_path = repo_parsed.path
-            tags[repo] = repo_path.split('@')[1]
-        else:
-            tags[repo] = None
-    return tags
-
-
-def _clone(repo):
-    return repo, git_clone(repo, checkout=False, depth=1)
-
-
-def get_latest_tags_from_repos(repos):
-    tags = {}
-    if not repos:
-        return tags
-
-    pool = multiprocessing.Pool(len(repos))
-
-    repos_ = pool.map(_clone, repos)
-    for repo, path in repos_:
-        if path:
-            tag = get_git_tag(path)
-        else:
-            tag = None
-        tags[repo] = tag
-    return tags
-
-
-def update_repo_tag(repo, tag, path=None):
-    if path is None:
-        path = os.path.curdir
-    ret = ''
-    content = ''
-    with open(os.path.join(path, 'requirements.txt'), 'r') as fp:
-        for line in fp:
-            if repo in line:
-                line = re.sub(r'@v[0-9]+\.[0-9]+\.[0-9]+', '@{}'.format(tag), line)
-                ret += line
-            content += line
-
-    with open(os.path.join(path, 'requirements.txt'), 'w') as fp:
-        fp.write(content)
-
-    return ret
-
-
-repo_re = re.compile(r':(\w+)\/(.*)\.git')
-
-
-def git_clone(repo, dest=None, checkout=True, depth=None, branch=None, single_branch=False):
-    if '#egg' in repo:
-        repo_parsed = urlparse(repo)
-        repo_path = repo_parsed.path
-        if '@' in repo_path:
-            repo_path = repo_path.split('@')[0]
-        repo_path = repo_path.strip('/')
-        repo_team, repo_name = tuple(repo_path.split('/'))
-        repo = repo_parsed.netloc + ':' + repo_path
-    else:
-        repo_info = re.search(repo_re, repo)
-        if not repo_info:
-            return None
-        repo_team = repo_info.group(1)
-        repo_name = repo_info.group(2)
-    if dest is None:
-        path = os.path.join(os.path.expanduser('~'), '.marvin-python-toolbox', 'repos')
-        dest = os.path.join(path, repo_team, repo_name)
-
-    opts = ''
-    if not checkout:
-        opts += ' -n'
-    if depth:
-        opts += ' --depth ' + str(depth)
-    if branch:
-        opts += ' --branch ' + branch
-    if single_branch:
-        opts += ' --single-branch'
-
-    if not os.path.exists(dest):
-        os.makedirs(dest)
-        command = 'git clone {opts} {repo} {dest}'.format(
-            opts=opts, repo=repo, dest=dest)
-        print(command)
-        subprocess.Popen(command.split(), stdout=subprocess.PIPE).wait()
-
-    opts = ''
-    if depth:
-        opts += ' --depth ' + str(depth)
-    print('Fetching latest version from {} repository'.format(repo_name))
-    try:
-        subprocess.Popen(('git fetch --tags ' + opts).split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dest).wait()
-    except OSError:
-        print('Could not fetch tags from {}'.format(repo_name))
-        dest = None
-
-    return dest
-
-
-def get_git_branch(path=None):
-    if path is None:
-        path = os.path.curdir
-    command = 'git rev-parse --abbrev-ref HEAD'.split()
-    branch = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
-    return branch.strip().decode('utf-8')
-
-
-def get_git_tag(path=None):
-    if path is None:
-        path = os.path.curdir
-    command = 'git rev-list --tags --max-count=1'.split()
-    commit = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read().decode('utf-8')
-    command = 'git describe --tags {}'.format(commit).split()
-    tag = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read().decode('utf-8')
-    return tag.strip()
-
-
-def get_git_commit(path=None, tag=None):
-    if path is None:
-        path = os.path.curdir
-    if tag:
-        command = 'git rev-list -n 1 {tag}'.format(tag=tag).split()
-    else:
-        command = 'git rev-parse HEAD'.split()
-    commit = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
-    return commit.strip().decode('utf-8')
-
-
-def get_git_repository_url(path=None):
-    if path is None:
-        path = os.path.curdir
-    command = 'git config --get remote.origin.url'.split()
-    url = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
-    return url.strip().decode('utf-8')
-
-
-def get_git_tags(path=None):
-    if path is None:
-        path = os.path.curdir
-    command = 'git tag'.split()
-    tags = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
-    return sorted(tags.strip().split('\n'), reverse=True)
-
-
-def is_git_clean(path=None):
-    if path is None:
-        path = os.path.curdir
-    # `git diff --quiet` signals cleanliness via its exit status (0 = clean),
-    # so check the return code rather than reading (always empty) stdout
-    command = 'git diff --quiet HEAD'.split()
-    return subprocess.call(command, cwd=path) == 0
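The helpers above lean on two details that are easy to miss: the pinned tag lives after an `@` in the requirement URL's path, and version ordering is delegated to `distutils.version.LooseVersion` (so `v0.0.10` correctly sorts after `v0.0.9`). A minimal sketch of that parsing and comparison, using a hypothetical requirement line and targeting Python 3 (`distutils.version` is deprecated on newer interpreters):

```python
from distutils.version import LooseVersion
from urllib.parse import urlparse

# Hypothetical VCS requirement line in the format get_tag_from_repo_url expects.
line = 'git+ssh://git@github.com/some-team/some-lib.git@v0.0.9#egg=some_lib'

pkg_name = line.split('egg=')[1]                 # 'some_lib'
pinned_tag = urlparse(line).path.split('@')[1]   # 'v0.0.9'

# LooseVersion compares numeric components as integers, so v0.0.10 sorts
# after v0.0.9; a plain string comparison would get this wrong.
assert LooseVersion('v0.0.10') > LooseVersion(pinned_tag)
print(pkg_name, pinned_tag)
```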
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.bumpversion.cfg b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.bumpversion.cfg
deleted file mode 100644
index e54a5c8..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.bumpversion.cfg
+++ /dev/null
@@ -1,5 +0,0 @@
-[bumpversion]
-current_version = 0.0.1
-
-[bumpversion:file:{{project.package}}/VERSION]
-[bumpversion:file:README.md]
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.coveragerc b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.coveragerc
deleted file mode 100644
index 6ebe78e..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.coveragerc
+++ /dev/null
@@ -1,22 +0,0 @@
-[run]
-omit = tests/*
-branch = True
-
-[report]
-exclude_lines =
-    pragma: no cover
-
-    def __repr__
-    if self\.debug
-
-    raise AssertionError
-    raise NotImplementedError
-
-    if 0:
-    if __name__ == .__main__.:
-
-[html]
-directory = coverage_report
-
-[xml]
-output = coverage_report.xml
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.gitignore b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.gitignore
deleted file mode 100644
index 05dd2a2..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/.gitignore
+++ /dev/null
@@ -1,16 +0,0 @@
-.cache
-.eggs
-.tox
-.testmondata
-.coverage
-.coverage.*
-coverage_report.xml
-coverage_report
-*.egg
-*.egg-info
-*.pyc
-tests/__pycache__
-.DS_Store
-.packages
-.profiling
-notebooks/data
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/CHANGES.md b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/CHANGES.md
deleted file mode 100644
index 4835f6f..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/CHANGES.md
+++ /dev/null
@@ -1,5 +0,0 @@
-## Changes log
-
-### 0.0.1
-
- - initial version
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/Dockerfile b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/Dockerfile
deleted file mode 100644
index ec237ff..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/Dockerfile
+++ /dev/null
@@ -1,113 +0,0 @@
-############################################################
-FROM python:2-alpine3.10
-# To use Python 3, comment the line above and uncomment
-# the line below.
-#FROM python:3-alpine3.10
-############################################################
-
-MAINTAINER {{mantainer.email}}
-
-ENV SLEEP_MILLIS 0
-
-USER root
-
-##############################################################
-# Define all environment variables to be used 
-##############################################################
-
-ENV MARVIN_HOME=/opt/marvin
-ENV MARVIN_DATA_PATH=/marvin-data
-ENV MARVIN_ENGINE_HOME=$MARVIN_HOME/engine
-ENV MARVIN_ENGINE_ENV=marvin-engine-env
-ENV WORKON_HOME=$MARVIN_HOME/.virtualenvs
-ENV SPARK_HOME=/opt/spark
-ENV SPARK_CONF_DIR=$SPARK_HOME/conf
-ENV HADOOP_CONF_DIR=$SPARK_CONF_DIR
-ENV YARN_CONF_DIR=$SPARK_CONF_DIR
-
-
-
-##############################################################
-# Create all folders needed 
-##############################################################
-
-RUN mkdir -p $MARVIN_HOME && \
-    mkdir -p $MARVIN_DATA_PATH && \
-    mkdir -p $MARVIN_ENGINE_HOME && \
-    mkdir -p /var/log/marvin/engines && \
-    mkdir -p /var/run/marvin/engines && \
-##############################################################
-# Install the system dependencies for default installation 
-##############################################################
-    apk add --no-cache g++ openssl-dev openjdk11-jre-headless bash && \
-    apk add --no-cache --virtual .build-deps make \
-    git \
-    wget \
-    libsass-dev \
-    openblas-dev \
-    libffi-dev \
-    libxml2-dev \
-    libxslt-dev \
-    libpng-dev \
-    freetype-dev \
-    cyrus-sasl-dev
-##############################################################
-# Install Apache Spark
-#
-# Uncomment if you are using Spark; note that the Spark
-# configuration files are needed for this to work correctly.
-##############################################################
-#
-# RUN wget -O /tmp/spark-2.1.1-bin-hadoop2.6.tgz https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz && \
-#    tar -xf /tmp/spark-2.1.1-bin-hadoop2.6.tgz -C /opt/ && \
-#    ln -s /opt/spark-2.1.1-bin-hadoop2.6 /opt/spark
-##############################################################
-
-RUN mkdir -p $SPARK_CONF_DIR
-
-##############################################################
-#        <CUSTOM ENGINE INSTALLATION PROCEDURE HERE>         #
-##############################################################
-
-
-##############################################################
-# Copy and Install the marvin engine
-##############################################################
-
-RUN /bin/bash -c "pip install virtualenvwrapper && \
-    cd $MARVIN_ENGINE_HOME && \
-    source /usr/local/bin/virtualenvwrapper.sh && \
-    mkvirtualenv $MARVIN_ENGINE_ENV"
-
-ADD build/engine.tar $MARVIN_ENGINE_HOME
-
-ADD build/marvin-engine-executor-assembly.jar $MARVIN_DATA_PATH 
-
-RUN /bin/bash -c "source /usr/local/bin/virtualenvwrapper.sh && \
-    workon $MARVIN_ENGINE_ENV && \
-    cd $MARVIN_ENGINE_HOME && \
-    pip install --no-cache numpy && \
-    pip install --no-cache scipy && \
-    pip install --no-cache pandas && \
-    pip install --no-cache matplotlib && \
-    pip install --no-cache cython && \
-    pip install --no-cache scikit-learn && \
-    pip install --no-cache Fabric && \
-    pip install --no-cache marvin-python-toolbox && \
-    pip install . "
-##############################################################
-# Uninstalling unnecessary software and cleaning cache
-##############################################################
-RUN rm -rf /root/.cache && \
-    apk del .build-deps
-
-##############################################################
-# Starts the engine http server
-##############################################################
-
-EXPOSE 8000
-
-CMD /bin/bash -c "source /usr/local/bin/virtualenvwrapper.sh && \
-    workon $MARVIN_ENGINE_ENV && \
-    cd $MARVIN_ENGINE_HOME && \
-    marvin engine-httpserver -h 0.0.0.0 -p 8000"
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/INSTALL b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/INSTALL
deleted file mode 100644
index fccdaf8..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/INSTALL
+++ /dev/null
@@ -1 +0,0 @@
-REPLACE: Add here the detailed instructions to install this project
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/LICENSE b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/LICENSE
deleted file mode 100644
index e69de29..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/LICENSE
+++ /dev/null
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/MANIFEST.in b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/MANIFEST.in
deleted file mode 100644
index 4f5d4a8..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/MANIFEST.in
+++ /dev/null
@@ -1,9 +0,0 @@
-include CHANGES.md
-include INSTALL
-include LICENSE
-include MANIFEST.in
-include README.md
-include {{project.package}}/VERSION
-recursive-include notebooks *
-prune notebooks/build
-recursive-include tests *
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/Makefile b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/Makefile
deleted file mode 100644
index 59032ed..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/Makefile
+++ /dev/null
@@ -1,81 +0,0 @@
-.PHONY: help marvin marvin-automl marvin-prod update clean-pyc clean-build clean-reports clean-deps clean docker-build docker-push docker-run
-
-DOCKER_VERSION?=0.00.01
-DOCKER_REGISTRY_ADRESS?=docker.registry.io
-MARVIN_DATA_PATH?=$(HOME)/marvin/data
-MARVIN_ENGINE_NAME?={{project.name}}
-MARVIN_TOOLBOX_VERSION?={{project.toolbox_version}}
-
-help:
-	@echo "    marvin"
-	@echo "        Prepare project to be used as a marvin package."
-	@echo "    marvin-prod"
-	@echo "        Prepare project to be used in production environment."
-	@echo "    marvin-automl"
-	@echo "        Prepare project to be used as a marvin-automl package."
-	@echo "    update"
-	@echo "        Reinstall requirements and setup.py dependencies."
-	@echo "    clean"
-	@echo "        Remove all generated artifacts."
-	@echo "    clean-pyc"
-	@echo "        Remove python artifacts."
-	@echo "    clean-build"
-	@echo "        Remove build artifacts."
-	@echo "    clean-reports"
-	@echo "        Remove coverage reports."
-	@echo "    clean-deps"
-	@echo "        Remove marvin setup.py dependencies."
-	@echo "    docker-build"
-	@echo "        Runs the docker build command with marvin env default parameters."
-	@echo "    docker-push"
-	@echo "        Runs the docker push command with marvin env default parameters."
-	@echo "    docker-run"
-	@echo "        Runs the docker run command with marvin env default parameters."
-
-marvin:
-	pip install -e ".[testing]"
-	marvin --help
-marvin-automl:
-	pip install -e ".[testing]"
-	bash install_automl.sh
-	marvin --help
-marvin-prod:
-	pip install .
-	marvin --help
-
-update:
-	pip install -e . -U
-
-clean-pyc:
-	find . -name '*.pyc' -exec rm -f {} +
-	find . -name '*.pyo' -exec rm -f {} +
-	find . -name '*~' -exec rm -f  {} +
-
-clean-build:
-	rm -rf *.egg-info
-	rm -rf .cache
-	rm -rf .eggs
-	rm -rf dist
-	rm -rf build
-
-clean-reports:
-	rm -rf coverage_report/
-	rm -f coverage.xml
-	rm -f .coverage
-
-clean-deps:
-	pip freeze | grep -v "^-e" | xargs pip uninstall -y
-
-clean: clean-build clean-pyc clean-reports clean-deps
-
-docker-build: clean-build
-	mkdir -p build
-	tar -cf build/engine.tar --exclude=*.log --exclude=*.pkl --exclude='build' --exclude='notebooks' --exclude=*.tar *
-	cp -f $(MARVIN_DATA_PATH)/marvin-engine-executor-assembly-$(MARVIN_TOOLBOX_VERSION).jar build/marvin-engine-executor-assembly.jar
-	sudo docker build -t $(DOCKER_REGISTRY_ADRESS)/$(MARVIN_ENGINE_NAME):$(DOCKER_VERSION) .
-
-docker-run:
-	sudo docker run --name=marvin-$(MARVIN_ENGINE_NAME)-$(DOCKER_VERSION) --mount type=bind,source=$(MARVIN_DATA_PATH),destination=/marvin-data -p 8000:8000 $(DOCKER_REGISTRY_ADRESS)/$(MARVIN_ENGINE_NAME):$(DOCKER_VERSION)
-
-docker-push:
-	sudo docker push $(DOCKER_REGISTRY_ADRESS)/$(MARVIN_ENGINE_NAME):$(DOCKER_VERSION)
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/README.md b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/README.md
deleted file mode 100644
index 77f7630..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/README.md
+++ /dev/null
@@ -1,143 +0,0 @@
-# {{project.name}} v0.0.1
-
-## Overview
-
-{{project.description}}
-
-
-## Requirements
-
-_REPLACE: Add here the list of requirements. For example:_
-
- - Python 2.7
- - Numpy 1.11.0 or higher
-
-
-## Installation
-
-Use the Marvin toolbox to provision, deploy and start the remote HTTP server.
-
-First, edit the `marvin.ini` file, setting the options within the
-`ssh_deployment` section:
-
-1. `host`: the host IP address or name where the engine should be deployed. You
-can enable multi-host deployment using `,` to separate hosts
-2. `port`: the SSH connection port
-3. `user`: the SSH connection username. Currently, only a single user is
-supported. This user should be capable of *passwordless sudo*, although it can
-use password for the SSH connection
-
-Next, ensure that the remote servers are provisioned (all required software
-is installed):
-
-    marvin engine-deploy --provision
-
-Next, package your engine:
-
-    marvin engine-deploy --package
-
-This will create a compressed archive containing your engine code under the
-`.packages` directory.
-
-Next, deploy your engine to the remote servers:
-
-    marvin engine-deploy
-
-By default, a dependency clean will be executed at each deploy. You can skip it
-using:
-
-    marvin engine-deploy --skip-clean
-
-Next, you can start the HTTP server on the remote servers:
-
-    marvin engine-httpserver-remote start
-
-You can check if the HTTP server is running:
-
-    marvin engine-httpserver-remote status
-
-And stop it:
-
-    marvin engine-httpserver-remote stop
-
-After starting, you can test it by making an HTTP request to any endpoint, like:
-
-    curl -v http://example.com/predictor/health
-
-Under the hood, this engine uses Fabric to define the provisioning and
-deployment process. Check the `fabfile.py` for more information. You can add new tasks or
-edit existing ones to match your provisioning and deployment pipeline.
-
-## Development
-
-### Getting started
-
-First, create a new virtualenv
-
-```
-mkvirtualenv {{project.package}}_env
-```
-
-Now install the development dependencies
-
-```
-make marvin
-```
-
-You are now ready to code.
-
-
-### Adding new dependencies
-
-This is important: all development dependencies should be added to `setup.py`.
-
-### Running tests
-
-This project uses *[py.test](http://pytest.org/)* as test runner and *[Tox](https://tox.readthedocs.io)* to manage virtualenvs.
-
-To run all tests use the following command
-
-```
-marvin test
-```
-
-To run a specific test
-
-```
-marvin test tests/test_file.py::TestClass::test_method
-```
-
-
-### Writing documentation
-
-The project documentation is written using *[Jupyter](http://jupyter.readthedocs.io/)* notebooks. 
-You can start the notebook server from the command line by running the following command
-
-```
-marvin notebook
-```
-
-Use notebooks to demonstrate how to use the library's features. They can also be useful for showing use cases.
-
-
-### Bumping version
-
-```
-marvin pkg-bumpversion [patch|minor|major]
-git add . && git commit -m "Bump version"
-```
-
-
-### Tagging version
-
-```
-marvin pkg-createtag
-git push origin master --follow-tags
-```
-
-
-### Logging
-
-The default log level is set to _WARNING_. You can change the log level at runtime by setting one of the following environment variables: `{{project.package|upper}}_LOG_LEVEL` or `LOG_LEVEL`. The available values are _CRITICAL_, _ERROR_, _WARNING_, _INFO_ and _DEBUG_.
-
-Be careful using `LOG_LEVEL`; it may affect other libraries.
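To make the lookup order concrete, here is a small sketch of the precedence described above, with a hypothetical package name standing in for the generated `{{project.package|upper}}` variable:

```python
import logging
import os

# Hypothetical package name; the generated engine uses {{project.package|upper}}.
# The package-specific variable wins over the generic LOG_LEVEL.
level_name = (os.getenv('MYENGINE_LOG_LEVEL')
              or os.getenv('LOG_LEVEL', 'WARNING'))
logging.basicConfig(level=getattr(logging, level_name))
logging.getLogger('myengine').warning('effective level: %s', level_name)
```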
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/docs.yaml b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/docs.yaml
deleted file mode 100644
index d701870..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/docs.yaml
+++ /dev/null
@@ -1,567 +0,0 @@
-openapi: "3.0.0"
-info:
-  version: 0.0.1
-  title: {{project.package}} API Doc
-  contact:
-      name: maintainer {{mantainer.name}}
-      email: {{mantainer.email}}
-      url: https://marvin.apache.org
-  license:
-    name: Apache License 2.0
-servers:
-  - url: http://localhost:8000
-  - url: http://0.0.0.0:8000
-tags:
-  - name: Docker
-    description: For Docker users, please use the "make docker-build" and "make docker-run" commands in your engine virtualenv to start the server
-  - name: Acquisitor
-    description: Set up the initial_dataset with all the cleaned data necessary to build your dataset in the next action
-  - name: Tpreparator
-    description: Set up the dataset with the transformed data, in a format compatible with the algorithm used to build the model in the next action
-  - name: Trainer
-    description: Set up the model with the result of the training algorithm
-  - name: Evaluator
-    description: Set up the metrics with the result of the algorithms used to test the model
-  - name: Predictor
-    description: Return the predicted value as a JSON-parsable object
-  - name: Feedback
-    description: Receive a feedback message; the user can process this message for any purpose
-  - name: Pipeline
-    description: Perform all batch actions in the right order
-paths:
-  /acquisitor/health:
-    get:
-      summary: Get acquisitor's service health
-      operationId: getAcquisitorHealth
-      tags:
-        - Acquisitor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /acquisitor/status:
-    get:
-      summary: Get acquisitor's service status
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: getAcquisitorStatus
-      tags:
-        - Acquisitor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /acquisitor:
-    post:
-      summary: Run acquisitor
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: acquisitor
-      tags:
-        - Acquisitor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /tpreparator/health:
-    get:
-      summary: Get trainer preparator's service health
-      operationId: getTPreparatorHealth
-      tags:
-        - Tpreparator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /tpreparator/status:
-    get:
-      summary: Get trainer preparator's service status
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: getTPreparatorStatus
-      tags:
-        - Tpreparator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /tpreparator/reload?protocol=:
-    put:
-      summary: Reload artifact for trainer preparator
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: reloadTPreparator
-      tags:
-        - Tpreparator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /tpreparator:
-    post:
-      summary: Run trainer preparator
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: tpreparator
-      tags:
-        - Tpreparator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /trainer/health:
-    get:
-      summary: Get trainer's service health
-      operationId: getTrainerHealth
-      tags:
-        - Trainer
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /trainer/status:
-    get:
-      summary: Get trainer's service status
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: getTrainerStatus
-      tags:
-        - Trainer
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /trainer/reload?protocol=:
-    put:
-      summary: Reload artifact for trainer
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: reloadTrainer
-      tags:
-        - Trainer
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /trainer:
-    post:
-      summary: Run trainer
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: trainer
-      tags:
-        - Trainer
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /evaluator/health:
-    get:
-      summary: Get evaluator's service health
-      operationId: getEvaluatorHealth
-      tags:
-        - Evaluator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /evaluator/status:
-    get:
-      summary: Get evaluator's service status
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: getEvaluatorStatus
-      tags:
-        - Evaluator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /evaluator/metrics?protocol=:
-    get:
-      summary: Get the metrics value
-      parameters: 
-        - in: query
-          name: protocol
-          schema: 
-            type: string
-          required: true
-          description: Metrics protocol value
-      operationId: getMetrics
-      tags:
-        - Evaluator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /evaluator/reload?protocol=:
-    put:
-      summary: Reload artifact for evaluator
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: reloadEvaluator
-      tags:
-        - Evaluator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /evaluator:
-    post:
-      summary: Run evaluator
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: evaluator
-      tags:
-        - Evaluator
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /predictor/health:
-    get:
-      summary: Get predictor's service health
-      operationId: getPredictorHealth
-      tags:
-        - Predictor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /predictor/status:
-    get:
-      summary: Get predictor's service status
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: getPredictorStatus
-      tags:
-        - Predictor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /predictor/reload?protocol=:
-    put:
-      summary: Reload artifact for predictor
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: reloadPredictor
-      tags:
-        - Predictor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /predictor:
-    post:
-      summary: Run predictor
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: predictor
-      tags:
-        - Predictor
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /feedback/health:
-    get:
-      summary: Get feedback's service health
-      operationId: getFeedbackHealth
-      tags:
-        - Feedback
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /feedback/status:
-    get:
-      summary: Get feedback's service status
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      operationId: getFeedbackStatus
-      tags:
-        - Feedback
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /feedback/reload?protocol=:
-    put:
-      summary: Reload artifact for feedback
-      parameters: 
-        - in: query
-          name: protocol
-          schema:
-            type: string
-          required: true
-          description: The Protocol value generated from last action
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: reloadFeedback
-      tags:
-        - Feedback
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /feedback:
-    post:
-      summary: Run feedback
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: feedback
-      tags:
-        - Feedback
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
-  /pipeline:
-    post:
-      summary: Do all batch actions (from Acquisitor to Evaluator)
-      requestBody:
-        description: The default value for body is an empty json object
-        required: true
-        content:
-          application/json:
-            schema:
-              type: object
-      operationId: pipeline
-      tags:
-        - Pipeline
-      responses:
-        '200':
-          description: Result Message / Success
-        '400':
-          description: Bad Request / Illegal Argument / Missing Parameters
-        '500':
-          description: Internal Server Error / Timeout
-        '503':
-          description: Service Unavailable
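Since every action exposes the same response contract, a short client is enough to smoke-test a running engine. A stdlib-only sketch, assuming the default server from the list above is up:

```python
import json
import urllib.request

BASE = 'http://localhost:8000'  # first server listed in the spec above

# Health endpoints are plain GETs with no parameters.
with urllib.request.urlopen(BASE + '/predictor/health', timeout=5) as resp:
    print('health:', resp.status)

# Actions are POSTs; an empty JSON object is the documented default body.
req = urllib.request.Request(
    BASE + '/predictor',
    data=json.dumps({}).encode('utf-8'),
    headers={'Content-Type': 'application/json'},
)
with urllib.request.urlopen(req, timeout=30) as resp:
    print('predictor:', resp.read().decode('utf-8'))
```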
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.messages b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.messages
deleted file mode 100644
index 5ff7ec4..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.messages
+++ /dev/null
@@ -1,3 +0,0 @@
-[{
-	"msg1": "Hello from marvin engine!"
-}]
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.metadata b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.metadata
deleted file mode 100644
index bb56837..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.metadata
+++ /dev/null
@@ -1,65 +0,0 @@
-{
-	"name": "{{project.name.lower()}}",
-	"version": "v0.0.1",
-	"engineType": "python",
-	"artifactsRemotePath": "/tmp/marvin",
-	"artifactManagerType": "FS",
-	"onlineActionTimeout": 1000,
-	"metricsTimeout": 10000,
-    "healthCheckTimeout": 2000,
-	"reloadTimeout": 600000,
-	"batchActionTimeout": 600000,
-	"pipelineActions": ["acquisitor", "tpreparator", "trainer", "evaluator"],
-	"actions": [{
-		"name": "acquisitor",
-		"actionType": "batch",
-		"port": 50051,
-		"host": "localhost",
-		"artifactsToPersist": ["initialdataset"],
-		"artifactsToLoad": [],
-		"pipeline": []
-	}, {
-		"name": "tpreparator",
-		"actionType": "batch",
-		"port": 50052,
-		"host": "localhost",
-		"artifactsToPersist": ["dataset"],
-		"artifactsToLoad": ["initialdataset"],
-		"pipeline": []
-	}, {
-		"name": "trainer",
-		"actionType": "batch",
-		"port": 50053,
-		"host": "localhost",
-		"artifactsToPersist": ["model"],
-		"artifactsToLoad": ["dataset"],
-		"pipeline": []
-	}, {
-		"name": "evaluator",
-		"actionType": "batch",
-		"port": 50054,
-		"host": "localhost",
-		"artifactsToPersist": ["metrics"],
-		"artifactsToLoad": ["dataset", "model"],
-		"pipeline": []
-	}, {
-		"name": "predictor",
-		"actionType": "online",
-		"port": 50055,
-		"host": "localhost",
-		"artifactsToPersist": [],
-		"artifactsToLoad": ["model", "metrics"],
-		"pipeline": ["ppreparator"]
-	}, {
-		"name": "feedback",
-		"actionType": "online",
-		"port": 50056,
-		"host": "localhost",
-		"artifactsToPersist": [],
-		"artifactsToLoad": [],
-		"pipeline": []
-	}],
-	"bigquery_project": "xxx-project",
-	"bigquery_queries": [],
-	"bigquery_csvfiles": []
-}
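The executor reads this file to wire actions together, so a quick consistency check catches typos early. A minimal sketch, assuming `engine.metadata` sits in the current directory:

```python
import json

with open('engine.metadata') as f:
    meta = json.load(f)

# Every pipeline step must be a declared batch action.
actions = {a['name']: a for a in meta['actions']}
for step in meta['pipelineActions']:
    assert step in actions, 'unknown pipeline step: %s' % step
    assert actions[step]['actionType'] == 'batch', '%s is not a batch action' % step

# Ports must not collide when all actions run on the same host.
ports = [a['port'] for a in meta['actions']]
assert len(ports) == len(set(ports)), 'duplicate action ports'
print('metadata OK:', ' -> '.join(meta['pipelineActions']))
```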
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.params b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.params
deleted file mode 100644
index 410f586..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/engine.params
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-	"PARAM_1" : "VALUE_OF_PARAM_1"
-}
\ No newline at end of file
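Engine actions receive these values as a plain dictionary. A hypothetical loader that overlays run-time overrides on the file's defaults (the helper name is illustrative, not part of the toolbox API):

```python
import json

def load_params(path='engine.params', **overrides):
    # Hypothetical helper: read the JSON defaults, then apply overrides.
    with open(path) as f:
        params = json.load(f)
    params.update(overrides)
    return params

params = load_params(PARAM_1='overridden')
print(params['PARAM_1'])  # 'overridden'
```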
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/fabfile.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/fabfile.py
deleted file mode 100644
index 1da9c50..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/fabfile.py
+++ /dev/null
@@ -1,188 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-
-from fabric.api import env
-from fabric.api import run
-from fabric.api import execute
-from fabric.api import cd
-from fabric.api import local
-from fabric.api import put
-from fabric.api import sudo
-from fabric.state import output
-from marvin_python_toolbox import __version__ as TOOLBOX_VERSION
-from marvin_python_toolbox.common.config import Config
-
-_host = Config.get("host", section="ssh_deployment").split(",")
-_port = Config.get("port", section="ssh_deployment")
-_user = Config.get("user", section="ssh_deployment")
-
-for h in _host:
-    env.hosts.append("{user}@{host}:{port}".format(user=_user, host=h, port=_port))
-
-output["everything"] = False
-output["running"] = True
-
-env.package = "{{project.package}}"
-env.margin_engine_executor_prefix = "/opt/marvin/engine-executor"
-env.margin_engine_executor_jar = "marvin-engine-executor-assembly-{version}.jar".format(version=TOOLBOX_VERSION)
-env.marvin_engine_executor_path = env.margin_engine_executor_prefix + "/" + env.margin_engine_executor_jar
-
-
-def install_oracle_jdk():
-    sudo("add-apt-repository ppa:webupd8team/java -y")
-    sudo("apt-get -qq update")
-    run("echo debconf shared/accepted-oracle-license-v1-1 select true | sudo debconf-set-selections")
-    run("echo debconf shared/accepted-oracle-license-v1-1 seen true | sudo debconf-set-selections")
-    sudo("apt-get install -y oracle-java8-installer")
-
-
-def install_virtualenvwrapper():
-    run("pip install virtualenvwrapper")
-    run("echo 'export WORKON_HOME=${HOME}/.virtualenvs' >> ${HOME}/.profile")
-    run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> ${HOME}/.profile")
-
-
-def install_apache_spark():
-    run("curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o /tmp/spark-2.1.1-bin-hadoop2.6.tgz")
-    sudo("tar -xf /tmp/spark-2.1.1-bin-hadoop2.6.tgz -C /opt/")
-    sudo("ln -s /opt/spark-2.1.1-bin-hadoop2.6 /opt/spark")
-    run("echo 'export SPARK_HOME=/opt/spark' >> ${HOME}/.profile")
-
-
-def install_required_packages():
-    sudo("apt-get update -y")
-    sudo("apt-get install -y git")
-    sudo("apt-get install -y wget")
-    sudo("apt-get install -y python2.7-dev")
-    sudo("apt-get install -y python-pip")
-    sudo("apt-get install -y ipython")
-    sudo("apt-get install -y libffi-dev")
-    sudo("apt-get install -y libssl-dev")
-    sudo("apt-get install -y libxml2-dev")
-    sudo("apt-get install -y libxslt1-dev")
-    sudo("apt-get install -y libpng12-dev")
-    sudo("apt-get install -y libfreetype6-dev")
-    sudo("apt-get install -y python-tk")
-    sudo("apt-get install -y libsasl2-dev")
-    sudo("apt-get install -y python-pip")
-    sudo("apt-get install -y graphviz")
-    sudo("pip install --upgrade pip")
-
-
-def install_marvin_engine_executor():
-    sudo("mkdir -p {prefix}".format(prefix=env.margin_engine_executor_prefix))
-    with cd("{prefix}".format(prefix=env.margin_engine_executor_prefix)):
-        sudo("wget https://s3.amazonaws.com/marvin-engine-executor/{jar}".format(jar=env.margin_engine_executor_jar))
-
-
-def create_marvin_engines_prefix():
-    sudo("mkdir -p /opt/marvin/engines")
-    sudo("chown {user}:{user} /opt/marvin/engines".format(user=env.user))
-    sudo("mkdir -p /var/log/marvin/engines")
-    sudo("chown {user}:{user} /var/log/marvin/engines".format(user=env.user))
-    sudo("mkdir -p /var/run/marvin/engines")
-    sudo("chown {user}:{user} /var/run/marvin/engines".format(user=env.user))
-
-
-def configure_marvin_environment():
-    run("echo 'export MARVIN_HOME=${HOME}/marvin' >> ${HOME}/.profile")
-    run("echo 'export MARVIN_DATA_PATH=${MARVIN_HOME}/data' >> ${HOME}/.profile")
-    run("mkdir -p ${MARVIN_HOME}")
-    run("mkdir -p ${MARVIN_DATA_PATH}")
-
-
-def provision():
-    execute(install_required_packages)
-    execute(install_virtualenvwrapper)
-    execute(install_oracle_jdk)
-    execute(install_apache_spark)
-    execute(install_marvin_engine_executor)
-    execute(create_marvin_engines_prefix)
-    execute(configure_marvin_environment)
-
-
-def package(version):
-    package = env.package
-    local("mkdir -p .packages")
-    local("tar czvf .packages/{package}-{version}.tar.gz --exclude='.packages' .".format(
-          package=package, version=version))
-
-
-def deploy(version, skip_clean=False):
-    execute(engine_stop)
-    package = env.package
-    put(local_path=".packages/{package}-{version}.tar.gz".format(
-        package=package, version=version), remote_path="/tmp/")
-    run("mkdir -p /opt/marvin/engines/{package}/{version}".format(
-        package=package, version=version))
-    with cd("/opt/marvin/engines/{package}/{version}".format(
-            package=package, version=version)):
-        run("tar xzvf /tmp/{package}-{version}.tar.gz".format(
-            package=package, version=version))
-    with cd("/opt/marvin/engines/{package}".format(package=package)):
-        symlink_exists = run("stat current", quiet=True).succeeded
-        if (symlink_exists):
-            run("rm current")
-        run("ln -s {version} current".format(version=version))
-    with cd("/opt/marvin/engines/{package}/current".format(package=package)):
-        run("mkvirtualenv {package}_env".format(package=package))
-        run("setvirtualenvproject")
-        if skip_clean:
-            run("workon {package}_env && make marvin".format(
-                package=package))
-        else:
-            run("workon {package}_env && make clean && make marvin".format(
-                package=package))
-    execute(engine_start)
-
-
-def engine_start(http_host, http_port):
-    package = env.package
-
-    command = (
-        "workon {package}_env &&"
-        " (marvin engine-httpserver"
-        " -h {http_host}"
-        " -p {http_port}"
-        " -e {executor}"
-        " 1> /var/log/marvin/engines/{package}.out"
-        " 2> /var/log/marvin/engines/{package}.err"
-        " & echo $! > /var/run/marvin/engines/{package}.pid)"
-    ).format(
-        package=package,
-        http_host=http_host,
-        http_port=http_port,
-        executor=env.marvin_engine_executor_path
-    )
-
-    with cd("/opt/marvin/engines/{package}/current".format(package=package)):
-        run(command, pty=False)
-
-
-def engine_stop():
-    package = env.package
-
-    pid_file_exists = run("cat /var/run/marvin/engines/{package}.pid".format(
-        package=package), quiet=True)
-    if pid_file_exists.succeeded:
-        with cd("/opt/marvin/engines/{package}/current".format(package=package)):
-            children_pids = run("ps --ppid $(cat /var/run/marvin/engines/{package}.pid) -o pid --no-headers |xargs echo".format(
-                package=package))
-            run("kill $(cat /var/run/marvin/engines/{package}.pid) {children_pids}".format(
-                package=package, children_pids=children_pids))
-            run("rm /var/run/marvin/engines/{package}.pid".format(package=package))
-
-
-def engine_status():
-    package = env.package
-    pid_file_exists = run("cat /var/run/marvin/engines/{package}.pid".format(
-        package=package), quiet=True)
-    if pid_file_exists.succeeded:
-        is_running = run("ps $(cat /var/run/marvin/engines/{package}.pid)".format(package=package), quiet=True)
-        if is_running.succeeded:
-            print "Your engine is running :)"
-        else:
-            print "Your engine is not running :("
-    else:
-        print "Your engine is not running :("
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/feedback.messages b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/feedback.messages
deleted file mode 100644
index 5ff7ec4..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/feedback.messages
+++ /dev/null
@@ -1,3 +0,0 @@
-[{
-	"msg1": "Hello from marvin engine!"
-}]
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/install_automl.sh b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/install_automl.sh
deleted file mode 100644
index 4bde530..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/install_automl.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-###############################################
-# Marvin AutoML installation Script           #
-###############################################
-cmd=(dialog --title "Marvin AutoML" --separate-output --checklist "Select tools:" 22 76 16)
-options=(1 "auto-sklearn" off    # any option can be set to default to "on"
-         2 "h2o AutoML" off
-         3 "TPOT" off)
-choices=$("${cmd[@]}" "${options[@]}" 2>&1 >/dev/tty)
-clear
-for choice in $choices
-do
-    case $choice in
-        1)
-            echo "Instaling auto-sklearn..."
-            wget https://raw.githubusercontent.com/automl/auto-sklearn/master/requirements.txt \
-                 | xargs -n 1 -L 1 pip install
-            pip install auto-sklearn
-            ;;
-        2)
-            echo "Installing h2o..."
-            pip install requests
-            pip install tabulate
-            pip install scikit-learn
-            pip install http://h2o-release.s3.amazonaws.com/h2o/rel-yau/3/Python/h2o-3.26.0.3-py2.py3-none-any.whl
-            wget http://h2o-release.s3.amazonaws.com/h2o/rel-yau/3/h2o-3.26.0.3.zip
-            unzip h2o-3.26.0.3.zip
-            rm h2o-3.26.0.3.zip
-            ;;
-        3)
-            echo "Installing TPOT..."
-            pip install tpot
-            ;;
-    esac
-done
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/marvin.ini b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/marvin.ini
deleted file mode 100644
index 0bd2f05..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/marvin.ini
+++ /dev/null
@@ -1,11 +0,0 @@
-[marvin]
-package = {{project.package}}
-type = {{project.type}}
-executor_url = https://s3.amazonaws.com/marvin-engine-executor/marvin-engine-executor-assembly-{{project.toolbox_version}}.jar
-
-[ssh_deployment]
-# You can enable multi-host deployment like this
-# host = host1.com,host2.com,hostN.com
-host = host1.com
-port = 22
-user = marvin
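The fabfile reads this file through the toolbox's `Config` helper; the stdlib sketch below performs the same lookup and is shown only to illustrate the file's shape:

```python
from configparser import ConfigParser

cfg = ConfigParser()
cfg.read('marvin.ini')

# Multi-host deployment: a comma-separated list, as the comment above notes.
hosts = [h.strip() for h in cfg.get('ssh_deployment', 'host').split(',')]
port = cfg.getint('ssh_deployment', 'port')
user = cfg.get('ssh_deployment', 'user')
print(['{0}@{1}:{2}'.format(user, h, port) for h in hosts])
```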
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/notebooks/sample.ipynb b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/notebooks/sample.ipynb
deleted file mode 100644
index d4d6a30..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/notebooks/sample.ipynb
+++ /dev/null
@@ -1,50 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Documentation"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Sample"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import {{project.package}}"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "python2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.5"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 0
-}
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/VERSION b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/VERSION
deleted file mode 100644
index 8acdd82..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/VERSION
+++ /dev/null
@@ -1 +0,0 @@
-0.0.1
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/__init__.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/__init__.py
deleted file mode 100644
index a45c1f1..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-import os.path
-
-from .data_handler import *
-from .prediction import *
-from .training import *
-
-
-# Get package version number from "VERSION" file
-with open(os.path.join(os.path.dirname(__file__), 'VERSION'), 'rb') as f:
-    __version__ = f.read().decode('ascii').strip()
-
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/_compatibility.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/_compatibility.py
deleted file mode 100644
index 4ce10fe..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/_compatibility.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""Compatibility module.
-
-Import this module to help write code compatible with Python 2 and 3.
-"""
-
-from __future__ import print_function
-from __future__ import division
-from __future__ import absolute_import
-from __future__ import unicode_literals
-
-import six
-
-__all__ = ['six']
-
-# Add here any code that has to differentiate between Python 2 and 3.
-
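A small sketch of the kind of branching this module is meant to host; `six.moves` papers over the stdlib renames between versions (for example, `urlparse` moved in Python 3):

```python
import six
from six.moves.urllib.parse import urlparse

if six.PY2:
    text_type = unicode  # noqa: F821 -- only defined on Python 2
else:
    text_type = str

print(six.PY2, text_type, urlparse('http://example.com/path').path)
```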
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/_logging.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/_logging.py
deleted file mode 100644
index 4bbf0a4..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/_logging.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""Custom logging module.
-
-This module is responsible for managing log messages and the log file.
-"""
-
-import sys
-import os
-import os.path
-import logging
-
-DEFAULT_LOG_LEVEL = logging.INFO
-DEFAULT_LOG_DIR = '/tmp'
-
-
-class Logger(logging.getLoggerClass()):
-    """Custom logger class.
-
-    Use this class to customize the logger behavior or to intercept the
-    messages.
-    """
-    def error(self, msg, *args, **kwargs):
-        # Add code here to intercept the project's error messages
-        super(Logger, self).error(msg, *args, **kwargs)
-
-    def critical(self, msg, *args, **kwargs):
-        # Add code here to intercept the project's critical messages
-        super(Logger, self).critical(msg, *args, **kwargs)
-
-
-logging.setLoggerClass(Logger)
-
-
-def get_logger(name, namespace='{{project.package}}',
-               log_level=DEFAULT_LOG_LEVEL, log_dir=DEFAULT_LOG_DIR):
-    """Build a logger that outputs to a file and to the console."""
-
-    log_level = (os.getenv('{}_LOG_LEVEL'.format(namespace.upper())) or
-                 os.getenv('LOG_LEVEL', log_level))
-    log_dir = (os.getenv('{}_LOG_DIR'.format(namespace.upper())) or
-               os.getenv('LOG_DIR', log_dir))
-
-    logger = logging.getLogger('{}.{}'.format(namespace, name))
-    logger.setLevel(log_level)
-
-    formatter = logging.Formatter(
-        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
-
-    # Create a console stream handler
-    console_handler = logging.StreamHandler()
-    console_handler.setLevel(log_level)
-    console_handler.setFormatter(formatter)
-    logger.addHandler(console_handler)
-
-    try:
-        if log_dir:
-            log_path = os.path.abspath(log_dir)
-            log_filename = '{name}.{pid}.log'.format(
-                name=namespace, pid=os.getpid())
-
-            file_path = str(os.path.join(log_path, log_filename))
-
-            if not os.path.exists(log_path):
-                os.makedirs(log_path, mode=0o774)
-
-            # Create a file handler
-            file_handler = logging.FileHandler(file_path)
-            file_handler.setLevel(log_level)
-            file_handler.setFormatter(formatter)
-            logger.addHandler(file_handler)
-    except OSError as e:
-        logger.error('Could not create log file {file}: {error}'.format(
-            file=file_path, error=e.strerror))
-
-    return logger
-
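The factory above layers environment overrides (`{NAMESPACE}_LOG_LEVEL`/`LOG_LEVEL` and `{NAMESPACE}_LOG_DIR`/`LOG_DIR`) over the defaults and attaches both a console and a file handler. A hypothetical usage sketch; `my_engine` stands in for the rendered `{{project.package}}` value and is not a real package:

```python
import os

# The per-namespace override wins over the generic one; both are optional.
os.environ['MY_ENGINE_LOG_LEVEL'] = 'DEBUG'
os.environ['LOG_DIR'] = '/tmp/my_engine_logs'

from my_engine._logging import get_logger

logger = get_logger('predictor', namespace='my_engine')
logger.debug('goes to the console and to /tmp/my_engine_logs/my_engine.<pid>.log')
```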
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/__init__.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/__init__.py
deleted file mode 100644
index 6707e49..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-from .acquisitor_and_cleaner import AcquisitorAndCleaner
-from .training_preparator import TrainingPreparator
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/acquisitor_and_cleaner.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/acquisitor_and_cleaner.py
deleted file mode 100644
index b6da06a..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/acquisitor_and_cleaner.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""AcquisitorAndCleaner engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBaseDataHandler
-
-__all__ = ['AcquisitorAndCleaner']
-
-
-logger = get_logger('acquisitor_and_cleaner')
-
-
-class AcquisitorAndCleaner(EngineBaseDataHandler):
-
-    def __init__(self, **kwargs):
-        super(AcquisitorAndCleaner, self).__init__(**kwargs)
-
-    def execute(self, params, **kwargs):
-        """
-        Set up the initial_dataset with all the cleaned data necessary to build your dataset in the next action.
-
-        E.g.
-
-            self.marvin_initial_dataset = {...}
-        """
-        self.marvin_initial_dataset = {}
-
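To make the stub concrete, one possible `execute` body for this action is sketched below; the CSV path, the pandas dependency, and the dataset key are assumptions for illustration, not part of the template:

```python
import pandas as pd


def execute(self, params, **kwargs):
    # Acquire raw data and apply basic cleaning before handing it on.
    df = pd.read_csv('/tmp/iris.csv').dropna()
    self.marvin_initial_dataset = {'iris': df}
```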
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/training_preparator.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/training_preparator.py
deleted file mode 100644
index 283e59d..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/training_preparator.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""TrainingPreparator engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBaseDataHandler
-
-__all__ = ['TrainingPreparator']
-
-
-logger = get_logger('training_preparator')
-
-
-class TrainingPreparator(EngineBaseDataHandler):
-
-    def __init__(self, **kwargs):
-        super(TrainingPreparator, self).__init__(**kwargs)
-
-    def execute(self, params, **kwargs):
-        """
-        Set up the dataset with the transformed data that is compatible with the algorithm used to build the model in the next action.
-        Use the self.initial_dataset prepared in the last action as the source of data.
-
-        E.g.
-
-            self.marvin_dataset = {...}
-        """
-        self.marvin_dataset = {}
-
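A hypothetical `execute` body for this action, continuing the iris example from the previous sketch (scikit-learn's `train_test_split` is an assumption, not a template requirement):

```python
from sklearn.model_selection import train_test_split


def execute(self, params, **kwargs):
    df = self.marvin_initial_dataset['iris']
    X = df.drop('species', axis=1)
    y = df['species']
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, test_size=params.get('test_size', 0.2))
    self.marvin_dataset = {'X_train': X_train, 'X_test': X_test,
                           'y_train': y_train, 'y_test': y_test}
```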
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/__init__.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/__init__.py
deleted file mode 100644
index aa6291d..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/__init__.py
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-from .prediction_preparator import PredictionPreparator
-from .predictor import Predictor
-from .feedback import Feedback
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/feedback.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/feedback.py
deleted file mode 100644
index c47bfff..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/feedback.py
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""Feedback engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBasePrediction
-
-__all__ = ['Feedback']
-
-
-logger = get_logger('feedback')
-
-
-class Feedback(EngineBasePrediction):
-
-    def __init__(self, **kwargs):
-        super(Feedback, self).__init__(**kwargs)
-
-    def execute(self, input_message, params, **kwargs):
-        """
-        Receive a feedback message; the user can process this message in any way needed.
-        Return "Done" to signal that the message was received and processed.
-        """
-        return {"message": "Done"}
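The template only requires that this action acknowledge the message; what is done with it is up to the project. One hedged possibility, appending each message to a local JSON-lines file (the path is an assumption):

```python
import json


def execute(self, input_message, params, **kwargs):
    # Persist the raw feedback for later offline analysis.
    with open('/tmp/feedback.jsonl', 'a') as fp:
        fp.write(json.dumps({'message': input_message}) + '\n')
    return {"message": "Done"}
```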
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/prediction_preparator.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/prediction_preparator.py
deleted file mode 100644
index f67a9b9..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/prediction_preparator.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""PredictionPreparator engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBasePrediction
-
-__all__ = ['PredictionPreparator']
-
-
-logger = get_logger('prediction_preparator')
-
-
-class PredictionPreparator(EngineBasePrediction):
-
-    def __init__(self, **kwargs):
-        super(PredictionPreparator, self).__init__(**kwargs)
-
-    def execute(self, input_message, params, **kwargs):
-        """
-        Return a prepared input_message compatible with the prediction algorithm used by the model.
-        Use the self.model and self.metrics objects if necessary.
-        """
-        return input_message
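For illustration, a `PredictionPreparator.execute` that reshapes a raw JSON message into the 2-D feature array a scikit-learn style model expects; the field names are assumptions tied to the iris sketches above:

```python
def execute(self, input_message, params, **kwargs):
    # One row, four features, in the order the model was trained on.
    return [[input_message['sepal_length'],
             input_message['sepal_width'],
             input_message['petal_length'],
             input_message['petal_width']]]
```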
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/predictor.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/predictor.py
deleted file mode 100644
index f1c6b4e..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/predictor.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""Predictor engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBasePrediction
-
-__all__ = ['Predictor']
-
-
-logger = get_logger('predictor')
-
-
-class Predictor(EngineBasePrediction):
-
-    def __init__(self, **kwargs):
-        super(Predictor, self).__init__(**kwargs)
-
-    def execute(self, input_message, params, **kwargs):
-        """
-        Return the predicted value as a JSON-serializable object.
-        Use the self.model and self.metrics objects if necessary.
-        """
-        return {"predicted_value": "mocked_return"}
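A matching `Predictor.execute` sketch, assuming a fitted scikit-learn style estimator was stored under `self.marvin_model['clf']` by the Trainer action (an assumption of these sketches, not a template contract):

```python
def execute(self, input_message, params, **kwargs):
    prediction = self.marvin_model['clf'].predict(input_message)
    # str() keeps the payload JSON-serializable even for numpy scalars.
    return {"predicted_value": str(prediction[0])}
```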
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/__init__.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/__init__.py
deleted file mode 100644
index e1723b7..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/__init__.py
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-from .metrics_evaluator import MetricsEvaluator
-from .trainer import Trainer
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/metrics_evaluator.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/metrics_evaluator.py
deleted file mode 100644
index 9f4f09e..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/metrics_evaluator.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""MetricsEvaluator engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBaseTraining
-
-__all__ = ['MetricsEvaluator']
-
-
-logger = get_logger('metrics_evaluator')
-
-
-class MetricsEvaluator(EngineBaseTraining):
-
-    def __init__(self, **kwargs):
-        super(MetricsEvaluator, self).__init__(**kwargs)
-
-    def execute(self, params, **kwargs):
-        """
-        Set up the metrics with the results of the algorithms used to test the model.
-        Use the self.dataset and self.model prepared in the previous actions.
-
-        E.g.
-
-            self.marvin_metrics = {...}
-        """
-        self.marvin_metrics = {}
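An illustrative `execute` body, continuing the same assumed iris pipeline (it relies on the estimator stored by the Trainer sketch after the next file; accuracy is just one possible metric, as the template accepts any dict):

```python
from sklearn.metrics import accuracy_score


def execute(self, params, **kwargs):
    clf = self.marvin_model['clf']
    y_pred = clf.predict(self.marvin_dataset['X_test'])
    self.marvin_metrics = {
        'accuracy': accuracy_score(self.marvin_dataset['y_test'], y_pred),
    }
```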
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/trainer.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/trainer.py
deleted file mode 100644
index a5dd23a..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/project_package/training/trainer.py
+++ /dev/null
@@ -1,34 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-"""Trainer engine action.
-
-Use this module to add the project main code.
-"""
-
-from .._compatibility import six
-from .._logging import get_logger
-
-from marvin_python_toolbox.engine_base import EngineBaseTraining
-
-__all__ = ['Trainer']
-
-
-logger = get_logger('trainer')
-
-
-class Trainer(EngineBaseTraining):
-
-    def __init__(self, **kwargs):
-        super(Trainer, self).__init__(**kwargs)
-
-    def execute(self, params, **kwargs):
-        """
-        Set up the model with the result of the algorithm used for training.
-        Use the self.dataset prepared in the last action as the source of data.
-
-        E.g.
-
-            self.marvin_model = {...}
-        """
-        self.marvin_model = {}
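One hedged way to fill in this stub, consistent with the other sketches (RandomForestClassifier is an arbitrary choice; any object can be assigned to `self.marvin_model`):

```python
from sklearn.ensemble import RandomForestClassifier


def execute(self, params, **kwargs):
    clf = RandomForestClassifier(
        n_estimators=params.get('n_estimators', 10))
    clf.fit(self.marvin_dataset['X_train'], self.marvin_dataset['y_train'])
    self.marvin_model = {'clf': clf}
```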
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/pytest.ini b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/pytest.ini
deleted file mode 100644
index cab8644..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/pytest.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[pytest]
-minversion    = 2.0
-norecursedirs = .git .tox .eggs .cache *.egg build dist tmp*
-python_files  = test*.py
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/setup.cfg b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/setup.cfg
deleted file mode 100644
index e69de29..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/setup.cfg
+++ /dev/null
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/setup.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/setup.py
deleted file mode 100644
index 40d82f0..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/setup.py
+++ /dev/null
@@ -1,175 +0,0 @@
-from __future__ import print_function
-
-import os
-import shutil
-from os.path import dirname, join
-from setuptools import setup, find_packages
-from setuptools.command.test import test as TestCommand
-from setuptools.command.develop import develop as _develop
-from setuptools.command.install import install as _install
-
-
-REQUIREMENTS_TESTS = [
-    'pytest>=2.6.4',
-    'pytest-cov>=1.8.1',
-    'mock>=2.0.0',
-    'virtualenv>=15.0.1',
-    'tox>=2.2.0',
-]
-
-def _get_version():
-    """Return the project version from the VERSION file."""
-
-    with open(join(dirname(__file__), '{{project.package}}/VERSION'), 'rb') as f:
-        version = f.read().decode('ascii').strip()
-    return version
-
-
-def _hooks(dir):
-    _set_autocomplete()
-    _install_notebook_extension()
-
-
-def _set_autocomplete():
-    import marvin_python_toolbox as toolbox
-    virtualenv = os.environ.get('VIRTUAL_ENV', None)
-
-    if virtualenv:
-        postactivate = os.path.join(virtualenv, 'bin', 'postactivate')
-
-        if os.path.exists(postactivate):
-            shutil.copy(
-                os.path.join(toolbox.__path__[0], 'extras', 'marvin_bash_completion'),
-                os.path.join(virtualenv, 'marvin_bash_completion')
-            )
-
-            command = 'source "{}"'.format(os.path.join(virtualenv, 'marvin_bash_completion'))
-
-            with open(postactivate, 'r+') as fp:
-                lines = fp.readlines()
-                fp.seek(0)
-                configured = False
-                for line in lines:
-                    if 'marvin_bash_completion' in line:
-                        # Replacing old autocomplete configuration
-                        fp.write(command)
-                        configured = True
-                    else:
-                        fp.write(line)
-
-                if not configured:
-                    fp.write(command)
-                    # 'Autocomplete was successfully configured'
-                fp.write('\n')
-                fp.truncate()
-
-
-def _install_notebook_extension():
-    import marvin_python_toolbox as toolbox
-
-    install_command = [
-        "jupyter",
-        "nbextension",
-        "install",
-        os.path.join(toolbox.__path__[0], 'extras', 'notebook_extensions', 'main.js'),
-        "--destination",
-        "marvin.js",
-        "--sys-prefix",
-        "--overwrite"
-    ]
-
-    os.system(' '.join(install_command))
-
-    enable_command = [
-        "jupyter",
-        "nbextension",
-        "enable",
-        "marvin",
-        "--sys-prefix"
-    ]
-
-    os.system(' '.join(enable_command))
-
-
-class develop(_develop):
-    def run(self):
-        _develop.run(self)
-        self.execute(_hooks, (self.install_lib,), msg="Running develop preparation task")
-
-
-class install(_install):
-    def run(self):
-        _install.run(self)
-        self.execute(_hooks, (self.install_lib,), msg="Running install preparation task")
-
-
-class Tox(TestCommand):
-    """Run the test cases using the tox command."""
-
-    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
-
-    def initialize_options(self):
-        TestCommand.initialize_options(self)
-        self.tox_args = None
-
-    def finalize_options(self):
-        TestCommand.finalize_options(self)
-        self.test_args = []
-        self.test_suite = True
-
-    def run_tests(self):
-        # Import here, cause outside the eggs aren't loaded
-        import tox
-        import shlex
-        import sys
-        args = self.tox_args
-        if args:
-            args = shlex.split(self.tox_args)
-        else:
-            # Run all tests by default
-            args = ['-c', join(dirname(__file__), 'tox.ini'), 'tests']
-        errno = tox.cmdline(args=args)
-        sys.exit(errno)
-
-
-setup(
-    name='{{project.package}}',
-    version=_get_version(),
-    url='{{project.url}}',
-    description='{{project.description}}',
-    long_description=open(join(dirname(__file__), 'README.md')).read(),
-    author='{{mantainer.name}}',
-    maintainer='{{mantainer.name}}',
-    maintainer_email='{{mantainer.email}}',
-    packages=find_packages(exclude=('tests', 'tests.*')),
-    include_package_data=True,
-    zip_safe=False,
-    classifiers=[
-        'Development Status :: 3 - Alpha',
-        'Intended Audience :: Developers',
-        'Programming Language :: Python',
-        'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.7',
-        'Programming Language :: Python :: 3',
-        'Programming Language :: Python :: 3.3',
-        'Programming Language :: Python :: 3.4',
-        'Programming Language :: Python :: 3.5',
-        'Topic :: Software Development :: Libraries :: Python Modules',
-    ],
-    install_requires=[
-        'scikit-learn>=0.18.2',
-        'scipy>=0.19.1',
-        'numpy>=1.13.1',
-        'pandas>=0.20.3',
-        'matplotlib>=2.0.2',
-        'marvin-python-toolbox>={{project.toolbox_version}}',
-        'Fabric>=1.14.0',
-    ],
-    tests_require=REQUIREMENTS_TESTS,
-    extras_require={
-        'testing': REQUIREMENTS_TESTS,
-    },
-    cmdclass={
-        'test': Tox, 'develop': develop, 'install': install
-    },
-)
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/conftest.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/conftest.py
deleted file mode 100644
index 903cfc8..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/conftest.py
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-import os
-import pytest
-
-os.environ['TESTING'] = 'True'
-
-
-@pytest.fixture
-def mocked_params():
-    return {'params': 1}
-
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_acquisitor_and_cleaner.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_acquisitor_and_cleaner.py
deleted file mode 100644
index 7b48c68..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_acquisitor_and_cleaner.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.data_handler import AcquisitorAndCleaner
-
-
-class TestAcquisitorAndCleaner:
-    def test_execute(self, mocked_params):
-        ac = AcquisitorAndCleaner()
-        ac.execute(params=mocked_params)
-        assert not ac._params
-
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_training_preparator.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_training_preparator.py
deleted file mode 100644
index 5a0965f..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_training_preparator.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.data_handler import TrainingPreparator
-
-
-class TestTrainingPreparator:
-    def test_execute(self, mocked_params):
-        ac = TrainingPreparator()
-        ac.execute(params=mocked_params)
-        assert not ac._params
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_feedback.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_feedback.py
deleted file mode 100644
index 5cdde72..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_feedback.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.prediction import Feedback
-
-
-class TestFeedback:
-    def test_execute(self, mocked_params):
-        fb = Feedback()
-        fb.execute(input_message="fake message", params=mocked_params)
-        assert not fb._params
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_prediction_preparator.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_prediction_preparator.py
deleted file mode 100644
index 301f518..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_prediction_preparator.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.prediction import PredictionPreparator
-
-
-class TestPredictionPreparator:
-    def test_execute(self, mocked_params):
-        ac = PredictionPreparator()
-        ac.execute(input_message="fake message", params=mocked_params)
-        assert not ac._params
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_predictor.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_predictor.py
deleted file mode 100644
index e7b3e4a..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_predictor.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.prediction import Predictor
-
-
-class TestPredictor:
-    def test_execute(self, mocked_params):
-        ac = Predictor()
-        ac.execute(input_message="fake message", params=mocked_params)
-        assert not ac._params
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/training/test_metrics_evaluator.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/training/test_metrics_evaluator.py
deleted file mode 100644
index a18f188..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/training/test_metrics_evaluator.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.training import MetricsEvaluator
-
-
-class TestMetricsEvaluator:
-    def test_execute(self, mocked_params):
-        ac = MetricsEvaluator()
-        ac.execute(params=mocked_params)
-        assert not ac._params
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/training/test_trainer.py b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/training/test_trainer.py
deleted file mode 100644
index e959207..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tests/training/test_trainer.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-try:
-    import mock
-
-except ImportError:
-    import unittest.mock as mock
-
-from {{project.package}}.training import Trainer
-
-
-class TestTrainer:
-    def test_execute(self, mocked_params):
-        ac = Trainer()
-        ac.execute(params=mocked_params)
-        assert not ac._params
-
diff --git a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tox.ini b/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tox.ini
deleted file mode 100644
index 19da870..0000000
--- a/python-toolbox/marvin_python_toolbox/management/templates/python-engine/tox.ini
+++ /dev/null
@@ -1,8 +0,0 @@
-[tox]
-envlist = py27
-
-[testenv]
-deps=pytest
-     pytest-cov
-     mock
-commands=py.test --cov={envsitepackagesdir}/{{project.package}} --cov-report html --cov-report xml {posargs}
\ No newline at end of file
diff --git a/python-toolbox/marvin_python_toolbox/management/test.py b/python-toolbox/marvin_python_toolbox/management/test.py
deleted file mode 100644
index 0f837a5..0000000
--- a/python-toolbox/marvin_python_toolbox/management/test.py
+++ /dev/null
@@ -1,141 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-
-import sys
-import os
-import os.path
-import subprocess
-import shutil
-import tempfile
-import click
-
-from .pkg import copy
-
-
-@click.group('test')
-def cli():
-    pass
-
-
-@cli.command('test', help='Run tests.')
-@click.option('--cov/--no-cov', default=True)
-@click.option('--no-capture', is_flag=True)
-@click.option('--pdb', is_flag=True)
-@click.argument('args', default='')
-@click.pass_context
-def test(ctx, cov, no_capture, pdb, args):
-    os.environ['TESTING'] = 'true'
-
-    if args:
-        args = args.split(' ')
-    else:
-        args = ['tests']
-
-    if no_capture:
-        args += ['--capture=no']
-
-    if pdb:
-        args += ['--pdb']
-
-    cov_args = []
-    if cov:
-        cov_args += ['--cov', os.path.relpath(ctx.obj['package_path'],
-                                              start=ctx.obj['base_path']),
-                     '--cov-report', 'html',
-                     '--cov-report', 'xml',
-                     '--cov-report', 'term-missing',
-                     ]
-
-    command = ['py.test'] + cov_args + args
-    print(' '.join(command))
-    env = os.environ.copy()
-    exitcode = subprocess.call(command, cwd=ctx.obj['base_path'], env=env)
-    sys.exit(exitcode)
-
-
-@cli.command('test-tox', help='Run tests using a new virtualenv.')
-@click.argument('args', default='')
-@click.pass_context
-def tox(ctx, args):
-    os.environ['TESTING'] = 'true'
-
-    if args:
-        args = ['-a'] + args.split(' ')
-    else:
-        args = []
-    # Copy the project to a tmp dir
-    tmp_dir = tempfile.mkdtemp()
-    tox_dir = os.path.join(tmp_dir, ctx.obj['package_name'])
-    copy(ctx.obj['base_path'], tox_dir)
-    command = ['python', 'setup.py', 'test'] + args
-    env = os.environ.copy()
-    exitcode = subprocess.call(command, cwd=tox_dir, env=env)
-    shutil.rmtree(tmp_dir)
-    sys.exit(exitcode)
-
-
-@cli.command('test-tdd', help='Watch for changes to run tests automatically.')
-@click.option('--cov/--no-cov', default=False)
-@click.option('--no-capture', is_flag=True)
-@click.option('--pdb', is_flag=True)
-@click.option('--partial', is_flag=True)
-@click.argument('args', default='')
-@click.pass_context
-def tdd(ctx, cov, no_capture, pdb, partial, args):
-    os.environ['TESTING'] = 'true'
-
-    if args:
-        args = args.split(' ')
-    else:
-        args = [os.path.relpath(
-            os.path.join(ctx.obj['base_path'], 'tests'))]
-
-    if no_capture:
-        args += ['--capture=no']
-
-    if pdb:
-        args += ['--pdb']
-
-    if partial:
-        args += ['--testmon']
-
-    cov_args = []
-    if cov:
-        cov_args += ['--cov', os.path.relpath(ctx.obj['package_path'],
-                                              start=ctx.obj['base_path']),
-                     '--cov-report', 'html',
-                     '--cov-report', 'xml',
-                     '--cov-report', 'term-missing',
-                     ]
-
-    command = ['ptw', '-p', '--'] + cov_args + args
-    print(' '.join(command))
-    env = os.environ.copy()
-    exitcode = subprocess.call(command, cwd=ctx.obj['base_path'], env=env)
-    sys.exit(exitcode)
-
-
-@cli.command('test-checkpep8', help='Check python code style.')
-@click.pass_context
-def pep8(ctx):
-    command = ['pep8', ctx.obj['package_name']]
-    exitcode = subprocess.call(command, cwd=ctx.obj['base_path'])
-    if exitcode == 0:
-        print('Congratulations! Everything looks good.')
-    sys.exit(exitcode)
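The deleted `test` group is plain click plumbing around `py.test`, so commands like these can be exercised in isolation with click's test runner. A sketch using a simplified stand-in command (the command body and context keys below are assumptions that mirror, but do not reproduce, the deleted module):

```python
import click
from click.testing import CliRunner


@click.command('test')
@click.option('--cov/--no-cov', default=True)
@click.argument('args', default='')
@click.pass_context
def fake_test(ctx, cov, args):
    # The real command builds a py.test invocation from these inputs.
    click.echo('would run: py.test {0} (cov={1}, base={2})'.format(
        args or 'tests', cov, ctx.obj['base_path']))


runner = CliRunner()
result = runner.invoke(fake_test, ['--no-cov', 'tests/test_foo.py'],
                       obj={'base_path': '.'})
print(result.output)  # would run: py.test tests/test_foo.py (cov=False, base=.)
```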
diff --git a/python-toolbox/notebooks/count-db-tables-rows.ipynb b/python-toolbox/notebooks/count-db-tables-rows.ipynb
deleted file mode 100644
index a649288..0000000
--- a/python-toolbox/notebooks/count-db-tables-rows.ipynb
+++ /dev/null
@@ -1,150 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Count DB Tables Rows"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Getting a Spark session"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 1,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from marvin_python_toolbox.common.data_source_provider import get_spark_session"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 2,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "spark = get_spark_session(enable_hive=True)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Getting all local Hive databases"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "dbs = spark.sql(\"show databases\").collect()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 4,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "Connecting with core db ...\n",
-      "   bsc_product [14953204]\n",
-      "   mis_product_hierarchy [5796251]\n",
-      "Connecting with default db ...\n",
-      "Connecting with marvin db ...\n",
-      "   simple_product_classification_engine_core_bsc_product_120374ac16e58cdf8f0c050d0f698addadf2c41c [14953204]\n",
-      "   simple_product_classification_engine_core_mis_product_hierarchy_0b8069f3ba31eedca44b30bc8a61130f5776d119 [5796251]\n"
-     ]
-    }
-   ],
-   "source": [
-    "for db in dbs:\n",
-    "    db_name = db['databaseName']\n",
-    "    print(\"Connecting with {} db ...\".format(db_name)) \n",
-    "    spark.sql(\"use {}\".format(db_name))\n",
-    "    tables = spark.sql(\"show tables\").collect()\n",
-    "    \n",
-    "    for table in tables:\n",
-    "        table_name = table['tableName']\n",
-    "        count = spark.sql(\"select 1 from {}\".format(table_name)).count()\n",
-    "        print(\"   {} [{}]\".format(table_name, count))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Stopping and releasing the Spark session"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "spark.stop()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": 6,
-   "metadata": {},
-   "outputs": [
-    {
-     "data": {
-      "text/plain": [
-       "14953204"
-      ]
-     },
-     "execution_count": 6,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
-   "source": [
-    "spark.sql(\"select * from core.bsc_product\").count()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "python2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.6"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/python-toolbox/notebooks/spark_data_source_test.ipynb b/python-toolbox/notebooks/spark_data_source_test.ipynb
deleted file mode 100644
index 2c120b4..0000000
--- a/python-toolbox/notebooks/spark_data_source_test.ipynb
+++ /dev/null
@@ -1,49 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": 3,
-   "metadata": {},
-   "outputs": [
-    {
-     "name": "stdout",
-     "output_type": "stream",
-     "text": [
-      "+------------+\n",
-      "|databaseName|\n",
-      "+------------+\n",
-      "|     default|\n",
-      "+------------+\n",
-      "\n"
-     ]
-    }
-   ],
-   "source": [
-    "from marvin_python_toolbox.common.data_source_provider import get_spark_session\n",
-    "spark = get_spark_session()\n",
-    "spark.sql(\"show databases\").show()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "python2"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.6"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/python-toolbox/pytest.ini b/python-toolbox/pytest.ini
deleted file mode 100644
index cab8644..0000000
--- a/python-toolbox/pytest.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[pytest]
-minversion    = 2.0
-norecursedirs = .git .tox .eggs .cache *.egg build dist tmp*
-python_files  = test*.py
\ No newline at end of file
diff --git a/python-toolbox/setup.cfg b/python-toolbox/setup.cfg
deleted file mode 100644
index e69de29..0000000
--- a/python-toolbox/setup.cfg
+++ /dev/null
diff --git a/python-toolbox/tests/management/test_engine.py b/python-toolbox/tests/management/test_engine.py
deleted file mode 100644
index 9daa4f4..0000000
--- a/python-toolbox/tests/management/test_engine.py
+++ /dev/null
@@ -1,217 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
-    import mock
-except ImportError:
-    import unittest.mock as mock
-
-from mock import call
-from mock import ANY
-from marvin_python_toolbox.management.engine import MarvinDryRun
-from marvin_python_toolbox.management.engine import dryrun
-from marvin_python_toolbox.management.engine import engine_httpserver
-from marvin_python_toolbox.management.engine import _create_virtual_env
-from marvin_python_toolbox.management.engine import _make_data_link
-import os
-
-
-class mocked_ctx(object):
-    obj = {'package_name': 'test_package', 'config': {'inidir': 'test_dir'}}
-
-
-def mocked_sleep(value):
-    if value == 100:
-        raise KeyboardInterrupt()
-
-
-class mocked_acquisitor():
-    def __init__(self, persistence_mode, is_remote_calling, default_root_path):
-        self.persistence_mode = persistence_mode
-        self.is_remote_calling = is_remote_calling
-        self.default_root_path = default_root_path
-
-    def execute(self, **kwargs):
-        print('test')
-
-
-@mock.patch('marvin_python_toolbox.management.engine.time.time')
-@mock.patch('marvin_python_toolbox.management.engine.MarvinDryRun')
-@mock.patch('marvin_python_toolbox.management.engine.sys.exit')
-@mock.patch('marvin_python_toolbox.management.engine.os.system')
-def test_dryrun(system_mocked, exit_mocked, MarvinDryRun_mocked, time_mocked):
-    params = '/tmp/params'
-    messages_file = '/tmp/messages'
-    feedback_file = '/tmp/feedback'
-    action = 'all'
-    spark_conf = '/opt/spark/conf'
-    time_mocked.return_value = 555
-
-    dryrun(ctx=mocked_ctx, action=action, params_file=params, messages_file=messages_file, feedback_file=feedback_file, initial_dataset=None,
-           dataset=None, model=None, metrics=None, response=False, spark_conf=spark_conf, profiling=None)
-
-    time_mocked.assert_called()
-    exit_mocked.assert_called_with("Stoping process!")
-    MarvinDryRun_mocked.assert_called_with(ctx=mocked_ctx, messages=[{}, {}], print_response=False)
-
-    MarvinDryRun_mocked.return_value.execute.assert_called_with(clazz='Feedback', dataset=None, initial_dataset=None, metrics=None, model=None,
-                                                                params={}, profiling_enabled=None)
-
-    action = 'acquisitor'
-
-    dryrun(ctx=mocked_ctx, action=action, params_file=params, messages_file=messages_file, feedback_file=feedback_file, initial_dataset=None,
-           dataset=None, model=None, metrics=None, response=False, spark_conf=spark_conf, profiling=None)
-
-    time_mocked.assert_called()
-    MarvinDryRun_mocked.assert_called_with(ctx=mocked_ctx, messages=[{}, {}], print_response=False)
-
-
-@mock.patch('marvin_python_toolbox.management.engine.json.dumps')
-@mock.patch('marvin_python_toolbox.management.engine.dynamic_import')
-def test_marvindryrun(import_mocked, dumps_mocked):
-    messages = ['/tmp/messages', '/tmp/feedback']
-    response = 'response'
-    clazz = 'PredictionPreparator'
-    import_mocked.return_value = mocked_acquisitor
-
-    test_dryrun = MarvinDryRun(ctx=mocked_ctx, messages=messages, print_response=response)
-    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=True)
-
-    import_mocked.assert_called_with("{}.{}".format('test_package', 'PredictionPreparator'))
-    dumps_mocked.assert_called_with(None, indent=4, sort_keys=True)
-
-    clazz = 'Feedback'
-    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
-
-    import_mocked.assert_called_with("{}.{}".format('test_package', 'Feedback'))
-
-    clazz = 'Predictor'
-    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
-
-    import_mocked.assert_called_with("{}.{}".format('test_package', 'PredictionPreparator'))
-
-    clazz = 'test'
-    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=True)
-    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
-
-    import_mocked.assert_called_with("{}.{}".format('test_package', 'test'))
-
-    response = False
-    clazz = 'PredictionPreparator'
-
-    MarvinDryRun(ctx=mocked_ctx, messages=messages, print_response=response)
-    test_dryrun = MarvinDryRun(ctx=mocked_ctx, messages=messages, print_response=response)
-    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
-
-    dumps_mocked.assert_called_with(None, indent=4, sort_keys=True)
-
-
-@mock.patch('marvin_python_toolbox.management.engine.sys.exit')
-@mock.patch('marvin_python_toolbox.management.engine.time.sleep')
-@mock.patch('marvin_python_toolbox.management.engine.MarvinData')
-@mock.patch('marvin_python_toolbox.management.engine.Config')
-@mock.patch('marvin_python_toolbox.management.engine.subprocess.Popen')
-def test_engine_httpserver(Popen_mocked, Config_mocked, MarvinData_mocked, sleep_mocked, exit_mocked):
-
-    sleep_mocked.side_effect = mocked_sleep
-
-    engine_httpserver(ctx=mocked_ctx, action='all', params_file='test_params', initial_dataset='test_id', dataset='test_d', model='test_m', metrics='test_me',
-                      protocol='test_protocol', spark_conf='test_conf', http_host='test_host', http_port=9999, executor_path='test_executor',
-                      max_workers=9, max_rpc_workers=99, extra_executor_parameters="-DXX=123")
-
-    expected_calls = []
-
-    expected_calls.append(call([
-        'marvin', 'engine-grpcserver',
-        '-a', 'all',
-        '-w', '9',
-        '-rw', '99',
-        ANY, ANY,
-        ANY, ANY,
-        ANY, ANY,
-        ANY, ANY,
-        ANY, ANY,
-        ANY, ANY]
-    ))
-
-    expected_calls.append(call([
-        'java',
-        '-DmarvinConfig.engineHome=test_dir',
-        '-DmarvinConfig.ipAddress=test_host',
-        '-DmarvinConfig.port=9999',
-        '-DmarvinConfig.protocol=test_protocol',
-        '-DXX=123',
-        '-jar',
-        MarvinData_mocked.download_file('test_executor')]
-    ))
-
-    Popen_mocked.assert_has_calls(expected_calls)
-    exit_mocked.assert_called_with(0)
-
-
-@mock.patch('marvin_python_toolbox.management.engine.subprocess.Popen')
-def test_create_virtual_env(Popen_mocked):
-    name = "my_project"
-    dest = "/tmp/xxx"
-    python = "python"
-
-    mockx = mock.MagicMock()
-    mockx.wait.return_value = 0
-    Popen_mocked.return_value = mockx
-
-    env_name = _create_virtual_env(name, dest, python)
-
-    commands = [
-        'bash',
-        '-c',
-        '. virtualenvwrapper.sh; mkvirtualenv -p {0} -a {1} {2};'.format(python, dest, env_name)
-    ]
-
-    Popen_mocked.assert_called_with(commands, env=os.environ)
-    assert env_name == 'my-project-env'
-
-
-@mock.patch('marvin_python_toolbox.management.engine.sys')
-@mock.patch('marvin_python_toolbox.management.engine.subprocess.Popen')
-def test_create_virtual_env_error(Popen_mocked, sys_mocked):
-    name = "my_project"
-    dest = "/tmp/xxx"
-    python = "python"
-
-    mockx = mock.MagicMock()
-    mockx.wait.return_value = 3
-    Popen_mocked.return_value = mockx
-
-    env_name = _create_virtual_env(name, dest, python)
-
-    commands = [
-        'bash',
-        '-c',
-        '. virtualenvwrapper.sh; mkvirtualenv -p {0} -a {1} {2};'.format(python, dest, env_name)
-    ]
-
-    Popen_mocked.assert_called_with(commands, env=os.environ)
-    mockx.wait.assert_called_once()
-    # sys_mocked.exit.assert_called_once_with(1)
-
-
-@mock.patch('marvin_python_toolbox.management.engine.os.symlink')
-def test_make_data_link_call_symlink(mock_symlink):
-    os.environ['MARVIN_DATA_PATH'] = '/tmp/'
-    dest = '/tmp/'
-    _make_data_link(dest)
-    mock_symlink.assert_called_once_with('/tmp/', '/tmp/notebooks/data')
diff --git a/python-toolbox/tests/management/test_hive.py b/python-toolbox/tests/management/test_hive.py
deleted file mode 100644
index 9d7ee7b..0000000
--- a/python-toolbox/tests/management/test_hive.py
+++ /dev/null
@@ -1,809 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
-    import mock
-except ImportError:
-    import unittest.mock as mock
-
-from marvin_python_toolbox.management import hive
-
-
-@mock.patch('marvin_python_toolbox.management.hive.json')
-def test_hive_generateconf_write_file_with_json(mocked_json):
-    default_conf = [{
-        "origin_host": "xxx_host_name",
-        "origin_db": "xxx_db_name",
-        "origin_queue": "marvin",
-        "target_table_name": "xxx_table_name",
-        "sample_sql": "SELECT * FROM XXX",
-        "sql_id": "1"
-    }]
-
-    mocked_open = mock.mock_open()
-    with mock.patch('marvin_python_toolbox.management.hive.open', mocked_open, create=True):
-        hive.hive_generateconf(None)
-
-    mocked_open.assert_called_once_with('hive_dataimport.conf', 'w')
-    mocked_json.dump.assert_called_once_with(default_conf, mocked_open(), indent=2)
-
-
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.reset_remote_tables')
-def test_hive_resetremote_call_HiveDataImporter_reset_remote_tables(reset_mocked): 
-    hive.hive_resetremote(ctx=None, host="test", engine="test", queue="test")
-    reset_mocked.assert_called_once_with()
-
-
-@mock.patch('marvin_python_toolbox.management.hive.read_config')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.__init__')
-def test_hive_dataimport_without_config(init_mocked, read_config_mocked):
-    read_config_mocked.return_value = None
-
-    ctx = conf = sql_id = engine = \
-        skip_remote_preparation = force_copy_files = validate = force =\
-        force_remote = max_query_size = destination_host = destination_port =\
-        destination_host_username = destination_host_password = destination_hdfs_root_path = None
-
-    hive.hive_dataimport(
-        ctx, conf, sql_id, engine, 
-        skip_remote_preparation, force_copy_files, validate, force,
-        force_remote, max_query_size, destination_host, destination_port,
-        destination_host_username, destination_host_password, destination_hdfs_root_path
-    )
-
-    init_mocked.assert_not_called()
-
-
-@mock.patch('marvin_python_toolbox.management.hive.read_config')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.__init__')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.table_exists')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.import_sample')
-def test_hive_dataimport_with_config(import_sample_mocked, table_exists_mocked, init_mocked, read_config_mocked):
-    read_config_mocked.return_value = [{'origin_db': 'test', 'target_table_name': 'test'}]
-    init_mocked.return_value = None
-
-    ctx = sql_id = engine = \
-        skip_remote_preparation = force_copy_files = validate =\
-        force_remote = max_query_size = destination_port =\
-        destination_host_username = destination_host_password = destination_hdfs_root_path = None
-
-    force = True
-    conf = '/path/to/conf'
-    destination_host = 'test'
-
-    hive.hive_dataimport(
-        ctx, conf, sql_id, engine, 
-        skip_remote_preparation, force_copy_files, validate, force,
-        force_remote, max_query_size, destination_host, destination_port,
-        destination_host_username, destination_host_password, destination_hdfs_root_path
-    )
-
-    init_mocked.assert_called_once_with(
-        max_query_size=max_query_size,
-        destination_host=destination_host,
-        destination_port=destination_port,
-        destination_host_username=destination_host_username,
-        destination_host_password=destination_host_password,
-        destination_hdfs_root_path=destination_hdfs_root_path,
-        origin_db='test',
-        target_table_name='test',
-        engine=engine,
-    )
-    import_sample_mocked.assert_called_once_with(
-        create_temp_table=True,
-        copy_files=None,
-        validate_query=None,
-        force_create_remote_table=None
-    )
-
-
-@mock.patch('marvin_python_toolbox.management.hive.read_config')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.__init__')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.table_exists')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.import_sample')
-def test_hive_dataimport_with_config_sql_id(import_sample_mocked, table_exists_mocked, init_mocked, read_config_mocked):
-    read_config_mocked.return_value = [
-        {'origin_db': 'test', 'target_table_name': 'test', 'sql_id': 'test'},
-        {'origin_db': 'bla', 'target_table_name': 'bla', 'sql_id': 'bla'},
-    ]
-    init_mocked.return_value = None
-
-    ctx = sql_id = engine = \
-        skip_remote_preparation = force_copy_files = validate =\
-        force_remote = max_query_size = destination_port =\
-        destination_host_username = destination_host_password = destination_hdfs_root_path = None
-
-    sql_id = 'test'
-    force = True
-    conf = '/path/to/conf'
-    destination_host = 'test'
-
-    hive.hive_dataimport(
-        ctx, conf, sql_id, engine, 
-        skip_remote_preparation, force_copy_files, validate, force,
-        force_remote, max_query_size, destination_host, destination_port,
-        destination_host_username, destination_host_password, destination_hdfs_root_path
-    )
-
-    init_mocked.assert_called_once_with(
-        max_query_size=max_query_size,
-        destination_host=destination_host,
-        destination_port=destination_port,
-        destination_host_username=destination_host_username,
-        destination_host_password=destination_host_password,
-        destination_hdfs_root_path=destination_hdfs_root_path,
-        origin_db='test',
-        target_table_name='test',
-        sql_id='test',
-        engine=engine,
-    )
-    import_sample_mocked.assert_called_once_with(
-        create_temp_table=True,
-        copy_files=None,
-        validate_query=None,
-        force_create_remote_table=None
-    )
-
-
-@mock.patch('marvin_python_toolbox.management.hive.read_config')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.table_exists')
-@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.import_sample')
-def test_hive_dataimport_with_config_force_false(import_sample_mocked, table_exists_mocked, read_config_mocked):
-    table_exists_mocked.return_value = False
-    read_config_mocked.return_value = [{
-        'origin_db': 'test',
-        'target_table_name': 'test',
-        'origin_queue':'test',
-        'origin_host':'test',
-        'sample_sql':'test',
-        'sql_id':'test'
-    }]
-
-    ctx = sql_id = engine = \
-        skip_remote_preparation = force_copy_files = validate =\
-        force_remote = max_query_size = destination_port =\
-        destination_host_username = destination_host_password = destination_hdfs_root_path = None
-
-    force = False
-    conf = '/path/to/conf'
-    destination_host = 'test'
-
-    hdi = hive.HiveDataImporter(
-        max_query_size=max_query_size,
-        destination_host=destination_host,
-        destination_port=destination_port,
-        destination_host_username=destination_host_username,
-        destination_host_password=destination_host_password,
-        destination_hdfs_root_path=destination_hdfs_root_path,
-        origin_db='test',
-        target_table_name='test',
-        engine=engine,
-        sql_id='test',
-        origin_host='test',
-        origin_queue='test',
-        sample_sql='test',
-    )
-
-    with mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter', return_value=hdi):
-        hive.hive_dataimport(
-            ctx, conf, sql_id, engine, 
-            skip_remote_preparation, force_copy_files, validate, force,
-            force_remote, max_query_size, destination_host, destination_port,
-            destination_host_username, destination_host_password, destination_hdfs_root_path
-        )
-
-        table_exists_mocked.assert_called_once_with(
-            host=hdi.destination_host, db=hdi.origin_db, table=hdi.target_table_name
-        )
-
-        import_sample_mocked.assert_called_once_with(
-            create_temp_table=True,
-            copy_files=None,
-            validate_query=None,
-            force_create_remote_table=None
-        )
-
-
-@mock.patch('marvin_python_toolbox.management.hive.json')
-@mock.patch('marvin_python_toolbox.management.hive.os.path')
-def test_read_config_with_existing_path(path_mocked, json_mocked):
-    path_mocked.exists.return_value = True
-    path_mocked.join.return_value = 'test.conf'
-
-    mocked_open = mock.mock_open()
-    with mock.patch('marvin_python_toolbox.management.hive.open', mocked_open, create=True):
-        hive.read_config("test.conf")
-
-    mocked_open.assert_called_once_with('test.conf', 'r')
-    json_mocked.load.assert_called_once_with(mocked_open())
-
-
-@mock.patch('marvin_python_toolbox.management.hive.json')
-@mock.patch('marvin_python_toolbox.management.hive.os.path')
-def test_read_config_with_not_existing_path(path_mocked, json_mocked):
-    path_mocked.exists.return_value = False
-    path_mocked.join.return_value = 'test.conf'
-
-    mocked_open = mock.mock_open()
-    with mock.patch('marvin_python_toolbox.management.hive.open', mocked_open, create=True):
-        hive.read_config("test.conf")
-
-    mocked_open.assert_not_called()
-    json_mocked.load.assert_not_called()
-
-
-class TestHiveDataImporter:
-
-    def setup(self):
-        self.hdi = hive.HiveDataImporter(
-            max_query_size=13,
-            destination_host='test',
-            destination_port=None,
-            destination_host_username=None,
-            destination_host_password=None,
-            destination_hdfs_root_path='/tmp',
-            origin_db='test',
-            target_table_name='test',
-            engine='test',
-            sql_id='test',
-            origin_host='test',
-            origin_queue='test',
-            sample_sql='test',
-        )
-
-        self.mock_methods = {
-            'get_createtable_ddl': mock.DEFAULT,
-            'get_partitions': mock.DEFAULT,
-            'has_partitions': mock.DEFAULT,
-            'create_database': mock.DEFAULT,
-            'table_exists': mock.DEFAULT,
-            'drop_table': mock.DEFAULT,
-            'create_table': mock.DEFAULT,
-            'populate_table': mock.DEFAULT,
-            'get_table_location': mock.DEFAULT,
-            'generate_table_location': mock.DEFAULT,
-            'hdfs_dist_copy': mock.DEFAULT,
-            'create_external_table': mock.DEFAULT,
-            'refresh_partitions': mock.DEFAULT,
-            'drop_view': mock.DEFAULT,
-            'create_view': mock.DEFAULT,
-            'validade_query': mock.DEFAULT,
-            'get_connection': mock.DEFAULT,
-            'print_finish_step': mock.DEFAULT,
-            'print_start_step': mock.DEFAULT
-        }
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.count_rows')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.retrieve_data_sample')
-    def test_validade_query(self, retrieve_mocked, connection_mocked, count_rows_mocked):
-        count_rows_mocked.return_value = 1
-        connection_mocked.return_value = 'connection_mocked'
-        retrieve_mocked.return_value = {'estimate_query_mean_per_line': 42}
-
-        self.hdi.validade_query()
-
-        connection_mocked.assert_called_once_with(
-            host=self.hdi.origin_host,
-            db=self.hdi.origin_db,
-            queue=self.hdi.origin_queue
-        )
-        count_rows_mocked.assert_called_once_with(conn='connection_mocked', sql=self.hdi.sample_sql)
-        retrieve_mocked.assert_called_once_with(conn='connection_mocked', full_table_name=self.hdi.full_table_name)
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
-    def test_table_exists_table_not_exists(self, connection_mocked, show_log_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        cursor.fetchall.return_value = []
-        connection_mocked.return_value = conn
-
-        table_exists = self.hdi.table_exists(host='host', db='db', table='table')
-
-        show_log_mocked.assert_called_once_with(cursor)
-        assert table_exists is False
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
-    def test_table_exists_table_exists(self, connection_mocked, show_log_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        cursor.fetchall.return_value = ['test']
-        connection_mocked.return_value = conn
-
-        table_exists = self.hdi.table_exists(host='host', db='db', table='table')
-
-        show_log_mocked.assert_has_calls([mock.call(cursor)] * 2)
-        assert table_exists is True
-
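Both table_exists tests drive the method through a mocked connection: an empty fetchall() means the table is absent, and show_log always runs over the cursor. A rough sketch consistent with the first test; the SHOW TABLES statement is an assumption, and the doubled show_log call asserted in the second test suggests the real method issues a follow-up statement when a row comes back, which this sketch omits:

```python
def table_exists(self, host, db, table):
    """True when `table` is listed in `db` on `host` (sketch only)."""
    conn = self.get_connection(host=host, db=db, queue='default')
    cursor = conn.cursor()
    cursor.execute("SHOW TABLES LIKE '{0}'".format(table))  # assumed statement
    self.show_log(cursor)
    rows = cursor.fetchall()
    cursor.close()
    return len(rows) > 0
```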
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.drop_table')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.generate_table_location')
-    def test_reset_remote_tables_without_valid_tables(self, tb_loc_mock, ssh_cli_mock, conn_mock,
-                                                      delete_mock, drop_mock, log_mock):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        cursor.fetchall.return_value = []
-        conn_mock.return_value = conn
-
-        self.hdi.reset_remote_tables()
-
-        conn_mock.assert_called_once_with(
-            host=self.hdi.origin_host,
-            db=self.hdi.temp_db_name,
-            queue=self.hdi.origin_queue
-        )
-        log_mock.assert_called_once_with(cursor)
-
-        drop_mock.assert_not_called()
-        tb_loc_mock.assert_not_called()
-        delete_mock.assert_not_called()
-        ssh_cli_mock.assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.drop_table')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.generate_table_location')
-    def test_reset_remote_tables_with_valid_tables(self, tb_loc_mock, ssh_cli_mock,
-                                                   conn_mock, delete_mock, drop_mock, log_mock):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        cursor.fetchall.return_value = [['test']]
-        conn_mock.return_value = conn
-
-        tb_loc_mock.return_value = 'test'
-        ssh_cli_mock.return_value = 'test'
-
-        self.hdi.reset_remote_tables()
-
-        conn_mock.assert_called_once_with(
-            host=self.hdi.origin_host,
-            db=self.hdi.temp_db_name,
-            queue=self.hdi.origin_queue
-        )
-        log_mock.assert_called_once_with(cursor)
-
-        drop_mock.assert_called_once_with(conn=conn, table_name="marvin.test")
-        tb_loc_mock.assert_called_once_with(
-            self.hdi.destination_hdfs_root_path,
-            self.hdi.origin_host,
-            self.hdi.temp_db_name + '.db',
-            "test"
-        )
-        delete_mock.assert_called_once_with('test', 'test')
-        ssh_cli_mock.assert_called_once_with(
-            self.hdi.origin_host,
-            self.hdi.destination_host_username,
-            self.hdi.destination_host_password
-        )
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.validade_query')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.print_finish_step')
-    def test_import_sample_with_invalid_query_and_flag_true_stop(self, finish_step_mock, conn_mock, val_query_mock):
-        val_query_mock.return_value = False
-
-        self.hdi.import_sample(validate_query=True)
-
-        val_query_mock.assert_called_once_with()
-        finish_step_mock.assert_called_once_with()
-        conn_mock.assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.print')
-    def test_import_sample_with_invalid_query_and_flag_false_dont_stop(self, print_mocked):
-        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
-            **self.mock_methods
-        ) as mocks:
-
-            self.hdi.import_sample(validate_query=False)
-
-            assert mocks['print_finish_step'].call_count == 6
-            assert mocks['get_connection'].call_count == 5
-
-            mocks['validade_query'].assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.print')
-    def test_import_sample_with_partitions_stop(self, print_mocked):
-        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
-            **self.mock_methods
-        ) as mocks:
-
-            conn = mock.MagicMock()
-            mocks['has_partitions'].return_value = True
-            mocks['get_connection'].return_value = conn
-
-            self.hdi.import_sample(validate_query=True)
-
-            assert mocks['get_connection'].call_count == 2
-            mocks['get_createtable_ddl'].assert_called_once_with(
-                conn=conn,
-                origin_table_name=self.hdi.target_table_name,
-                dest_table_name=self.hdi.temp_table_name
-            )
-            mocks['get_partitions'].assert_called_once_with(
-                mocks['get_createtable_ddl'].return_value
-            )
-            mocks['create_database'].assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.print')
-    def test_import_sample_with_create_temp_table_false_dont_call_create_table(self, print_mocked):
-        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
-            **self.mock_methods
-        ) as mocks:
-
-            self.hdi.import_sample(create_temp_table=False)
-
-            mocks['table_exists'].assert_not_called()
-            mocks['drop_table'].assert_not_called()
-            mocks['create_table'].assert_not_called()
-            mocks['populate_table'].assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.print')
-    def test_import_sample_with_create_temp_table_true_call_create_table(self, print_mocked):
-        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
-            **self.mock_methods
-        ) as mocks:
-
-            mocks['has_partitions'].return_value = False
-            self.hdi.import_sample(create_temp_table=True, force_create_remote_table=True)
-
-            assert mocks['drop_table'].call_count == 2
-            assert mocks['create_table'].call_count == 1
-            assert mocks['populate_table'].call_count == 1
-
-    @mock.patch('marvin_python_toolbox.management.hive.print')
-    def test_import_sample(self, print_mocked):
-        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
-            **self.mock_methods
-        ) as mocks:
-
-            mocks['validade_query'].return_value = True
-            mocks['has_partitions'].return_value = False
-            self.hdi.import_sample()
-
-            assert mocks['print_finish_step'].call_count == 6
-            assert mocks['get_connection'].call_count == 5
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.clean_ddl')
-    def test_get_createtable_ddl(self, clean_ddl_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        cursor.fetchall.return_value = [['l1'], ['l2']]
-        ddl = mock.MagicMock()
-        clean_ddl_mocked.return_value = ddl
-
-        self.hdi.get_createtable_ddl(conn, 'marvin', 'test')
-
-        cursor.execute.assert_called_once_with("SHOW CREATE TABLE marvin")
-        clean_ddl_mocked.assert_called_once_with('l1l2', remove_formats=False, remove_general=True)
-        ddl.replace.assert_called_once_with('marvin', 'test')
-        cursor.close.assert_called_once_with()
-
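The assertions above fully determine get_createtable_ddl's shape: Hive returns the DDL one fragment per row, the fragments are concatenated, cleaned, and the origin table name is swapped for the destination's. A sketch along those lines:

```python
def get_createtable_ddl(self, conn, origin_table_name, dest_table_name):
    """Fetch a table's CREATE TABLE statement and retarget it (sketch)."""
    cursor = conn.cursor()
    cursor.execute("SHOW CREATE TABLE {0}".format(origin_table_name))
    # SHOW CREATE TABLE returns one DDL fragment per row; stitch them together.
    ddl = ''.join(row[0] for row in cursor.fetchall())
    ddl = self.clean_ddl(ddl, remove_formats=False, remove_general=True)
    cursor.close()
    return ddl.replace(origin_table_name, dest_table_name)
```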
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    def test_execute_db_command(self, show_log_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        command = "bla bla bla"
-
-        self.hdi._execute_db_command(conn, command)
-
-        cursor.execute.assert_called_once_with(command)
-        show_log_mocked.assert_called_once_with(cursor)
-        cursor.close.assert_called_once_with()
-
-    @mock.patch('marvin_python_toolbox.management.hive.hive')
-    def test_get_connection(self, pyhive_mocked):
-        host = 'test'
-        self.hdi.get_connection(host, db='DEFAULT', queue='default')
-
-        pyhive_mocked.connect.assert_called_once_with(
-            host=host, database='DEFAULT',
-            configuration={'mapred.job.queue.name': 'default',
-                ' hive.exec.dynamic.partition.mode': 'nonstrict'}
-        )
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    def test_retrieve_data_sample(self, show_log_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-        cursor.description = [('table.col', 'type')]
-        cursor.fetchall.return_value = ['test']
-
-        full_table_name = 'test'
-        sample_limit = 10
-
-        data = self.hdi.retrieve_data_sample(conn, full_table_name, sample_limit)
-
-        sql = "SELECT * FROM {} TABLESAMPLE ({} ROWS)".format(full_table_name, sample_limit)
-
-        cursor.execute.assert_called_once_with(sql)
-        assert data['data_header'][0]['col'] == 'col'
-        assert data['data_header'][0]['table'] == 'table'
-        assert data['data_header'][0]['type'] == 'type'
-        assert data['total_lines'] == 1
-        assert data['data'] == ['test']
-
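This test nails down retrieve_data_sample's query and the shape of its return value; the real method also computes size estimates such as estimate_query_mean_per_line (see test_validade_query above), which this sketch leaves out:

```python
def retrieve_data_sample(self, conn, full_table_name, sample_limit=10):
    """Sample rows plus header metadata from a Hive table (sketch)."""
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM {0} TABLESAMPLE ({1} ROWS)".format(
        full_table_name, sample_limit))
    self.show_log(cursor)
    # cursor.description entries start with ('table.col', 'type', ...);
    # split the qualified name back into its table and column halves.
    data_header = [
        {'table': desc[0].split('.')[0],
         'col': desc[0].split('.')[1],
         'type': desc[1]}
        for desc in cursor.description
    ]
    data = cursor.fetchall()
    return {'data_header': data_header, 'total_lines': len(data), 'data': data}
```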
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    def test_count_rows(self, show_log_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        cursor.fetchone.return_value = [42]
-        conn.cursor.return_value = cursor
-
-        sql = "SELECT COL1, COL2 FROM TABLE"
-        count = self.hdi.count_rows(conn, sql)
-
-        assert count == 42
-        cursor.execute.assert_called_once_with("SELECT COUNT(1) FROM TABLE")
-        show_log_mocked.assert_called_once_with(cursor)
-        cursor.close.assert_called_once_with()
-
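count_rows evidently rewrites the query's select list rather than running the original SQL: "SELECT COL1, COL2 FROM TABLE" becomes "SELECT COUNT(1) FROM TABLE". A sketch of that rewrite, assuming a single uppercase FROM clause:

```python
def count_rows(self, conn, sql):
    """Count the rows a query would return by rewriting its select list (sketch)."""
    # Swap everything before FROM for COUNT(1); assumes one uppercase FROM.
    count_sql = "SELECT COUNT(1) FROM" + sql.split("FROM", 1)[1]
    cursor = conn.cursor()
    cursor.execute(count_sql)
    self.show_log(cursor)
    count = cursor.fetchone()[0]
    cursor.close()
    return count
```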
-    @mock.patch('marvin_python_toolbox.management.hive.logger')
-    def test_show_log(self, logger_mocked):
-        cursor = mock.MagicMock()
-        cursor.fetch_logs.return_value = ['log log log']
-
-        self.hdi.show_log(cursor)
-
-        logger_mocked.debug.assert_called_once_with('log log log')
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
-    def test_save_data(self, show_log_mocked):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        conn.cursor.return_value = cursor
-
-        table = 'test'
-        data = {
-            'total_lines': 2,
-            'data_header': [
-                {'col': 'test_col_1'},
-                {'col': 'test_col_2'},
-            ],
-            'data': [
-                'header',
-                'test_val_1',
-                'test_val_2',
-            ]
-        }
-        self.hdi.save_data(conn, table, data)
-
-        dml = "INSERT INTO test (test_col_1, test_col_2) VALUES (%s, %s)"
-        cursor.executemany.assert_called_once_with(dml, [('test_val_1',), ('test_val_2',)])
-        show_log_mocked.assert_called_once_with(cursor)
-        cursor.close.assert_called_once_with()
-
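save_data's test fixes the generated DML, the %s placeholders, and the fact that the first entry of data['data'] is dropped as a header. How each remaining row becomes a parameter tuple is not pinned down, so the normalization below is an assumption:

```python
def save_data(self, conn, table, data):
    """Insert the sampled rows, skipping the header line (sketch)."""
    cols = ', '.join(header['col'] for header in data['data_header'])
    placeholders = ', '.join(['%s'] * len(data['data_header']))
    dml = "INSERT INTO {0} ({1}) VALUES ({2})".format(table, cols, placeholders)
    # Assumed normalization: drop the header entry, wrap bare values in a tuple.
    rows = [row if isinstance(row, tuple) else (row,) for row in data['data'][1:]]
    cursor = conn.cursor()
    cursor.executemany(dml, rows)
    self.show_log(cursor)
    cursor.close()
```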
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
-    def test_populate_table_with_partitions(self, exec_comm_mock):
-        conn = None
-        table_name = 'test'
-        sql = 'bla bla bla'
-        partitions = [{'col': 'test1'}, {'col': 'test2'}]
-
-        self.hdi.populate_table(conn, table_name, partitions, sql)
-
-        dml = "INSERT OVERWRITE TABLE test PARTITION (test1, test2) bla bla bla"
-        exec_comm_mock.assert_called_once_with(conn, dml)
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
-    def test_populate_table_without_partitions(self, exec_comm_mock):
-        conn = None
-        table_name = 'test'
-        sql = 'bla bla bla'
-        partitions = []
-
-        self.hdi.populate_table(conn, table_name, partitions, sql)
-
-        dml = "INSERT OVERWRITE TABLE test  bla bla bla"
-        exec_comm_mock.assert_called_once_with(conn, dml)
-
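The two populate_table tests assert the generated statement verbatim, including the double space left behind when there are no partitions. A sketch that reproduces both strings:

```python
def populate_table(self, conn, table_name, partitions, sql):
    """INSERT OVERWRITE a table, adding a PARTITION clause when given (sketch)."""
    partition_clause = ''
    if partitions:
        partition_clause = "PARTITION ({0})".format(
            ", ".join(part['col'] for part in partitions))
    # The double space left by an empty clause is asserted verbatim above,
    # so the sketch keeps it.
    dml = "INSERT OVERWRITE TABLE {0} {1} {2}".format(
        table_name, partition_clause, sql)
    self._execute_db_command(conn, dml)
```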
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
-    def test_create_view(self, exec_comm_mock):
-        conn = None
-        view_name = 'view_test'
-        table_name = 'table_test'
-
-        self.hdi.create_view(conn, view_name, table_name)
-
-        dml = "CREATE VIEW {0} AS SELECT * FROM {1}".format(view_name, table_name)
-        exec_comm_mock.assert_called_once_with(conn, dml)
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
-    def test_refresh_partitions(self, exec_comm_mock):
-        conn = None
-        table_name = 'table_test'
-
-        self.hdi.refresh_partitions(conn, table_name)
-
-        sttmt = "MSCK REPAIR TABLE {0}".format(table_name)
-        exec_comm_mock.assert_called_once_with(conn, sttmt)
-
-    def test_get_table_location(self):
-        cursor = mock.MagicMock()
-        conn = mock.MagicMock()
-        cursor.fetchall.return_value = [[' location: ', ' hdfs://test ']]
-        conn.cursor.return_value = cursor
-        table_name = 'test'
-
-        loc = self.hdi.get_table_location(conn, table_name)
-
-        cursor.execute.assert_called_once_with("DESCRIBE FORMATTED test")
-        assert loc == 'hftp://test'
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
-    def test_delete_files(self, cmd_mocked):
-        ssh = 'ssh'
-        url = 'test.com'
-        self.hdi.delete_files(ssh, url)
-
-        cmd = "hdfs dfs -rm -R 'test.com'"
-        cmd_mocked.assert_called_once_with(ssh, cmd)
-
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
-    def test_copy_files(self, cmd_mocked):
-        ssh = 'ssh'
-        origin = "/home/"
-        dest = "/tmp/"
-        self.hdi.copy_files(ssh, origin, dest)
-
-        cmd = "hadoop distcp --update '/home/' '/tmp/'"
-        cmd_mocked.assert_called_once_with(ssh, cmd)
-
-    @mock.patch('marvin_python_toolbox.management.hive.logger')
-    def test_hdfs_commands(self, logger_mocked):
-        i = mock.MagicMock()
-        o = mock.MagicMock()
-        e = mock.MagicMock()
-        ssh = mock.MagicMock()
-        o.readlines.return_value = 'output'
-        e.readlines.return_value = 'error'
-        ssh.exec_command.return_value = (i, o, e)
-        cmd = "command"
-
-        out, err = self.hdi._hdfs_commands(ssh, cmd)
-
-        assert (out, err) == ('output', 'error')
-        logger_mocked.debug.assert_any_call("Executing remote command: command")
-        logger_mocked.debug.assert_any_call("output")
-        logger_mocked.debug.assert_any_call("error")
-
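_hdfs_commands is a thin wrapper over a paramiko-style exec_command: log the command, capture both streams, log them, return them. A sketch matching the assertions; the module-level logger mirrors the patch target:

```python
import logging

logger = logging.getLogger(__name__)  # hive.py exposes a module-level logger


def _hdfs_commands(self, ssh, command):
    """Run a shell command on the remote host and log both streams (sketch)."""
    logger.debug("Executing remote command: {0}".format(command))
    stdin, stdout, stderr = ssh.exec_command(command)
    out = stdout.readlines()
    err = stderr.readlines()
    logger.debug(out)
    logger.debug(err)
    return out, err
```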
-    @mock.patch('marvin_python_toolbox.management.hive.AutoAddPolicy', spec=True)
-    @mock.patch('marvin_python_toolbox.management.hive.SSHClient.connect')
-    @mock.patch('marvin_python_toolbox.management.hive.SSHClient.set_missing_host_key_policy')
-    def test_get_ssh_client(self, set_missing_mocked, connect_mocked, AutoAddPolicyMocked):
-        hdfs_host = 'hdfs://test.com'
-        hdfs_port = '1234'
-        username = 'user'
-        password = 'pass'
-        self.hdi._get_ssh_client(hdfs_host, hdfs_port, username, password)
-
-        set_missing_mocked.assert_called_once_with(AutoAddPolicyMocked.return_value)
-        connect_mocked.assert_called_once_with(
-            hostname=hdfs_host, port=hdfs_port, username=username, password=password
-        )
-
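The patch targets show that hive.py imports SSHClient and AutoAddPolicy directly from paramiko. A sketch of the client setup this test asserts; note that test_reset_remote_tables_with_valid_tables invokes the method with only three arguments, so the real signature likely defaults the port:

```python
from paramiko import AutoAddPolicy, SSHClient


def _get_ssh_client(self, hdfs_host, hdfs_port, username, password):
    """Open an SSH client that auto-accepts unknown host keys (sketch)."""
    ssh = SSHClient()
    ssh.set_missing_host_key_policy(AutoAddPolicy())
    ssh.connect(hostname=hdfs_host, port=hdfs_port,
                username=username, password=password)
    return ssh
```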
-    @mock.patch('marvin_python_toolbox.management.hive.sys')
-    @mock.patch('marvin_python_toolbox.management.hive.logger')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.copy_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
-    def test_hdfs_dist_copy(self, ssh_cli_mock, hdfs_comm_mock, del_files_mock, copy_mock, logger_mock, sys_mock):
-        hdfs_comm_mock.return_value = (42, None)
-        copy_mock.return_value = (None, None)
-        ssh = mock.MagicMock()
-        ssh_cli_mock.return_value = ssh
-
-        force = False
-        hdfs_host = 'hdfs://test.com'
-        hdfs_port = 1234
-        origin = '/home/'
-        dest = '/tmp/'
-
-        self.hdi.hdfs_dist_copy(force, hdfs_host, hdfs_port, origin, dest, username=None, password=None)
-
-        ssh_cli_mock.assert_called_once_with(hdfs_host, hdfs_port, None, None)
-        del_files_mock.assert_not_called()
-        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/home/' | grep -E '^-' | wc -l")
-        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/tmp/' | grep -E '^-' | wc -l")
-        logger_mock.debug.assert_not_called()
-        sys_mock.exit.assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.sys')
-    @mock.patch('marvin_python_toolbox.management.hive.logger')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.copy_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
-    def test_hdfs_dist_copy_with_force(self, ssh_cli_mock, hdfs_comm_mock, del_files_mock, copy_mock, logger_mock, sys_mock):
-        hdfs_comm_mock.return_value = (42, None)
-        copy_mock.return_value = (None, None)
-        ssh = mock.MagicMock()
-        ssh_cli_mock.return_value = ssh
-
-        force = True
-        hdfs_host = 'hdfs://test.com'
-        hdfs_port = 1234
-        origin = '/home/'
-        dest = '/tmp/'
-
-        self.hdi.hdfs_dist_copy(force, hdfs_host, hdfs_port, origin, dest, username=None, password=None)
-
-        ssh_cli_mock.assert_called_once_with(hdfs_host, hdfs_port, None, None)
-        del_files_mock.assert_called_once_with(ssh, dest)
-        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/home/' | grep -E '^-' | wc -l")
-        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/tmp/' | grep -E '^-' | wc -l")
-        logger_mock.debug.assert_not_called()
-        sys_mock.exit.assert_not_called()
-
-    @mock.patch('marvin_python_toolbox.management.hive.sys')
-    @mock.patch('marvin_python_toolbox.management.hive.logger')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.copy_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
-    def test_hdfs_dist_copy_error_copy(self, ssh_cli_mock, hdfs_comm_mock, del_files_mock, copy_mock, logger_mock, sys_mock):
-        hdfs_comm_mock.side_effect = [(42, None), (13, None)]
-        copy_mock.return_value = (None, ['error'])
-        ssh = mock.MagicMock()
-        ssh_cli_mock.return_value = ssh
-
-        force = False
-        hdfs_host = 'hdfs://test.com'
-        hdfs_port = 1234
-        origin = '/home/'
-        dest = '/tmp/'
-
-        self.hdi.hdfs_dist_copy(force, hdfs_host, hdfs_port, origin, dest, username=None, password=None)
-
-        ssh_cli_mock.assert_called_once_with(hdfs_host, hdfs_port, None, None)
-        del_files_mock.assert_not_called()
-        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/home/' | grep -E '^-' | wc -l")
-        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/tmp/' | grep -E '^-' | wc -l")
-        logger_mock.debug.assert_called_once_with('error')
-        sys_mock.exit.assert_called_once_with("Stoping process!")
-
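The three hdfs_dist_copy tests between them pin the flow: open an SSH client, optionally delete the destination first, distcp, count files on both sides with the quoted ls/grep/wc pipeline, and bail out when the copy errored or the counts disagree. A sketch under those assumptions; the exact exit condition is inferred, and logger is the assumed module-level logger from above:

```python
import sys


def hdfs_dist_copy(self, force, hdfs_host, hdfs_port, origin, dest,
                   username=None, password=None):
    """distcp origin to dest, then compare remote file counts (sketch)."""
    ssh = self._get_ssh_client(hdfs_host, hdfs_port, username, password)
    if force:
        self.delete_files(ssh, dest)

    out, err = self.copy_files(ssh, origin, dest)
    if err:
        for line in err:
            logger.debug(line)

    count_cmd = "hdfs dfs -ls -R '{0}' | grep -E '^-' | wc -l"
    origin_count, _ = self._hdfs_commands(ssh, count_cmd.format(origin))
    dest_count, _ = self._hdfs_commands(ssh, count_cmd.format(dest))

    if err or origin_count != dest_count:
        sys.exit("Stoping process!")  # message (typo included) matches the source
```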
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.clean_ddl')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.create_table')
-    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_table_format')
-    def test_create_external_table(self, table_format_mock, create_table_mock, clean_ddl_mock):
-        table_format_mock.return_value = 'test'
-        conn = None
-        temp_table_name = 'temp'
-        ddl = "CREATE TABLE bla bla bla"
-        parquet_file_location = "/tmp/"
-        clean_ddl_mock.return_value = ddl
-
-        self.hdi.create_external_table(conn, temp_table_name, ddl, parquet_file_location)
-
-        table_format_mock.assert_called_once_with(ddl)
-        clean_ddl_mock.assert_called_once_with(ddl, remove_formats=True, remove_general=False)
-        ddl = "CREATE EXTERNAL TABLE bla bla bla STORED AS test LOCATION '/tmp/'"
-        create_table_mock.assert_called_once_with(conn=conn, table_name=temp_table_name, ddl=ddl)
\ No newline at end of file
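Finally, create_external_table rewrites a managed-table DDL into an external table over the already-copied files: detect the storage format, strip format and general clauses via clean_ddl, then append STORED AS and LOCATION. A sketch reproducing the asserted string:

```python
def create_external_table(self, conn, table_name, ddl, parquet_file_location):
    """Turn a managed-table DDL into an EXTERNAL table over existing files (sketch)."""
    table_format = self.get_table_format(ddl)
    ddl = self.clean_ddl(ddl, remove_formats=True, remove_general=False)
    external_ddl = "{0} STORED AS {1} LOCATION '{2}'".format(
        ddl.replace("CREATE TABLE", "CREATE EXTERNAL TABLE"),
        table_format, parquet_file_location)
    self.create_table(conn=conn, table_name=table_name, ddl=external_ddl)
```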
diff --git a/python-toolbox/tests/management/test_notebook.py b/python-toolbox/tests/management/test_notebook.py
deleted file mode 100644
index 0c66eb7..0000000
--- a/python-toolbox/tests/management/test_notebook.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# from click.testing import CliRunner
-
-try:
-    import mock
-except ImportError:
-    import unittest.mock as mock
-
-import os
-from marvin_python_toolbox.management.notebook import notebook, lab
-
-
-class mocked_ctx(object):
-    obj = {'base_path': '/tmp'}
-
-
-@mock.patch('marvin_python_toolbox.management.notebook.sys')
-@mock.patch('marvin_python_toolbox.management.notebook.os.system')
-def test_notebook(system_mocked, sys_mocked):
-    ctx = mocked_ctx()
-    port = 8888
-    enable_security = False
-    allow_root = False
-    spark_conf = '/opt/spark/conf'
-    system_mocked.return_value = 1
-
-    notebook(ctx, port, enable_security, spark_conf, allow_root)
-
-    system_mocked.assert_called_once_with('SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter notebook --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser --config ' + os.environ["MARVIN_ENGINE_PATH"] + '/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py --NotebookApp.token=')
-
-
-@mock.patch('marvin_python_toolbox.management.notebook.sys')
-@mock.patch('marvin_python_toolbox.management.notebook.os.system')
-def test_notebook_with_security(system_mocked, sys_mocked):
-    ctx = mocked_ctx()
-    port = 8888
-    enable_security = True
-    allow_root = False
-    spark_conf = '/opt/spark/conf'
-    system_mocked.return_value = 1
-
-    notebook(ctx, port, enable_security, spark_conf, allow_root)
-
-    system_mocked.assert_called_once_with('SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter notebook --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser --config ' + os.environ["MARVIN_ENGINE_PATH"] + '/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py')
-
-
-@mock.patch('marvin_python_toolbox.management.notebook.sys')
-@mock.patch('marvin_python_toolbox.management.notebook.os.system')
-def test_jupyter_lab(system_mocked, sys_mocked):
-    ctx = mocked_ctx()
-    port = 8888
-    enable_security = False
-    spark_conf = '/opt/spark/conf'
-    system_mocked.return_value = 1
-
-    lab(ctx, port, enable_security, spark_conf)
-
-    system_mocked.assert_called_once_with('SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter-lab --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser --NotebookApp.token=')
-
-
-@mock.patch('marvin_python_toolbox.management.notebook.sys')
-@mock.patch('marvin_python_toolbox.management.notebook.os.system')
-def test_jupyter_lab_with_security(system_mocked, sys_mocked):
-    ctx = mocked_ctx()
-    port = 8888
-    enable_security = True
-    spark_conf = '/opt/spark/conf'
-    system_mocked.return_value = 1
-
-    lab(ctx, port, enable_security, spark_conf)
-
-    system_mocked.assert_called_once_with('SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf jupyter-lab --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 --no-browser')
diff --git a/python-toolbox/tests/management/test_pkg.py b/python-toolbox/tests/management/test_pkg.py
deleted file mode 100644
index 2369967..0000000
--- a/python-toolbox/tests/management/test_pkg.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# from click.testing import CliRunner
-
-try:
-    import mock
-except ImportError:
-    import unittest.mock as mock
-
-from marvin_python_toolbox.management.pkg import _clone
-from marvin_python_toolbox.management.pkg import copy
-from marvin_python_toolbox.management.pkg import get_git_branch
-from marvin_python_toolbox.management.pkg import is_git_clean
-from marvin_python_toolbox.management.pkg import get_git_tags
-from marvin_python_toolbox.management.pkg import get_git_repository_url
-from marvin_python_toolbox.management.pkg import get_git_tag
-from marvin_python_toolbox.management.pkg import get_git_commit
-from marvin_python_toolbox.management.pkg import get_tag_from_repo_url
-from marvin_python_toolbox.management.pkg import get_repos_from_requirements
-
-
-@mock.patch('marvin_python_toolbox.management.pkg.open')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.join')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-def test_get_repos_from_requirements(curdir_mocked, join_mocked, open_mocked):
-    join_mocked.return_value = '/tmp'
-
-    get_repos_from_requirements(path=None)
-
-    join_mocked.assert_called_with(curdir_mocked, 'requirements.txt')
-    open_mocked.assert_called_with('/tmp', 'r')
-
-    get_repos_from_requirements(path='/path')
-
-    join_mocked.assert_called_with('/path', 'requirements.txt')
-    open_mocked.assert_called_with('/tmp', 'r')
-
-
-def test_get_tag_from_repo_url():
-    repos = ['http://www.xxx.org:80/tag@/repo.html']
-
-    tags = get_tag_from_repo_url(repos)
-
-    assert tags == {'http://www.xxx.org:80/tag@/repo.html': '/repo.html'}
-
-    repos = ['http://www.xxx.org:80/tag/repo.html']
-
-    tags = get_tag_from_repo_url(repos)
-
-    assert tags == {'http://www.xxx.org:80/tag/repo.html': None}
-
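get_tag_from_repo_url's two cases define it completely: everything after the first '@' is the tag, and URLs without one map to None. A sketch:

```python
def get_tag_from_repo_url(repos):
    """Map each repo URL to the tag after its '@' separator, or None (sketch)."""
    tags = {}
    for repo in repos:
        tags[repo] = repo.split('@', 1)[1] if '@' in repo else None
    return tags
```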
-
-@mock.patch('marvin_python_toolbox.management.pkg.git_clone')
-def test_clone(git_mocked):
-    git_mocked.return_value = 1
-    repo = 'http://xxx.git'
-    result = _clone(repo)
-
-    assert result == (repo, 1)
-    git_mocked.assert_called_once_with(repo, checkout=False, depth=1)
-
-
-@mock.patch('marvin_python_toolbox.management.pkg.shutil.ignore_patterns')
-@mock.patch('marvin_python_toolbox.management.pkg.shutil.copytree')
-def test_copy(copytree_mocked, ignore_mocked):
-    src = '/xpto'
-    dest = '/xpto_dest'
-    ignore = ('.git',)
-    ignore_mocked.return_value = 1
-    copy(src, dest, ignore)
-
-    copytree_mocked.assert_called_once_with(src, dest, ignore=1)
-    ignore_mocked.assert_called_once_with(*ignore)
-
-
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
-def test_get_git_branch(popen_mocked, curdir_mocked, pipe_mocked):
-    mockx = mock.MagicMock()
-    mockx.stdout.read.return_value = b'branch '
-    popen_mocked.return_value = mockx
-
-    branch = get_git_branch()
-
-    popen_mocked.assert_called_once_with(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-    assert branch == 'branch'
-
-    branch = get_git_branch(path='/tmp')
-
-    popen_mocked.assert_called_with(['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stdout=pipe_mocked, cwd='/tmp')
-
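The git helpers in pkg.py all follow the same Popen pattern: run a git command with stdout piped, read it, strip trailing whitespace. A sketch of get_git_branch under that pattern (under the Python 2 this toolbox targets, the bytes/str distinction is moot; Python 3 would need a .decode()):

```python
import os
import subprocess


def get_git_branch(path=None):
    """Return the checked-out branch name, whitespace stripped (sketch)."""
    proc = subprocess.Popen(['git', 'rev-parse', '--abbrev-ref', 'HEAD'],
                            stdout=subprocess.PIPE,
                            cwd=path if path else os.path.curdir)
    return proc.stdout.read().strip()
```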
-
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
-def test_get_git_tag(popen_mocked, curdir_mocked, pipe_mocked):
-    mockx = mock.MagicMock()
-    mockx.stdout.read.return_value = b'tag '
-    popen_mocked.return_value = mockx
-
-    tags = get_git_tag()
-
-    popen_mocked.assert_called_with(['git', 'describe', '--tags', 'tag'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-    assert tags == 'tag'
-
-    tags = get_git_tag(path='/tmp')
-
-    popen_mocked.assert_called_with(['git', 'describe', '--tags', 'tag'], stdout=pipe_mocked, cwd='/tmp')
-
-
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
-def test_get_git_commit(popen_mocked, curdir_mocked, pipe_mocked):
-    mockx = mock.MagicMock()
-    mockx.stdout.read.return_value = b'commit '
-    popen_mocked.return_value = mockx
-
-    commit = get_git_commit()
-
-    popen_mocked.assert_called_once_with(['git', 'rev-parse', 'HEAD'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-    assert commit == 'commit'
-
-    commit = get_git_commit(path='/tmp')
-    popen_mocked.assert_called_with(['git', 'rev-parse', 'HEAD'], stdout=pipe_mocked, cwd='/tmp')
-
-    commit = get_git_commit(tag='tag')
-    popen_mocked.assert_called_with(['git', 'rev-list', '-n', '1', 'tag'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
-def test_get_git_repository_url(popen_mocked, curdir_mocked, pipe_mocked):
-    mockx = mock.MagicMock()
-    mockx.stdout.read.return_value = b'url '
-    popen_mocked.return_value = mockx
-
-    url = get_git_repository_url()
-
-    popen_mocked.assert_called_once_with(['git', 'config', '--get', 'remote.origin.url'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-    assert url == 'url'
-
-    url = get_git_repository_url(path='www.xxx.com')
-
-    popen_mocked.assert_called_with(['git', 'config', '--get', 'remote.origin.url'], stdout=pipe_mocked, cwd='www.xxx.com')
-
-
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
-def test_get_git_tags(popen_mocked, curdir_mocked, pipe_mocked):
-    mockx = mock.MagicMock()
-    mockx.stdout.read.return_value = 'git\ntags '
-    popen_mocked.return_value = mockx
-
-    tags = get_git_tags()
-
-    popen_mocked.assert_called_once_with(['git', 'tag'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-    assert tags == ['tags', 'git']
-
-    tags = get_git_tags(path='/tmp')
-
-    popen_mocked.assert_called_with(['git', 'tag'], stdout=pipe_mocked, cwd='/tmp')
-
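get_git_tags differs only in post-processing: the asserted ['tags', 'git'] ordering implies the tag list is sorted in reverse. A sketch reusing the imports above:

```python
def get_git_tags(path=None):
    """List a repo's tags in reverse lexicographic order (sketch)."""
    proc = subprocess.Popen(['git', 'tag'], stdout=subprocess.PIPE,
                            cwd=path if path else os.path.curdir)
    return sorted(proc.stdout.read().strip().split('\n'), reverse=True)
```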
-
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
-@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
-@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
-def test_is_git_clean(curdir_mocked, popen_mocked, pipe_mocked):
-    mockx = mock.MagicMock()
-    mockx.stdout.read.return_value = 'done'
-    popen_mocked.return_value = mockx
-
-    clean = is_git_clean()
-    popen_mocked.assert_called_once_with(['git', 'diff', '--quiet', 'HEAD'], stdout=pipe_mocked, cwd=curdir_mocked)
-
-    assert clean == 'done'
-
-    clean = is_git_clean('/tmp')
-
-    popen_mocked.assert_called_with(['git', 'diff', '--quiet', 'HEAD'], stdout=pipe_mocked, cwd='/tmp')
-
-    assert clean == 'done'
diff --git a/python-toolbox/tests/test_loader.py b/python-toolbox/tests/test_loader.py
deleted file mode 100644
index d09ef0d..0000000
--- a/python-toolbox/tests/test_loader.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-
-# Copyright [2019] [Apache Software Foundation]
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-try:
-    import mock
-except ImportError:
-    import unittest.mock as mock
-
-from marvin_python_toolbox.loader import load_commands_from_file
-
-
-@mock.patch("marvin_python_toolbox.loader.isinstance")
-@mock.patch("marvin_python_toolbox.loader.getmembers")
-@mock.patch("marvin_python_toolbox.loader.imp.load_source")
-def test_load_commands_from_file(load_source_mocked, getmembers_mocked, isinstance_mocked):
-    path = '/tmp'
-    load_source_mocked.return_value = 'source'
-
-    commands = load_commands_from_file(path)
-
-    load_source_mocked.assert_called_once_with('custom_commands', '/tmp')
-    getmembers_mocked.assert_called_once_with('source')
-
-    assert commands == []
diff --git a/python-toolbox/tox.ini b/python-toolbox/tox.ini
deleted file mode 100644
index 12c79da..0000000
--- a/python-toolbox/tox.ini
+++ /dev/null
@@ -1,8 +0,0 @@
-[tox]
-envlist = py27
-
-[testenv]
-deps=pytest
-     pytest-cov
-     mock
-commands=py.test --cov={envsitepackagesdir}/marvin_python_toolbox --cov-report html --cov-report xml {posargs}