Merge remote-tracking branch 'python-toolbox/master'
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
new file mode 100644
index 0000000..dd72cce
--- /dev/null
+++ b/.bumpversion.cfg
@@ -0,0 +1,7 @@
+[bumpversion]
+current_version = 0.0.4
+
+[bumpversion:file:marvin_python_toolbox/VERSION]
+
+[bumpversion:file:README.md]
+
diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 0000000..9c77704
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,22 @@
+[run]
+omit = tests/*
+branch = True
+
+[report]
+exclude_lines =
+    pragma: no cover
+
+    def __repr__
+    if self\.debug
+
+    raise AssertionError
+    raise NotImplementedError
+
+    if 0:
+    if __name__ == .__main__.:
+
+[html]
+directory = coverage_report
+
+[xml]
+output = coverage.xml
\ No newline at end of file
diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md
new file mode 100644
index 0000000..8e7cb86
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/bug_report.md
@@ -0,0 +1,30 @@
+---
+name: Bug report
+about: Create a report to help us improve
+
+---
+
+**Describe the bug**
+A clear and concise description of what the bug is.
+
+**To Reproduce**
+Steps to reproduce the behavior:
+1. Go to '...'
+2. Click on '....'
+3. Scroll down to '....'
+4. See error
+
+**Expected behavior**
+A clear and concise description of what you expected to happen.
+
+**Screenshots**
+If applicable, add screenshots to help explain your problem.
+
+**Desktop (please complete the following information):**
+ - OS: [e.g. iOS]
+ - Browser [e.g. chrome, safari]
+ - Version [e.g. 22]
+ - Log [Warning, Error]
+
+**Additional context**
+Add any other context about the problem here.
diff --git a/.github/ISSUE_TEMPLATE/custom.md b/.github/ISSUE_TEMPLATE/custom.md
new file mode 100644
index 0000000..99bb9a0
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/custom.md
@@ -0,0 +1,7 @@
+---
+name: Custom issue template
+about: Describe this issue template's purpose here.
+
+---
+
+
diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md
new file mode 100644
index 0000000..066b2d9
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE/feature_request.md
@@ -0,0 +1,17 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..7f355d0
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,18 @@
+.cache
+.eggs
+.tox
+.testmondata
+.coverage
+.coverage.*
+coverage.xml
+coverage_report
+*.egg
+*.egg-info
+*.pyc
+tests/__pycache__
+.DS_Store
+build
+.idea
+python-toolbox
+.profiling
+dist
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..1fbcabb
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,38 @@
+language: python
+sudo: required
+dist: trusty
+
+matrix:
+    include:
+        - os: linux
+          python: "2.7"
+        - os: linux
+          python: "3.6"
+        - os: osx
+          language: generic
+
+before_install:
+  - curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o ./spark-2.1.1-bin-hadoop2.6.tgz
+  - sudo tar -xf ./spark-2.1.1-bin-hadoop2.6.tgz
+  - mkdir -p marvin_data
+  - mkdir -p marvin_home
+  - export MARVIN_HOME=./marvin_home
+  - export MARVIN_DATA_PATH=./marvin_data
+  - export SPARK_HOME=./spark-2.1.1-bin-hadoop2.6
+  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo curl https://bootstrap.pypa.io/get-pip.py -o get-pip.py          ; fi
+  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then sudo python get-pip.py                   ; fi
+  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update                    ; fi
+  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew install openssl graphviz  ; fi
+  - if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then sudo apt-get install libsasl2-dev python-pip graphviz -y ; fi
+  - sudo pip install --upgrade pip
+  - sudo pip install virtualenvwrapper --ignore-installed six
+  - source virtualenvwrapper.sh
+  - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then mkvirtualenv marvin-env         ; fi
+
+install:
+  - pip install codecov
+  - make marvin
+
+script:
+  - marvin test
+  - codecov
diff --git a/CHANGES.md b/CHANGES.md
new file mode 100644
index 0000000..f88edae
--- /dev/null
+++ b/CHANGES.md
@@ -0,0 +1,48 @@
+## Changes log
+
+### 0.0.4
+
+   - Docs.yaml file update for metrics api
+   - Some adjustments in toolbox and template makefiles #104
+   - Removing some commands by install mode (dev and prod) #104
+   - Moving autocomplete and notebook extension from toolbox setup to engine template setup. Close #107
+   - Separating tests dependencies and creating a new make command. close #100
+   - Metrics as json and Keras serializer to Closes #86 and Closes #98
+   - Saving and loading metrics artifacts as json files to Fix #98
+   - Adding a symlink to the data path on engine generate. close #93
+   - Marvin is now installable with pip. fix #84
+   - ASCII encode error fix for accented words in predict message
+   - Add Jupyter Lab command. Fix #85
+   - Cli parameter conflict fix
+   - New param to force reload #80
+   - Improving test coverage
+   - New python binary parameter to be used in the creation of virtual env
+   - Fix tornado 4.5.3 and pip 9.0.1
+
+### 0.0.3
+
+	- Python 3 support general compatibility refactoring (#68)
+    - Add marvin_ prefix in artefacts getters and setters to avoid user code conflicts   
+    - Fixing #66 bug related to override the params default values
+    - Refact artifacts setter and getter in engine templates
+    - Making marvin.ini from toolbox be found by default
+    - Making "params" an execute method parameter so that default values can be overridden at runtime
+    - Enabling to inform extra parameters for executor's jvm customization. Fix #65
+    - Improve spark conf parameter usage in cli's commands to use SPARK_CONF_DIR and SPARK_HOME envs.
+    - Not use json dumps if response type is string. Fixed #67
+    - Adding gitter tag to README file.
+    - Remove deploy to pipy from build
+    - Install twine in distribution task
+    - Add --process-dependency-links in pip install command
+    - General bug fixes
+
+### 0.0.2
+
+    - change executor vm parameter from modelProtocol to protocol
+    - Generic Dockerfile template and make commands to be used to build, run and push containers    
+    - fix spark conf dir parameter bug
+    - create distribute task to simplify the pypi package distribution.
+
+### 0.0.1
+
+ - initial version
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..72ec3a7
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at marvin-ai@googlegroups.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
+
+[homepage]: http://contributor-covenant.org
+[version]: http://contributor-covenant.org/version/1/4/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..7b87e9a
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,41 @@
+# Contributing
+
+ When contributing to this repository, please discuss the change you wish to make via issue or 
+ Gitter with the members/administrators of this repository before. 
+
+ ## Get started
+
+ Clone the repository, [install][installguide] and run "marvin test" to confirm the installation.
+
+ Choose the issues that are labeled as "Good First Issue".
+
+ ## Unit test
+
+ The unit test is very important and of course it's required for this project, you can use "marvin test" command to run your tests.
+
+
+ ## New issue and pull request
+
+ The community discusses and tracks known bugs and potential features in the [Github Issue Tracker][tbissue]. If you have a new idea or have identified a bug then you should raise it there to start public discussion
+
+ This repository contains Issue Templates and Pull Request Templates, just follow the instructions described in templates.
+
+ ## Where to ask for help
+
+ 1. [Github Issue Tracker][tbissue]: for discussions about new features or established bugs
+ 2. [Gitter chat][gchat]: for real-time discussion
+
+ ## Separate Code Repositories
+
+ [Marvin-AI][marvinai] maintains all of its code and documentation in multiple repositories on GitHub. This includes the primary repository and several other repositories for different components.
+
+ # Code of Conduct
+
+ Our Code of Conduct is available at [Here][codepage]
+
+ [codepage]: https://github.com/marvin-ai/marvin-python-toolbox/blob/master/CODE_OF_CONDUCT.md
+ [installguide]: https://github.com/marvin-ai/marvin-python-toolbox/blob/master/README.md
+ [tbissue]: https://github.com/marvin-ai/marvin-python-toolbox/issues
+ [gchat]: https://gitter.im/marvin-ai
+ [marvinai]: https://github.com/marvin-ai
+
diff --git a/INSTALL b/INSTALL
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/INSTALL
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..f49a4e1
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,201 @@
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..b219cb6
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,13 @@
+include CHANGES.md
+include INSTALL
+include LICENSE
+include MANIFEST.in
+include README.md
+include marvin_python_toolbox/VERSION
+recursive-include marvin_python_toolbox/extras *
+recursive-include marvin_python_toolbox/management *
+recursive-include notebooks *
+recursive-include tests *
+global-exclude __pycache__
+global-exclude *.py[co]
+prune notebooks/build
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..84ce1ef
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,81 @@
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+.PHONY: help marvin marvin-prod update clean-pyc clean-build clean-reports clean-deps clean grpc distribution
+
+help:
+	@echo "    marvin"
+	@echo "        Prepare project to be used as a marvin package."
+	@echo "    marvin-prod"
+	@echo "        Prepare project to be used in production environment."
+	@echo "    update"
+	@echo "        Reinstall requirements and setup.py dependencies."
+	@echo "    clean-all"
+	@echo "        Remove all generated artifacts."
+	@echo "    clean-pyc"
+	@echo "        Remove python artifacts."
+	@echo "    clean-build"
+	@echo "        Remove build artifacts."
+	@echo "    clean-reports"
+	@echo "        Remove coverage reports."
+	@echo "    clean-deps"
+	@echo "        Remove marvin setup.py dependencies."
+	@echo "    grpc"
+	@echo "        Build grpc stubs."
+	@echo "    distribution"
+	@echo "        Build and upload the toolbox as a wheel package in pypi."
+
+marvin:
+	pip install -e ".[testing]" --process-dependency-links
+	touch .dev
+	marvin --help
+
+update:
+	pip install -e . --process-dependency-links -U 
+
+marvin-prod:
+	pip install . --process-dependency-links
+	rm -f .dev 
+	marvin --help
+
+clean-pyc:
+	find . -name '*.pyc' -exec rm -f {} +
+	find . -name '*.pyo' -exec rm -f {} +
+	find . -name '*~' -exec rm -f  {} +
+
+clean-build:
+	rm -f .prod 
+	rm -rf *.egg-info
+	rm -rf .cache
+	rm -rf .eggs
+	rm -rf dist
+
+clean-reports:
+	rm -rf coverage_report/
+	rm -f coverage.xml
+	rm -f .coverage
+
+clean-deps:
+	pip freeze | grep -v "^-e" | xargs pip uninstall -y
+
+clean: clean-build clean-pyc clean-reports clean-deps
+
+grpc:
+	python -m grpc_tools.protoc --proto_path=marvin_python_toolbox/engine_base/protos --python_out=marvin_python_toolbox/engine_base/stubs --grpc_python_out=marvin_python_toolbox/engine_base/stubs marvin_python_toolbox/engine_base/protos/actions.proto
+	ls -la marvin_python_toolbox/engine_base/stubs/*.py
+
+distribution: clean-build
+	pip install twine
+	python setup.py bdist_wheel --universal
+	twine upload dist/marvin_python_toolbox*.whl
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..bf5c89e
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,5 @@
+Marvin Artificial Intelligence Platform
+Copyright 2017 B2W Digital
+
+This product includes software developed at
+B2W Digital (http://www.b2w.digital)
\ No newline at end of file
diff --git a/PULL_REQUEST_TEMPLATE.md b/PULL_REQUEST_TEMPLATE.md
new file mode 100644
index 0000000..0334db7
--- /dev/null
+++ b/PULL_REQUEST_TEMPLATE.md
@@ -0,0 +1,17 @@
+Fixes # .
+
+Changes proposed in this pull request:
+-
+-
+-
+-
+-
+
+How to test new changes:
+-
+-
+-
+-
+-
+
+@marvin-ai/marvin-core-team
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..2bbce65
--- /dev/null
+++ b/README.md
@@ -0,0 +1,112 @@
+[![Build Status](https://travis-ci.org/marvin-ai/marvin-python-toolbox.svg)](https://travis-ci.org/marvin-ai/marvin-python-toolbox) [![codecov](https://codecov.io/gh/marvin-ai/marvin-python-toolbox/branch/master/graph/badge.svg)](https://codecov.io/gh/marvin-ai/marvin-python-toolbox)
+
+[![Join the chat at https://gitter.im/gitterHQ/gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/marvin-ai)
+
+# Marvin Toolbox v0.0.4
+
+![](https://images-americanas.b2w.io/img/_staging/marvin/marvin.png)
+
+# Quick Start
+
+## Review
+
+**Marvin** is an open-source Artificial Intelligence platform that focuses on helping data scientists deliver meaningful solutions to complex problems. Supported by a standardized large-scale, language-agnostic architecture, Marvin simplifies the process of exploration and modeling.
+
+## Getting Started
+* [Installing Marvin (Ubuntu)](https://www.marvin-ai.org/book/installing-marvin/ubuntu-debian-installation)
+* [Installing Marvin (MacOS)](https://www.marvin-ai.org/book/installing-marvin/macos-installation)
+* [Installing Marvin (Other OS) Vagrant](https://www.marvin-ai.org/book/installing-marvin/vagrant-installation)
+* [Creating a new engine](#creating-a-new-engine)
+* [Working in an existing engine](#working-in-an-existing-engine)
+* [Command line interface](#command-line-interface)
+* [Running an example engine](#running-a-example-engine)
+
+
+### Creating a new engine
+1. To create a new engine
+```
+workon python-toolbox-env
+marvin engine-generate
+```
+Respond to the prompts and wait for the engine environment preparation to complete. If you are using vagrant, don't forget to start the dev box first.
+
+2. Test the new engine
+```
+workon <new_engine_name>-env
+marvin test
+```
+
+3. For more information
+```
+marvin --help
+```
+
+### Working in an existing engine
+
+1. Set VirtualEnv and get to the engine's path
+```
+workon <engine_name>-env
+```
+
+2. Test your engine
+```
+marvin test
+```
+
+3. Bring up the notebook and access it from your browser
+```
+marvin notebook
+```
+
+### Command line interface
+Usage: marvin [OPTIONS] COMMAND [ARGS]
+
+Options:
+```
+  --debug       #Enable debug mode.
+  --version     #Show the version and exit.
+  --help        #Show this command line interface and exit.
+```
+
+Commands:
+```
+  engine-generate     #Generate a new marvin engine project.
+  engine-generateenv  #Generate a new marvin engine environment.
+  engine-grpcserver   #Marvin gRPC engine action server starts.
+  engine-httpserver   #Marvin http api server starts.
+  hive-dataimport     #Import data samples from a hive database to the hive running in this toolbox.
+  hive-generateconf   #Generate default configuration file.
+  hive-resetremote    #Drop all remote tables from informed engine on host.
+  notebook            #Start the Jupyter notebook server.
+  pkg-bumpversion     #Bump the package version.
+  pkg-createtag       #Create git tag using the package version.
+  pkg-showchanges     #Show the package changelog.
+  pkg-showinfo        #Show information about the package.
+  pkg-showversion     #Show the package version.
+  pkg-updatedeps      #Update requirements.txt.
+  test                #Run tests.
+  test-checkpep8      #Check python code style.
+  test-tdd            #Watch for changes to run tests automatically.
+  test-tox            #Run tests using a new virtualenv.
+```
+
+### Running a example engine 
+
+1. Clone the example engine from the repository
+```
+git clone https://github.com/marvin-ai/engines.git
+```
+
+2. Generate a new Marvin engine environment for the Iris species engine
+```
+workon python-toolbox-env
+marvin engine-generateenv ../engines/iris-species-engine/
+```
+
+3. Run the Iris species engine
+```
+workon iris-species-engine-env
+marvin engine-dryrun 
+```
+
+> Marvin is a project started at B2W Digital offices and released open source on September 2017.
diff --git a/bin/marvin b/bin/marvin
new file mode 100644
index 0000000..80c2f6a
--- /dev/null
+++ b/bin/marvin
@@ -0,0 +1,80 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import sys
+import os.path
+
+from marvin_python_toolbox.config import find_inidir, parse_ini
+from marvin_python_toolbox.management import create_cli
+import marvin_python_toolbox as toolbox
+from marvin_python_toolbox import __version__ as TOOLBOX_VERSION
+
# Find the ini directory: first via find_inidir (semantics defined in
# marvin_python_toolbox.config), then fall back to this script's directory.
inifilename = 'marvin.ini'
inidir = find_inidir(inifilename)

if not inidir:
    default_ini_path = os.path.dirname(__file__)
    if os.path.exists(os.path.join(default_ini_path, inifilename)):
        inidir = default_ini_path

if not inidir:
    # Without an ini file there is no way to know which package to load.
    print("ERROR: marvinini file '{}' not found".format(inifilename))
    sys.exit(1)

# Load the ini file
inipath = os.path.join(inidir, inifilename)

# Export the resolved paths/version so the toolbox and any subprocesses
# can locate the configuration, the engine and the toolbox package.
os.environ["DEFAULT_CONFIG_PATH"] = inipath
os.environ["MARVIN_ENGINE_PATH"] = inidir
os.environ["MARVIN_TOOLBOX_PATH"] = toolbox.__path__[0]
os.environ["TOOLBOX_VERSION"] = TOOLBOX_VERSION

# Default log level when the user has not set one.
if not os.getenv("LOG_LEVEL"):
    os.environ["LOG_LEVEL"] = 'INFO'

# Values made available for interpolation inside the ini file.
config_defaults = {
    'inidir': inidir,
    'marvin_packagedir': '{inidir}/{marvin_package}',
}

config = parse_ini(inipath, config_defaults)

package_name = config['marvin_package']
package_path = config['marvin_packagedir']

# Default locations for Spark, virtualenvs and the Marvin home/data dirs;
# setdefault only applies them when the user has not exported a value.
home = os.environ['HOME']
os.environ.setdefault('SPARK_HOME', '/opt/spark')
os.environ.setdefault('WORKON_HOME', os.path.join(home, '.virtualenvs'))
os.environ.setdefault('MARVIN_HOME', os.path.join(home, 'marvin'))
os.environ.setdefault('MARVIN_DATA_PATH', os.path.join(home, 'marvin/data'))

# Make sure the data directory exists before any command runs.
marvin_data = os.environ['MARVIN_DATA_PATH']
if not os.path.exists(marvin_data):
    os.makedirs(marvin_data)


# Optional settings: engine type and commands to exclude from the CLI.
type_ = config.get('marvin_type', None)

exclude_commands = config.get('marvin_exclude', None)

# Build the command line interface for the configured package and run it.
cli = create_cli(package_name, package_path, type_=type_, exclude=exclude_commands, config=config)

cli()
+
diff --git a/marvin.ini b/marvin.ini
new file mode 100644
index 0000000..bf06992
--- /dev/null
+++ b/marvin.ini
@@ -0,0 +1,3 @@
+[marvin]
+package = marvin_python_toolbox
+type = tool
diff --git a/marvin_python_toolbox/VERSION b/marvin_python_toolbox/VERSION
new file mode 100644
index 0000000..81340c7
--- /dev/null
+++ b/marvin_python_toolbox/VERSION
@@ -0,0 +1 @@
+0.0.4
diff --git a/marvin_python_toolbox/__init__.py b/marvin_python_toolbox/__init__.py
new file mode 100644
index 0000000..cdde2f0
--- /dev/null
+++ b/marvin_python_toolbox/__init__.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+
+from .manage import *
+from .common import *
+from .engine_base import *
+
# Get package version number from the adjacent "VERSION" file (kept as a
# plain text file so bumpversion can rewrite it; read in binary mode and
# decoded as ASCII so the result is a plain str).
with open(os.path.join(os.path.dirname(__file__), 'VERSION'), 'rb') as f:
    __version__ = f.read().decode('ascii').strip()
diff --git a/marvin_python_toolbox/_compatibility.py b/marvin_python_toolbox/_compatibility.py
new file mode 100644
index 0000000..c796f4e
--- /dev/null
+++ b/marvin_python_toolbox/_compatibility.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Compatibility module.
+
+Import this module to help to write code compatible with Python 2 and 3.
+"""
+
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import unicode_literals
+from six.moves.urllib.parse import urlparse, quote
+from six import StringIO, iteritems, text_type
+from six.moves import xrange
+import six
+
+__all__ = ['six']
+
+# Add here any code that have to differentiate between python 2 and 3.
diff --git a/marvin_python_toolbox/_logging.py b/marvin_python_toolbox/_logging.py
new file mode 100644
index 0000000..45635a8
--- /dev/null
+++ b/marvin_python_toolbox/_logging.py
@@ -0,0 +1,89 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Custom logging module.
+
+This module is responsible to manage log messages and log file.
+"""
+
+import os
+import os.path
+import logging
+
+DEFAULT_LOG_LEVEL = logging.WARNING
+DEFAULT_LOG_DIR = '/tmp'
+
+
class Logger(logging.getLoggerClass()):
    """Custom logger class.

    Use this class to customize the logger behavior or to intercept the
    messages (e.g. uncomment the overrides below to hook error/critical).
    """
    # def error(self, msg, *args, **kwargs):
    #     # Add here code to intercept the project error messages
    #     super(Logger, self).error(msg, *args, **kwargs)

    # def critical(self, msg, *args, **kwargs):
    #     # Add here code to intercept the project critical messages
    #     super(Logger, self).critical(msg, *args, **kwargs)


# Register Logger so every logging.getLogger() call from here on returns
# an instance of it.
logging.setLoggerClass(Logger)
+
+
def get_logger(name, namespace='marvin_python_toolbox',
               log_level=DEFAULT_LOG_LEVEL, log_dir=DEFAULT_LOG_DIR):
    """Build a logger that outputs to a file and to the console.

    Environment variables override the arguments: ``<NAMESPACE>_LOG_LEVEL``
    (or ``LOG_LEVEL``) for the level, ``<NAMESPACE>_LOG_DIR`` (or
    ``LOG_DIR``) for the log file directory.

    :param name: logger name, appended to the namespace
    :param namespace: logger namespace, also used as the log file prefix
    :param log_level: default logging level (int or level name string)
    :param log_dir: default directory for the per-process log file
    :return: the configured ``logging.Logger``
    """
    log_level = (os.getenv('{}_LOG_LEVEL'.format(namespace.upper())) or
                 os.getenv('LOG_LEVEL', log_level))
    log_dir = (os.getenv('{}_LOG_DIR'.format(namespace.upper())) or
               os.getenv('LOG_DIR', log_dir))

    logger = logging.getLogger('{}.{}'.format(namespace, name))
    logger.setLevel(log_level)

    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    # Console handler: always attached.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(log_level)
    console_handler.setFormatter(formatter)
    logger.addHandler(console_handler)

    # File handler: one log file per process; failure to create it must not
    # break the application, so only log the problem.
    file_path = None  # defined up-front so the except clause can report it
    try:
        log_path = os.path.abspath(log_dir)
        log_filename = '{name}.{pid}.log'.format(
            name=namespace, pid=os.getpid())

        file_path = str(os.path.join(log_path, log_filename))

        if not os.path.exists(log_path):  # pragma: no cover
            # BUG FIX: mode was the decimal literal 774 (octal 01406);
            # use the octal 0o774 that was clearly intended.
            os.makedirs(log_path, mode=0o774)

        file_handler = logging.FileHandler(file_path)
        file_handler.setLevel(log_level)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    except OSError as e:
        logger.error('Could not create log file {file}: {error}'.format(
            file=file_path, error=e.strerror))

    return logger
diff --git a/marvin_python_toolbox/common/__init__.py b/marvin_python_toolbox/common/__init__.py
new file mode 100644
index 0000000..ba979d3
--- /dev/null
+++ b/marvin_python_toolbox/common/__init__.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
diff --git a/marvin_python_toolbox/common/config.py b/marvin_python_toolbox/common/config.py
new file mode 100644
index 0000000..c91bfd5
--- /dev/null
+++ b/marvin_python_toolbox/common/config.py
@@ -0,0 +1,131 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Configuration module.
+"""
+
+import os
+from configparser import ConfigParser, NoSectionError
+from configobj import ConfigObj
+
+# Use six to create code compatible with Python 2 and 3.
+# See http://pythonhosted.org/six/
+from .._compatibility import six
+from .._logging import get_logger
+
+from .exceptions import InvalidConfigException
+from .utils import from_json
+
+
+__all__ = ['Configuration', 'Config', 'load_conf_from_file']
+
+
+logger = get_logger('core')
+
+
def load_conf_from_file(path=None, section='marvin'):
    """Load one section of a marvin ini-style configuration file.

    Resolution order for the file path:
      1. the explicit ``path`` argument;
      2. the ``MARVIN_CONFIG_FILE`` or ``CONFIG_FILE`` environment variable;
      3. the ``DEFAULT_CONFIG_PATH`` environment variable (exported by the
         ``bin/marvin`` launcher).

    :param path: optional explicit path to the configuration file
    :param section: section name to read (default: ``'marvin'``)
    :return: dict-like mapping with the section's keys, or ``{}`` when the
             section is missing
    """
    data = {}
    config_path = path  # 1) explicit argument
    if not config_path:  # 2) environment variables
        config_path = os.getenv('MARVIN_CONFIG_FILE') or os.getenv('CONFIG_FILE')
    if not config_path:  # 3) default exported by the launcher
        config_path = os.getenv("DEFAULT_CONFIG_PATH")
    logger.info('Loading configuration values from "{path}"...'.format(path=config_path))
    config_parser = ConfigObj(config_path)
    try:
        data = config_parser[section]
    except (KeyError, NoSectionError):
        # BUG FIX: ConfigObj is dict-like and raises KeyError for a missing
        # section; only NoSectionError (a configparser exception) was caught
        # before, so a missing section crashed instead of warning.
        logger.warning('Couldn\'t find "{section}" section in "{path}"'.format(
            section=section, path=config_path
        ))

    return data
+
+
+DEFAULT_PREFIX = 'marvin.'
+DEFAULT_SECT = 'marvin'
+
+
class Configuration(object):
    """
    Abstracts persistent configuration.

    Reads configuration values via ``load_conf_from_file`` (which resolves
    the ini file from arguments/environment) and caches them per section
    at class level, so the file is parsed at most once per section.

    Usage:

        Configuration.get('my.key')

    """
    # Cache of loaded sections: {section_name: {key: value}}.
    _conf = {}
    # Section consulted when the caller does not specify one.
    _default_sect = DEFAULT_SECT
    PREFIX = DEFAULT_PREFIX

    @classmethod
    def reset(cls):
        """Clear the cached sections and restore default section/prefix."""
        cls._conf = {}
        cls._default_sect = DEFAULT_SECT
        cls.PREFIX = DEFAULT_PREFIX

    @classmethod
    def _load(cls, section=None):
        # Populate the cache entry for ``section`` from the config file.
        section = section or cls._default_sect
        cls._conf[section] = load_conf_from_file(section=section)

    @classmethod
    def get(cls, key, section=None, **kwargs):
        """
        Retrieve a config value, loading its section on first access.

        Falls back to the default section when the key is missing from the
        requested one; then to ``kwargs['default']`` when given; otherwise
        raises an InvalidConfigException. Values are JSON-decoded when
        possible, else returned as the raw string.
        """
        section = section or cls._default_sect
        if section not in cls._conf:
            cls._load(section=section)

        value = cls._conf[section].get(key)

        # if not found in context read default
        if not value and section != cls._default_sect:
            value = cls._conf[cls._default_sect].get(key) if cls._default_sect in cls._conf else None

        if value is None:
            if 'default' in kwargs:  # behave as {}.get(x, default='fallback')
                _def_value = kwargs['default']
                logger.warn("Static configuration [{}] was not found. Using the default value [{}].".format(key, _def_value))
                return _def_value
            else:
                raise InvalidConfigException(u'Not found entry: {}'.format(key))

        try:
            value = from_json(value)  # parse value
        except (TypeError, ValueError):
            pass  # if not json parseable, then keep the string value

        return value

    @classmethod
    def keys(cls, section=None):
        """Get a list with all config keys of a section (loads it if needed)."""
        section = section or cls._default_sect
        if section not in cls._conf:
            cls._load(section=section)
        return cls._conf[section].keys()


# Alias
Config = Configuration
+
diff --git a/marvin_python_toolbox/common/data.py b/marvin_python_toolbox/common/data.py
new file mode 100644
index 0000000..d86bd5b
--- /dev/null
+++ b/marvin_python_toolbox/common/data.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Data Module.
+
+"""
+
+import os
+import requests
+import progressbar
+
+# Use six to create code compatible with Python 2 and 3.
+# See http://pythonhosted.org/six/
+from .._compatibility import six
+from .utils import check_path
+from .exceptions import InvalidConfigException
+from six import with_metaclass
+from .._logging import get_logger
+
+logger = get_logger('common.data')
+
+
class AbstractMarvinData(type):
    # Metaclass that exposes ``data_path`` as a read-only property on the
    # *class* itself (so ``MarvinData.data_path`` works without an instance).
    @property
    def data_path(cls):
        return cls.get_data_path()
+
+
class MarvinData(with_metaclass(AbstractMarvinData)):
    """Helpers for locating, loading and downloading files under the
    Marvin data directory (``MARVIN_DATA_PATH``), exposed as the class
    property ``MarvinData.data_path``.
    """

    # Environment variable that holds the data directory path.
    _key = 'MARVIN_DATA_PATH'

    @classmethod
    def get_data_path(cls):
        """
        Read data path from the following sources in order of priority:

        1. Environment variable

        If not found raises an exception

        :return: str - datapath
        :raises InvalidConfigException: when the variable is unset or the
            directory cannot be created
        """
        marvin_path = os.environ.get(cls._key)
        if not marvin_path:
            raise InvalidConfigException('Data path not set!')

        is_path_created = check_path(marvin_path, create=True)
        if not is_path_created:
            raise InvalidConfigException('Data path does not exist!')

        return marvin_path

    @classmethod
    def _convert_path_to_key(cls, path):
        # Normalize a filesystem path into a forward-slash key: absolute
        # paths are made relative to the data path first.
        if path.startswith(os.path.sep):
            path = os.path.relpath(path, start=cls.data_path)
        return '/'.join(path.split(os.path.sep))

    @classmethod
    def load_data(cls, relpath):
        """
        Load data from the following sources in order of priority:

        1. Filesystem

        :param relpath: path relative to "data_path"
        :return: str - data content
        """
        filepath = os.path.join(cls.data_path, relpath)
        with open(filepath) as fp:
            content = fp.read()

        return content

    @classmethod
    def download_file(cls, url, local_file_name=None, force=False, chunk_size=1024):
        """
        Download a file from a given url into the data directory.

        :param url: source URL
        :param local_file_name: target file name (defaults to the last URL
            path segment)
        :param force: re-download even when the file already exists
        :param chunk_size: streaming chunk size in bytes
        :return: str - path of the downloaded (or already existing) file
        """

        local_file_name = local_file_name if local_file_name else url.split('/')[-1]
        filepath = os.path.join(cls.data_path, local_file_name)

        if not os.path.exists(filepath) or force:
            try:
                headers = requests.head(url, allow_redirects=True).headers
                length = headers.get('Content-Length')

                logger.info("Starting download of {} file with {} bytes ...".format(url, length))

                # BUG FIX: servers may omit Content-Length; int(None) used to
                # raise TypeError and abort the download. Skip the progress
                # bar when the size is unknown.
                bar = None
                if length is not None:
                    widgets = [
                        'Downloading file please wait...', progressbar.Percentage(),
                        ' ', progressbar.Bar(),
                        ' ', progressbar.ETA(),
                        ' ', progressbar.FileTransferSpeed(),
                    ]
                    bar = progressbar.ProgressBar(widgets=widgets, max_value=int(length) + chunk_size).start()

                r = requests.get(url, stream=True)

                with open(filepath, 'wb') as f:
                    total_chunk = 0

                    for chunk in r.iter_content(chunk_size):
                        if chunk:
                            f.write(chunk)
                            total_chunk += chunk_size
                            if bar is not None:
                                bar.update(total_chunk)

                if bar is not None:
                    bar.finish()

            except:
                # Bare except on purpose: remove the partial file even on
                # KeyboardInterrupt/SystemExit, then re-raise.
                if os.path.exists(filepath):
                    os.remove(filepath)

                raise

        return filepath
diff --git a/marvin_python_toolbox/common/data_source_provider.py b/marvin_python_toolbox/common/data_source_provider.py
new file mode 100644
index 0000000..2a2e78e
--- /dev/null
+++ b/marvin_python_toolbox/common/data_source_provider.py
@@ -0,0 +1,43 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Marvin Data Source module.
+
+This module is responsible to create and provide diferents types of data source objects.
+
+"""
+
+
def get_spark_session(enable_hive=False, app_name='marvin-engine', configs=None):
    """Return a Spark Session object.

    :param enable_hive: enable Hive support on the session when True
    :param app_name: Spark application name
    :param configs: optional list of extra configuration entries, each one
        passed to ``SparkSession.Builder.config``
    :return: a ``pyspark.sql.SparkSession``
    """
    # BUG FIX: ``configs`` used to default to a mutable list ([]); default
    # to None to avoid the shared-mutable-default pitfall (backward
    # compatible: callers passing nothing behave identically).

    # Prepare spark context to be used (findspark locates SPARK_HOME)
    import findspark
    findspark.init()
    from pyspark.sql import SparkSession

    # prepare spark session to be returned
    builder = SparkSession.builder.appName(app_name)
    if enable_hive:
        builder = builder.enableHiveSupport()

    # apply any extra configuration entries
    for config in (configs or []):
        builder = builder.config(config)

    return builder.getOrCreate()
diff --git a/marvin_python_toolbox/common/exceptions.py b/marvin_python_toolbox/common/exceptions.py
new file mode 100644
index 0000000..309f21e
--- /dev/null
+++ b/marvin_python_toolbox/common/exceptions.py
@@ -0,0 +1,39 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from http.client import HTTPException as HTTPExceptionBase
+
+
class ConfigException(Exception):
    """
    Base class for Marvin configuration errors.
    """
    pass
+
class InvalidConfigException(ConfigException):
    """
    Raised when a Marvin configuration value is missing or invalid.
    """
    pass
+
class InvalidJsonException(Exception):
    """
    Raised when a payload cannot be parsed as JSON.
    """
    pass
+
class HTTPException(HTTPExceptionBase):
    """
    Raised for HTTP-level errors returned by remote services.
    """
    pass
diff --git a/marvin_python_toolbox/common/http_client.py b/marvin_python_toolbox/common/http_client.py
new file mode 100644
index 0000000..a198c04
--- /dev/null
+++ b/marvin_python_toolbox/common/http_client.py
@@ -0,0 +1,214 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Http Client Module.
+
+"""
+import requests
+import math
+
+from .utils import to_json
+
+# Use six to create code compatible with Python 2 and 3.
+# See http://pythonhosted.org/six/
+from .._compatibility import six
+from .._logging import get_logger
+from .exceptions import HTTPException
+
+
+__all__ = ['HttpClient', 'ListResultSet', 'HttpResponse', 'ApiClient']
+
+
+logger = get_logger('http_client')
+
+
class HttpClient(object):
    """
    Http REST client

    used as superclass for the specific API classes
    override the `host` property method to return the api server host

    usage:

        class MyApiClient(object):
            @property
            def host(self):
                return "http://myapiurl:8000"

    """

    def url(self, path):
        """
        Build url for the specified path.

        :param path: [string] the url path for the api endpoint

        :return: [string] formated url
        """
        # BUG FIX: use startswith instead of a substring test so a relative
        # path that merely *contains* "http://" (e.g. inside a query string)
        # is still joined with the host.
        if path.startswith(('http://', 'https://')):
            return path
        return self.host + path

    def parse_response(self, response):
        """
        Parse the response and build a `scanboo_common.http_client.HttpResponse` object.
        For successful responses, convert the json data into a dict.

        :param response: the `requests` response

        :return: [HttpResponse] response object
        """
        status = response.status_code
        if response.ok:
            data = response.json()
            return HttpResponse(ok=response.ok, status=status, errors=None, data=data)
        else:
            # Error bodies may not be JSON; fall back to the raw content.
            try:
                errors = response.json()
            except ValueError:
                errors = response.content
            return HttpResponse(ok=response.ok, status=status, errors=errors, data=None)

    def request_header(self):
        """
        Build a headers dict with:
          - the content type as json

        :return: [dict] headers dict
        """
        return {
            'Content-Type': 'application/json',
            'Csrf-Token': 'nocheck',
        }

    def get_all(self, path, data=None, limit=100):
        """Encapsulates GET all requests (returns a paginated result set)."""
        return ListResultSet(path=path, data=data or {}, limit=limit)

    def get(self, path, data=None):
        """Encapsulates GET requests; `data` becomes the query string."""
        data = data or {}
        response = requests.get(self.url(path), params=data, headers=self.request_header())
        return self.parse_response(response)

    def post(self, path, data=None):
        """Encapsulates POST requests; `data` is sent as a JSON body."""
        data = data or {}
        response = requests.post(self.url(path), data=to_json(data), headers=self.request_header())
        return self.parse_response(response)

    def put(self, path, data=None):
        """Encapsulates PUT requests; `data` is sent as a JSON body."""
        data = data or {}
        response = requests.put(self.url(path), data=to_json(data), headers=self.request_header())
        return self.parse_response(response)

    def delete(self, path, data=None):
        """Encapsulates DELETE requests; `data` is sent as a JSON body."""
        data = data or {}
        response = requests.delete(self.url(path), data=to_json(data), headers=self.request_header())
        return self.parse_response(response)
+
+
class ListResultSet(object):
    """
    Used to encapsulate the result of requests of lists.

    Fetches the first page eagerly (through the module-level ``api_client``)
    and requests subsequent pages lazily while iterating.

    NOTE(review): assumes the endpoint returns a JSON object with 'total'
    and 'objects' keys -- confirm against the API contract.
    """

    def __init__(self, path, data=None, limit=50, page=1):
        # Endpoint path and base query-string parameters.
        self.path = path
        self.params = data or {}
        # Page size and current (1-based) page number.
        self.limit = limit
        self.page = page

        self.response = None
        self._objects = []

        # Fetch the first page eagerly.
        self._process()

    def __len__(self):
        # Total item count reported by the server, not the number of items
        # fetched so far.
        return self.response.data.get('total', 0)

    def __iter__(self):
        more_results = True

        while more_results:
            for item in self._objects:
                yield item

            next_page = self._next_page()
            if next_page:
                self.page = next_page
                self._process()
            else:
                more_results = False

    def _next_page(self):
        # Next page number, or None when the last page was already fetched.
        new_page = None
        if math.ceil(self.response.data['total'] / float(self.limit)) >= (self.page + 1):
            new_page = self.page + 1
        return new_page

    def _process(self):
        # Fetch the current page and cache its objects on the instance.
        url = api_client.url(self.path)
        self.params.update({'page': self.page, 'per_page': self.limit})

        response = requests.get(url, params=self.params, headers=api_client.request_header())
        response = api_client.parse_response(response)

        try:
            self._objects = response.data['objects']
        except TypeError:
            # response.data is None on HTTP errors; surface the error body.
            raise HTTPException(response.errors)
        self.response = response
+
+
class ApiClient(HttpClient):
    """
    Default client for the data API.

    See `scanboo_common.http_client.HttpClient` for more info.
    """

    @property
    def host(self):
        # Hard-coded for now; could be sourced from Configuration.get('api.url').
        url = 'http://localhost:8000'
        # Normalize away a single trailing slash so url() can concatenate paths.
        if url.endswith('/'):
            url = url[:-1]
        return url
+
+
class HttpResponse(object):
    """
    Immutable-by-convention value object describing one HTTP call outcome.

    :attr ok: [bool] request returned ok
    :attr status: [int] http status code
    :attr data: [dict] dictionary containing the returned data
    :attr errors: [dict | str] dict when errors is json parsable or the raw error string
    """

    def __init__(self, ok, status, errors, data):
        # Plain attribute assignment; no validation is performed here.
        self.data = data
        self.errors = errors
        self.status = status
        self.ok = ok
+
+
+# ApiClient "singleton"
+api_client = ApiClient()
diff --git a/marvin_python_toolbox/common/profiling.py b/marvin_python_toolbox/common/profiling.py
new file mode 100644
index 0000000..1ed4e6b
--- /dev/null
+++ b/marvin_python_toolbox/common/profiling.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import json
+import subprocess
+import cProfile
+import pstats
+import uuid
+from functools import wraps
+from .._compatibility import StringIO
+
+from .._logging import get_logger
+
+logger = get_logger('profiling')
+
+
class Profile(cProfile.Profile):
    """cProfile.Profile subclass that renders its top stats as HTML.

    ``image_path`` may be set (e.g. by the ``profiling`` helper) to a
    call-graph image that is appended to the HTML representation.
    """

    def __init__(self, sortby='tottime', *args, **kwargs):
        super(Profile, self).__init__(*args, **kwargs)
        self.sortby = sortby
        self.image_path = None

    def _repr_html_(self):
        # Render the 10 most expensive entries, sorted by ``sortby``.
        buf = StringIO()
        pstats.Stats(self, stream=buf).sort_stats(self.sortby).print_stats(10)
        fragments = ['<pre>{}</pre>'.format(buf.getvalue())]
        if self.image_path:
            fragments.append('<img src="{}" style="margin: 0 auto;">'.format(self.image_path))
        return ''.join(fragments)
+
+
class profiling(object):
    """Profiling decorator / context manager.

    Wraps a callable (or a ``with`` block) with ``cProfile`` and, on exit,
    dumps the collected stats, an optional info JSON and a call-graph PNG
    (via gprof2dot/GraphViz) into ``output_path``.

    ``enable``, ``output_path``, ``uid`` and ``info`` may be plain values
    or callables; callables are resolved once per decorated call.
    """

    def __init__(self, enable=True, output_path='profiling', uid=uuid.uuid4, info=None, sortby='tottime'):
        # ``enable``: bool or callable(*args, **kwargs) -> bool.
        self.enable = enable
        # Directory (or callable returning one) for the profiling artifacts.
        self.output_path = output_path
        # Unique id (or callable returning one) used to name the artifacts.
        self.uid = uid
        # Optional JSON-serializable info (or callable) saved next to stats.
        self.info = info
        self.sortby = sortby

        self.enable_profiling = enable

    def __call__(self, func):

        @wraps(func)
        def func_wrapper(*args, **kwargs):
            enable_ = self.enable
            if callable(enable_):
                enable_ = enable_(*args, **kwargs)
                self.enable_profiling = bool(enable_)

            if self.enable_profiling:
                self.__enter__()
            response = None
            try:
                response = func(*args, **kwargs)
            finally:
                if self.enable_profiling:
                    # Resolve the callable arguments for THIS call and
                    # restore them afterwards. BUG FIX: previously the
                    # resolved values permanently replaced the callables,
                    # so every call after the first reused the first call's
                    # uid/output_path and overwrote its profiling output.
                    saved = (self.output_path, self.uid, self.info)
                    if callable(self.output_path):
                        self.output_path = self.output_path(*args, **kwargs)
                    if callable(self.uid):
                        self.uid = self.uid()
                    if callable(self.info):
                        self.info = self.info(response, *args, **kwargs)
                    try:
                        self.__exit__(None, None, None)
                    finally:
                        self.output_path, self.uid, self.info = saved

            return response

        return func_wrapper

    def __enter__(self):
        # Start a Profile only when enabled; returns None otherwise.
        pr = None
        if self.enable_profiling:
            pr = Profile(sortby=self.sortby)
            pr.enable()
        self.pr = pr
        return pr

    def __exit__(self, type, value, traceback):
        if not self.enable_profiling:
            return
        pr = self.pr
        pr.disable()
        # args accept plain values or callables
        output_path = self.output_path
        uid = self.uid
        info = self.info
        if callable(uid):
            uid = uid()

        # make sure the output path exists
        if not os.path.exists(output_path):  # pragma: no cover
            os.makedirs(output_path, mode=0o774)

        # collect profiling info
        stats = pstats.Stats(pr)
        stats.sort_stats(self.sortby)
        info_path = os.path.join(output_path, '{}.json'.format(uid))
        stats_path = os.path.join(output_path, '{}.pstats'.format(uid))
        dot_path = os.path.join(output_path, '{}.dot'.format(uid))
        png_path = os.path.join(output_path, '{}.png'.format(uid))
        if info:
            try:
                # BUG FIX: json.dump() has no ``encoding`` keyword on
                # Python 3; passing it raised TypeError (swallowed below)
                # and the info file was silently never written.
                with open(info_path, 'w') as fp:
                    json.dump(info, fp, indent=2)
            except Exception as e:
                logger.error('An error occurred while saving %s: %s.', info_path, e)
        stats.dump_stats(stats_path)
        # create profiling graph
        try:
            subprocess.call(['gprof2dot', '-f', 'pstats', '-o', dot_path, stats_path])
            subprocess.call(['dot', '-Tpng', '-o', png_path, dot_path])
            pr.image_path = png_path
        except Exception:
            logger.error('An error occurred while creating profiling image! '
                         'Please make sure you have installed GraphViz.')
        logger.info('Saving profiling data (%s)', stats_path[:-7])
diff --git a/marvin_python_toolbox/common/utils.py b/marvin_python_toolbox/common/utils.py
new file mode 100644
index 0000000..3799d61
--- /dev/null
+++ b/marvin_python_toolbox/common/utils.py
@@ -0,0 +1,295 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Utils Module.
+
+"""
+import os
+import re
+import datetime
+import time
+import json
+import simplejson
+import uuid
+import hashlib
+import jsonschema
+import warnings
+from slugify import slugify
+
+# Use six to create code compatible with Python 2 and 3.
+# See http://pythonhosted.org/six/
+from .._compatibility import xrange, text_type, quote
+from .._logging import get_logger
+from .exceptions import InvalidJsonException
+
+
+logger = get_logger('utils')
+
+
class memoized_class_property(object):
    """Descriptor that computes a class-level value once and caches it.

    The wrapped function receives the class (like a ``classmethod``) and is
    evaluated on first access; the result is then stored on the class under
    the same attribute name, replacing this descriptor, so every later
    lookup is a plain attribute read.

    Usage:

        class MyClass:
            @memoized_class_property
            def bla(cls):
                print 'only once'
                return 42

        MyClass.bla
        # 'only once'
        # 42

        MyClass.bla
        # 42
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        try:
            self.__doc__ = wrapped.__doc__
        # Bug fix: a bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; only ordinary errors should be ignored here.
        except Exception:  # pragma: no cover
            pass

    # if called on a class, inst is None and objtype is the class;
    # if called on an instance, inst is the instance and objtype the class
    def __get__(self, inst, objtype=None):
        val = self.wrapped(objtype)
        # Cache on the class: this setattr shadows the descriptor itself.
        setattr(objtype, self.wrapped.__name__, val)
        return val
+
+
class class_property(object):
    """Descriptor that exposes a function of the class as a read-only
    property; unlike ``memoized_class_property`` the wrapped function is
    re-evaluated on every access.

    Usage:

        class MyClass:
            @classproperty
            def bla(cls):
                print 'hi'
                return 42

        MyClass.bla
        # 'hi'
        # 42

        MyClass.bla
        # 'hi'
        # 42
    """

    def __init__(self, wrapped):
        self.wrapped = wrapped
        try:
            self.__doc__ = wrapped.__doc__
        # Bug fix: a bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; only ordinary errors should be ignored here.
        except Exception:  # pragma: no cover
            pass

    def __get__(self, inst, objtype=None):
        # objtype is the owning class whether accessed on it or an instance.
        val = self.wrapped(objtype)
        return val
+
+
def chunks(lst, size):
    """Yield successive ``size``-sized slices taken from ``lst``.

    The final chunk may be shorter when ``len(lst)`` is not a multiple of
    ``size``.
    """
    for offset in xrange(0, len(lst), size):
        yield lst[offset:offset + size]
+
+
+def _to_json_default(obj):
+    """Helper to convert non default objects to json.
+
+    Usage:
+        simplejson.dumps(data, default=_to_json_default)
+    """
+    # Datetime
+    if isinstance(obj, datetime.datetime):
+        return obj.isoformat()
+
+    # UUID
+    if isinstance(obj, uuid.UUID):
+        return str(obj)
+
+    # numpy
+    if hasattr(obj, 'item'):
+        return obj.item()
+
+    # # Enum
+    # if hasattr(obj, 'value'):
+    #     return obj.value
+
+    try:
+        return obj.id
+    except Exception:
+        raise TypeError('{obj} is not JSON serializable'.format(obj=repr(obj)))
+
+
+datetime_regex = re.compile('(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2})')
+uuid_regex = re.compile('^[0-9a-f]{8}(-[0-9a-f]{4}){3}-[0-9a-f]{12}$')
+
+
+def _from_json_object_hook(obj):
+    """Converts a json string, where datetime and UUID objects were converted
+    into strings using the '_to_json_default', into a python object.
+
+    Usage:
+        simplejson.loads(data, object_hook=_from_json_object_hook)
+    """
+
+    for key, value in obj.items():
+        # Check for datetime objects
+        if isinstance(value, str):
+            dt_result = datetime_regex.match(value)
+            if dt_result:
+                year, month, day, hour, minute, second = map(
+                    lambda x: int(x), dt_result.groups())
+                obj[key] = datetime.datetime(
+                    year, month, day, hour, minute, second)
+            else:
+                dt_result = uuid_regex.match(value)
+                if dt_result:
+                    obj[key] = uuid.UUID(value)
+    return obj
+
+
def to_json(data):
    """Serialize ``data`` to a JSON string, delegating types the stdlib
    encoder cannot handle (datetime, UUID, numpy scalars, id-bearing
    objects) to ``_to_json_default``."""
    return json.dumps(data, default=_to_json_default)
+
+
def from_json(json_str):
    """Parse ``json_str``, reviving datetime and UUID strings produced by
    ``to_json`` via ``_from_json_object_hook``."""
    return simplejson.loads(json_str, object_hook=_from_json_object_hook)
+
+
def validate_json(data, schema):
    """Validate ``data`` against a JSON ``schema``.

    Either argument may be passed as a JSON string, in which case it is
    parsed first. Raises ``InvalidJsonException`` when validation fails.
    """
    parsed_data = from_json(data) if isinstance(data, str) else data
    parsed_schema = from_json(schema) if isinstance(schema, str) else schema

    try:
        jsonschema.validate(parsed_data, parsed_schema)
    except jsonschema.ValidationError as e:
        raise InvalidJsonException(e.message)
+
+
def is_valid_json(data, schema):
    """Return True when ``data`` conforms to ``schema``, False otherwise."""
    try:
        validate_json(data, schema)
    except InvalidJsonException:
        return False
    return True
+
+
def generate_key(string):
    """Return the hexadecimal SHA-256 digest of ``string`` (UTF-8 encoded)."""
    digest = hashlib.sha256(string.encode('utf-8'))
    return digest.hexdigest()
+
+
def to_slug(strvalue):
    """Thin wrapper converting any string to a URL-friendly slug."""
    return slugify(strvalue)
+
+
def getattr_qualified(obj, name, *args):
    """Resolve a dotted attribute path on ``obj``, e.g. ``'a.b'`` or
    ``"a.mapping['key']"``.

    An optional third positional argument is returned instead of raising
    when an attribute (AttributeError) or dict key (KeyError) is missing.
    """
    if len(args) > 1:
        raise TypeError(
            'getattr_qualified expected at most 3 arguments, got {}'.format(len(args) + 2))
    has_default = bool(args)
    fallback = args[0] if args else None

    for part in name.split("."):
        subscript = None
        # "attr['key']" style segments carry a dict subscript
        if '[' in part:
            part, subscript = part[:-1].split('[')
            # strip matching surrounding quotes from the key, if any
            if subscript[0] in ('"', "'") and subscript[0] == subscript[-1]:
                subscript = subscript[1:-1]
        try:
            obj = getattr(obj, part)
        except AttributeError:
            if has_default:
                return fallback
            raise
        if subscript:
            try:
                obj = obj[subscript]
            except KeyError:
                if has_default:
                    return fallback
                raise
    return obj
+
+
def check_path(path, create=False):
    """
    Check for a path on filesystem

    :param path: str - path name
    :param create: bool - create if do not exist
    :return: bool - path exists
    """
    if os.path.exists(path):
        return True
    if not create:
        return False
    os.makedirs(path)
    # re-check so the return value reflects the actual filesystem state
    return os.path.exists(path)
+
+
def get_datetime():
    """
    Get the current date and time in UTC

    :return: string - e.g. '2020-01-02 03:04:05 UTC'
    """
    # Bug fix: fromtimestamp(time.time()) yields *local* time, which made
    # the ' UTC' suffix wrong on any machine not running in UTC.
    return datetime.datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') + ' UTC'
+
+
def deprecated(func):
    """Decorator that emits a DeprecationWarning each time ``func`` is
    called, then delegates to it unchanged."""
    def wrapper(*args, **kwargs):
        warnings.simplefilter('always', DeprecationWarning)  # turn off filter
        warnings.warn("Call to deprecated function {}.".format(func.__name__),
                      category=DeprecationWarning, stacklevel=2)
        warnings.simplefilter('default', DeprecationWarning)  # reset filter
        return func(*args, **kwargs)

    # carry over the wrapped function's identity for introspection
    wrapper.__name__ = func.__name__
    wrapper.__doc__ = func.__doc__
    wrapper.__dict__.update(func.__dict__)
    return wrapper
+
+
def url_encode(url):
    """
    Convert special characters using %xx escape.

    :param url: str
    :return: str - encoded url
    """
    # Encode text to UTF-8 bytes first so quote() behaves consistently.
    raw = url.encode('utf8') if isinstance(url, text_type) else url
    return quote(raw, ':/%?&=')
diff --git a/marvin_python_toolbox/config.py b/marvin_python_toolbox/config.py
new file mode 100644
index 0000000..99654c8
--- /dev/null
+++ b/marvin_python_toolbox/config.py
@@ -0,0 +1,86 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+import os.path
+import copy
+
+import configparser
+
+from ._compatibility import six
+from ._logging import get_logger
+
+
+__all__ = ['find_inidir', 'parse_ini']
+
+
+logger = get_logger('config')
+
+
def find_inidir(inifilename='marvin.ini'):
    """Walk upward from the current working directory looking for
    ``inifilename``.

    Returns the first directory (cwd up to the filesystem root) containing
    the file, or None when no such directory exists.
    """
    currentdir = os.getcwd()

    while True:
        logger.debug('Looking for marvinini in {}'.format(currentdir))
        if os.path.exists(os.path.join(currentdir, inifilename)):
            logger.debug('marvinini found {}'.format(currentdir))
            return currentdir

        parentdir = os.path.abspath(os.path.join(currentdir, os.pardir))
        if currentdir == parentdir:
            # reached the filesystem root without a hit
            logger.debug('marvinini not found')
            return None

        currentdir = parentdir
+
+
def parse_ini(inipath, defaults=None):
    """Parse a marvin ini file into a flat dict.

    Keys become lowercased '<section>_<option>' names layered over
    ``defaults``. Values are interpolated with ``str.format`` against the
    growing config in two passes; on the second pass comma-separated values
    are split into lists.
    """
    defaults = {} if defaults is None else defaults

    logger.debug("Parsing marvinini '{}' with defaults '{}'".format(inipath, defaults))

    parser = configparser.ConfigParser()
    parser.read(inipath)

    config = copy.deepcopy(defaults)

    # First pass: flatten sections, interpolating against what is known so far
    for section in parser.sections():
        for option, raw in parser.items(section):
            flat_key = '_'.join((section, option)).lower()
            logger.debug('Processing {}: {}'.format(flat_key, raw))
            config[flat_key] = raw.format(**config)

    # Second pass: re-interpolate every entry and split comma lists
    for flat_key, raw in config.items():
        value = raw.format(**config)
        if ',' in value:
            value = value.split(',')
        config[flat_key] = value

    logger.debug('marvinini loaded: {}'.format(config))

    return config
diff --git a/marvin_python_toolbox/decorators.py b/marvin_python_toolbox/decorators.py
new file mode 100644
index 0000000..976b4a2
--- /dev/null
+++ b/marvin_python_toolbox/decorators.py
@@ -0,0 +1,29 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from functools import update_wrapper
+from click.decorators import pass_context
+from click.decorators import command as click_command
+
+
def command(*args, **kwargs):
    """Drop-in replacement for click's ``command`` decorator that always
    passes the click Context as the decorated function's first argument."""
    def decorator(func):
        @pass_context
        def run_with_context(ctx, *cargs, **ckwargs):
            return ctx.invoke(func, ctx, *cargs, **ckwargs)
        # keep func's metadata on the wrapper before registering the command
        return click_command(*args, **kwargs)(update_wrapper(run_with_context, func))
    return decorator
diff --git a/marvin_python_toolbox/engine_base/__init__.py b/marvin_python_toolbox/engine_base/__init__.py
new file mode 100644
index 0000000..2fc12f0
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/__init__.py
@@ -0,0 +1,23 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .engine_base_action import EngineBaseAction, EngineBaseOnlineAction, EngineBaseBatchAction
+from .engine_base_prediction import EngineBasePrediction
+from .engine_base_data_handler import EngineBaseDataHandler
+from .engine_base_training import EngineBaseTraining
+from .stubs import actions_pb2, actions_pb2_grpc
+from .serializers import KerasSerializer
\ No newline at end of file
diff --git a/marvin_python_toolbox/engine_base/engine_base_action.py b/marvin_python_toolbox/engine_base/engine_base_action.py
new file mode 100644
index 0000000..daead75
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/engine_base_action.py
@@ -0,0 +1,229 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import unicode_literals
+import os
+
+from abc import ABCMeta, abstractmethod
+import joblib as serializer
+from concurrent import futures
+import grpc
+import json
+
+from .stubs.actions_pb2 import BatchActionResponse, OnlineActionResponse, ReloadResponse, HealthCheckResponse
+from .stubs import actions_pb2_grpc
+
+from .._compatibility import six
+from .._logging import get_logger
+
+
+__all__ = ['EngineBaseAction', 'EngineBaseBatchAction', 'EngineBaseOnlineAction']
+logger = get_logger('engine_base_action')
+
+
class EngineBaseAction():
    """Base class for Marvin engine actions.

    Provides action-parameter handling, artifact persistence (in memory or
    on local disk under ``<default_root_path>/<engine_name>``) and the gRPC
    reload/health-check handlers shared by batch and online actions.
    """
    __metaclass__ = ABCMeta

    # Class-level defaults; real values are assigned per-instance in __init__.
    _params = {}
    _persistence_mode = None
    _default_root_path = None
    _previous_step = None
    _is_remote_calling = False
    _local_saved_objects = {}

    def __init__(self, **kwargs):
        """Accepted kwargs: 'params', 'persistence_mode' ('memory'|'local'),
        'default_root_path' and 'is_remote_calling'.

        NOTE: the default for 'default_root_path' is evaluated eagerly, so
        the MARVIN_DATA_PATH environment variable must be set even when
        'default_root_path' is passed explicitly (KeyError otherwise).
        """
        self.action_name = self.__class__.__name__
        self._params = self._get_arg(kwargs=kwargs, arg='params')
        self._persistence_mode = self._get_arg(kwargs=kwargs, arg='persistence_mode', default_value='memory')
        self._default_root_path = self._get_arg(kwargs=kwargs, arg='default_root_path', default_value=os.path.join(os.environ['MARVIN_DATA_PATH'], '.artifacts'))
        self._is_remote_calling = self._get_arg(kwargs=kwargs, arg='is_remote_calling', default_value=False)
        logger.debug("Starting {} engine action with {} persistence mode...".format(self.__class__.__name__, self._persistence_mode))

    def _get_arg(self, kwargs, arg, default_value=None):
        """Return kwargs[arg], or default_value when the key is absent."""
        return kwargs.get(arg, default_value)

    def _get_object_file_path(self, object_reference):
        """Build the on-disk artifact path for object_reference.

        The engine name is derived from the action's top-level package name
        ('marvin_<name>_engine' -> '<name>'); the directory is created on
        demand and underscores are stripped from the reference to form the
        file name.
        """
        engine_name = self.__module__.split('.')[0].replace('marvin_', '').replace('_engine', '')
        directory = os.path.join(self._default_root_path, engine_name)

        if not os.path.exists(directory):
            os.makedirs(directory)

        return os.path.join(directory, "{}".format(object_reference.replace('_', '')))

    def _serializer_dump(self, obj, object_file_path):
        """Persist obj: 'metrics' artifacts are written as pretty JSON,
        everything else via joblib (protocol 2, compression level 3)."""
        if object_file_path.split(os.sep)[-1] == 'metrics':
            with open(object_file_path, 'w') as f:
                json.dump(obj, f, sort_keys=True, indent=4, separators=(',', ': '))
        else:
            serializer.dump(obj, object_file_path, protocol=2, compress=3)

    def _serializer_load(self, object_file_path):
        """Inverse of _serializer_dump: JSON for 'metrics', joblib otherwise."""
        if object_file_path.split(os.sep)[-1] == 'metrics':
            with open(object_file_path, 'r') as f:
                return json.load(f)
        else:
            return serializer.load(object_file_path)

    def _save_obj(self, object_reference, obj):
        """Assign obj to self.<object_reference>; in 'local' persistence mode
        also dump it to disk and remember it for later memory release.

        Raises a generic Exception tagged 'MultipleAssignException' when the
        attribute was already assigned and the call is not remote (each
        artifact may be set only once per action run).
        """
        if not self._is_remote_calling:
            if getattr(self, object_reference, None) is not None:
                logger.error("Object {} must be assign only once in each action".format(object_reference))
                raise Exception('MultipleAssignException', object_reference)

        setattr(self, object_reference, obj)

        if self._persistence_mode == 'local':
            object_file_path = self._get_object_file_path(object_reference)
            logger.info("Saving object to {}".format(object_file_path))
            self._serializer_dump(obj, object_file_path)
            logger.info("Object {} saved!".format(object_reference))
            self._local_saved_objects[object_reference] = object_file_path

    def _load_obj(self, object_reference, force=False):
        """Return self.<object_reference>, first loading it from disk when it
        is unset in 'local' mode, or unconditionally when force is True."""
        if (getattr(self, object_reference, None) is None and self._persistence_mode == 'local') or force:
            object_file_path = self._get_object_file_path(object_reference)
            logger.info("Loading object from {}".format(object_file_path))
            setattr(self, object_reference, self._serializer_load(object_file_path))
            logger.info("Object {} loaded!".format(object_reference))

        return getattr(self, object_reference)

    def _release_local_saved_objects(self):
        """Drop the in-memory references to artifacts saved during this
        action; their files remain on disk for later reload."""
        for object_reference in self._local_saved_objects.keys():
            logger.info("Removing object {} from memory..".format(object_reference))
            setattr(self, object_reference, None)

        self._local_saved_objects = {}

    @classmethod
    def retrieve_obj(self, object_file_path):
        """Load and return a joblib-serialized artifact from disk.

        NOTE(review): this classmethod's first parameter is named 'self'
        although it receives the class; consider renaming it to 'cls'.
        """
        logger.info("Retrieve object from {}".format(object_file_path))
        return serializer.load(object_file_path)

    def _remote_reload(self, request, context):
        """gRPC handler: force-reload each artifact named in the
        comma-separated request.artifacts string; returns a ReloadResponse."""
        protocol = request.protocol
        artifacts = request.artifacts

        logger.info("Received message from client with protocol [{}] to reload the [{}] artifacts...".format(protocol, artifacts))

        message = "Reloaded"

        if artifacts:
            for artifact in artifacts.split(","):
                self._load_obj(object_reference=artifact, force=True)

        else:
            message = "Nothing to reload"

        response_message = ReloadResponse(message=message)

        logger.info("Return final results to the client!")
        return response_message

    def _health_check(self, request, context):
        """gRPC handler: report OK when every artifact named in
        request.artifacts is loaded (truthy) on this action instance, NOK
        otherwise or on any unexpected error."""
        logger.info("Received message from client with protocol health check [{}] artifacts...".format(request.artifacts))
        try:
            if request.artifacts:
                for artifact in request.artifacts.split(","):
                    if not getattr(self, artifact):
                        return HealthCheckResponse(status=HealthCheckResponse.NOK)
            return HealthCheckResponse(status=HealthCheckResponse.OK)

        except Exception as e:
            logger.error(e)
            return HealthCheckResponse(status=HealthCheckResponse.NOK)
+
+
class EngineBaseBatchAction(EngineBaseAction):
    """Engine action executed in batch mode: runs on params only, with no
    per-request input message."""

    __metaclass__ = ABCMeta

    @abstractmethod
    def execute(self, params, **kwargs):
        """Run the action's batch logic; implemented by concrete actions."""
        pass

    def _pipeline_execute(self, params):
        """Recursively run every previous step in the chain, then this one."""
        if self._previous_step:
            self._previous_step._pipeline_execute(params)

        logger.info("Start of the {} execute method!".format(self.action_name))
        self.execute(params)
        logger.info("Finish of the {} execute method!".format(self.action_name))

    def _remote_execute(self, request, context):
        """gRPC handler: decode params, run the pipeline, release local
        artifacts from memory and acknowledge with 'Done'."""
        logger.info("Received message from client and sending to engine action...")
        logger.debug("Received Params: {}".format(request.params))

        run_params = json.loads(request.params) if request.params else self._params

        self._pipeline_execute(params=run_params)
        self._release_local_saved_objects()

        logger.info("Handling returned message from engine action...")
        reply = BatchActionResponse(message="Done")

        logger.info("Return final results to the client!")
        return reply

    def _prepare_remote_server(self, port, workers, rpc_workers):
        """Build (but do not start) a gRPC server for batch execution."""
        pool = futures.ThreadPoolExecutor(max_workers=workers)
        server = grpc.server(thread_pool=pool, maximum_concurrent_rpcs=rpc_workers)
        actions_pb2_grpc.add_BatchActionHandlerServicer_to_server(self, server)
        server.add_insecure_port('[::]:{}'.format(port))
        return server
+
+
class EngineBaseOnlineAction(EngineBaseAction):
    """Engine action executed online: receives an input message per request
    and returns a (possibly transformed) message."""

    __metaclass__ = ABCMeta

    @abstractmethod
    def execute(self, input_message, params, **kwargs):
        """Process input_message and return this action's result."""
        pass

    def _pipeline_execute(self, input_message, params):
        """Feed input_message through the previous steps, then this action,
        returning the resulting message."""
        if self._previous_step:
            input_message = self._previous_step._pipeline_execute(input_message, params)

        logger.info("Start of the {} execute method!".format(self.action_name))
        # Bug fix: the original logged "Finish" *after* the return statement,
        # making that line unreachable; capture the result and log first.
        result = self.execute(input_message, params)
        logger.info("Finish of the {} execute method!".format(self.action_name))
        return result

    def _remote_execute(self, request, context):
        """gRPC handler: decode message/params, run the pipeline and wrap
        the result (JSON-encoding non-str results) in an
        OnlineActionResponse."""
        logger.info("Received message from client and sending to engine action...")
        logger.debug("Received Params: {}".format(request.params))
        logger.debug("Received Message: {}".format(request.message))

        input_message = json.loads(request.message) if request.message else None
        params = json.loads(request.params) if request.params else self._params

        _message = self._pipeline_execute(input_message=input_message, params=params)

        logger.info("Handling returned message from engine action...")

        if type(_message) != str:
            _message = json.dumps(_message)

        response_message = OnlineActionResponse(message=_message)

        logger.info("Return final results to the client!")
        return response_message

    def _prepare_remote_server(self, port, workers, rpc_workers):
        """Build (but do not start) a gRPC server for online execution."""
        server = grpc.server(thread_pool=futures.ThreadPoolExecutor(max_workers=workers), maximum_concurrent_rpcs=rpc_workers)
        actions_pb2_grpc.add_OnlineActionHandlerServicer_to_server(self, server)
        server.add_insecure_port('[::]:{}'.format(port))
        return server
diff --git a/marvin_python_toolbox/engine_base/engine_base_data_handler.py b/marvin_python_toolbox/engine_base/engine_base_data_handler.py
new file mode 100644
index 0000000..df0a011
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/engine_base_data_handler.py
@@ -0,0 +1,54 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABCMeta
+from .._compatibility import six
+from .._logging import get_logger
+
+from .engine_base_action import EngineBaseBatchAction
+
+
+__all__ = ['EngineBaseDataHandler']
+logger = get_logger('engine_base_data_handler')
+
+
class EngineBaseDataHandler(EngineBaseBatchAction):
    """Batch action responsible for producing the initial and prepared
    datasets.

    Artifact accessors: reads go through ``_load_obj`` (lazy disk load in
    'local' persistence mode), writes through ``_save_obj``
    (single-assignment guard + optional persist).
    """

    __metaclass__ = ABCMeta

    _initial_dataset = None
    _dataset = None

    def __init__(self, **kwargs):
        """Accepts 'initial_dataset' and 'dataset' kwargs in addition to the
        base-action kwargs."""
        self._initial_dataset = self._get_arg(kwargs=kwargs, arg='initial_dataset')
        self._dataset = self._get_arg(kwargs=kwargs, arg='dataset')
        super(EngineBaseDataHandler, self).__init__(**kwargs)

    @property
    def marvin_initial_dataset(self):
        return self._load_obj(object_reference='_initial_dataset')

    @marvin_initial_dataset.setter
    def marvin_initial_dataset(self, value):
        self._save_obj(object_reference='_initial_dataset', obj=value)

    @property
    def marvin_dataset(self):
        return self._load_obj(object_reference='_dataset')

    @marvin_dataset.setter
    def marvin_dataset(self, value):
        self._save_obj(object_reference='_dataset', obj=value)
diff --git a/marvin_python_toolbox/engine_base/engine_base_prediction.py b/marvin_python_toolbox/engine_base/engine_base_prediction.py
new file mode 100644
index 0000000..1761c3b
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/engine_base_prediction.py
@@ -0,0 +1,56 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABCMeta
+from .._compatibility import six
+from .._logging import get_logger
+
+from .engine_base_action import EngineBaseOnlineAction
+
+
+__all__ = ['EngineBasePrediction']
+logger = get_logger('engine_base_prediction')
+
+
class EngineBasePrediction(EngineBaseOnlineAction):
    """Online action that serves predictions from the trained model.

    Artifact accessors: reads go through ``_load_obj`` (lazy disk load in
    'local' persistence mode), writes through ``_save_obj``.
    """

    __metaclass__ = ABCMeta

    _model = None
    _metrics = None

    def __init__(self, **kwargs):
        """Accepts 'model' and 'metrics' kwargs in addition to the
        base-action kwargs."""
        self._model = self._get_arg(kwargs=kwargs, arg='model')
        self._metrics = self._get_arg(kwargs=kwargs, arg='metrics')
        super(EngineBasePrediction, self).__init__(**kwargs)

    @property
    def marvin_model(self):
        return self._load_obj(object_reference='_model')

    @marvin_model.setter
    def marvin_model(self, value):
        self._save_obj(object_reference='_model', obj=value)

    @property
    def marvin_metrics(self):
        return self._load_obj(object_reference='_metrics')

    @marvin_metrics.setter
    def marvin_metrics(self, value):
        self._save_obj(object_reference='_metrics', obj=value)
+
diff --git a/marvin_python_toolbox/engine_base/engine_base_training.py b/marvin_python_toolbox/engine_base/engine_base_training.py
new file mode 100644
index 0000000..7df7d9f
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/engine_base_training.py
@@ -0,0 +1,66 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from abc import ABCMeta
+from .._compatibility import six
+from .._logging import get_logger
+
+from .engine_base_action import EngineBaseBatchAction
+
+
+__all__ = ['EngineBaseTraining']
+logger = get_logger('engine_base_training')
+
+
class EngineBaseTraining(EngineBaseBatchAction):
    """Batch action that trains the model and produces its metrics.

    Artifact accessors: reads go through ``_load_obj`` (lazy disk load in
    'local' persistence mode), writes through ``_save_obj``.
    """

    __metaclass__ = ABCMeta

    _dataset = None
    _model = None
    _metrics = None

    def __init__(self, **kwargs):
        """Accepts 'dataset', 'model' and 'metrics' kwargs in addition to
        the base-action kwargs."""
        self._dataset = self._get_arg(kwargs=kwargs, arg='dataset')
        self._model = self._get_arg(kwargs=kwargs, arg='model')
        self._metrics = self._get_arg(kwargs=kwargs, arg='metrics')
        super(EngineBaseTraining, self).__init__(**kwargs)

    @property
    def marvin_dataset(self):
        return self._load_obj(object_reference='_dataset')

    @marvin_dataset.setter
    def marvin_dataset(self, value):
        self._save_obj(object_reference='_dataset', obj=value)

    @property
    def marvin_model(self):
        return self._load_obj(object_reference='_model')

    @marvin_model.setter
    def marvin_model(self, value):
        self._save_obj(object_reference='_model', obj=value)

    @property
    def marvin_metrics(self):
        return self._load_obj(object_reference='_metrics')

    @marvin_metrics.setter
    def marvin_metrics(self, value):
        self._save_obj(object_reference='_metrics', obj=value)
+
diff --git a/marvin_python_toolbox/engine_base/protos/actions.proto b/marvin_python_toolbox/engine_base/protos/actions.proto
new file mode 100644
index 0000000..4ab7f60
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/protos/actions.proto
@@ -0,0 +1,66 @@
+/** Copyright [2017] [B2W Digital]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+**/
+
syntax = "proto3";

// gRPC surface for Marvin engine actions. Service method names mirror the
// Python handler methods implemented by the engine_base_action classes.

// Handlers implemented by EngineBaseOnlineAction (per-request message in/out).
service OnlineActionHandler {
	rpc _remote_execute (OnlineActionRequest) returns (OnlineActionResponse) {}
	rpc _remote_reload (ReloadRequest) returns (ReloadResponse) {}
	rpc _health_check (HealthCheckRequest) returns (HealthCheckResponse) {}
}

// Handlers implemented by EngineBaseBatchAction (params-only execution).
service BatchActionHandler {
	rpc _remote_execute (BatchActionRequest) returns (BatchActionResponse) {}
	rpc _remote_reload (ReloadRequest) returns (ReloadResponse) {}
	rpc _health_check (HealthCheckRequest) returns (HealthCheckResponse) {}
}

// message: JSON-encoded input payload; params: JSON-encoded action params.
message OnlineActionRequest {
	string message = 1;
	string params = 2;
}

// message: the action's result, JSON-encoded when it is not already a string.
message OnlineActionResponse {
	string message = 1;
}

// params: JSON-encoded action params.
message BatchActionRequest {
	string params = 1;
}

// message: acknowledgement string (the server replies "Done").
message BatchActionResponse {
	string message = 1;
}

// artifacts: comma-separated artifact names to reload from disk.
message ReloadRequest {
	string protocol = 1;
	string artifacts = 2;
}

message ReloadResponse {
	string message = 1;
}

message HealthCheckRequest {
	// NOTE(review): field numbering starts at 2 with no field 1; legal in
	// proto3 but looks like a leftover from a removed field -- consider
	// adding 'reserved 1;' to make that explicit.
	string artifacts = 2;
}

message HealthCheckResponse {
	enum Status {
		OK = 0;
		NOK = 1;
	}
	Status status = 1;
}
\ No newline at end of file
diff --git a/marvin_python_toolbox/engine_base/serializers/__init__.py b/marvin_python_toolbox/engine_base/serializers/__init__.py
new file mode 100644
index 0000000..73f7796
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/serializers/__init__.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .keras_serializer import KerasSerializer
diff --git a/marvin_python_toolbox/engine_base/serializers/keras_serializer.py b/marvin_python_toolbox/engine_base/serializers/keras_serializer.py
new file mode 100644
index 0000000..1407960
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/serializers/keras_serializer.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from ..._logging import get_logger
+
+logger = get_logger('engine_base_data_handler')
+__all__ = ['KerasSerializer']
+
+
+class KerasSerializer(object):
+    def _serializer_load(self, object_file_path):
+        if object_file_path.split(os.sep)[-1] == 'model':
+            from keras.models import load_model
+
+            logger.debug("Loading model {} using keras serializer.".format(object_file_path))
+            return load_model(object_file_path)
+        else:
+            return super(KerasSerializer, self)._serializer_load(object_file_path)
+
+    def _serializer_dump(self, obj, object_file_path):
+        if object_file_path.split(os.sep)[-1] == 'model':
+            logger.debug("Saving model {} using keras serializer.".format(object_file_path))
+            obj.save(object_file_path)
+        else:
+            super(KerasSerializer, self)._serializer_dump(obj, object_file_path)
diff --git a/marvin_python_toolbox/engine_base/stubs/__init__.py b/marvin_python_toolbox/engine_base/stubs/__init__.py
new file mode 100644
index 0000000..613c8bc
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/stubs/__init__.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .actions_pb2_grpc import *
+from .actions_pb2 import *
\ No newline at end of file
diff --git a/marvin_python_toolbox/engine_base/stubs/actions_pb2.py b/marvin_python_toolbox/engine_base/stubs/actions_pb2.py
new file mode 100644
index 0000000..a05f55d
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/stubs/actions_pb2.py
@@ -0,0 +1,821 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: actions.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='actions.proto',
+  package='',
+  syntax='proto3',
+  serialized_pb=_b('\n\ractions.proto\"6\n\x13OnlineActionRequest\x12\x0f\n\x07message\x18\x01 \x01(\t\x12\x0e\n\x06params\x18\x02 \x01(\t\"\'\n\x14OnlineActionResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"$\n\x12\x42\x61tchActionRequest\x12\x0e\n\x06params\x18\x01 \x01(\t\"&\n\x13\x42\x61tchActionResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"4\n\rReloadRequest\x12\x10\n\x08protocol\x18\x01 \x01(\t\x12\x11\n\tartifacts\x18\x02 \x01(\t\"!\n\x0eReloadResponse\x12\x0f\n\x07message\x18\x01 \x01(\t\"\'\n\x12HealthCheckRequest\x12\x11\n\tartifacts\x18\x02 \x01(\t\"]\n\x13HealthCheckResponse\x12+\n\x06status\x18\x01 \x01(\x0e\x32\x1b.HealthCheckResponse.Status\"\x19\n\x06Status\x12\x06\n\x02OK\x10\x00\x12\x07\n\x03NOK\x10\x01\x32\xca\x01\n\x13OnlineActionHandler\x12@\n\x0f_remote_execute\x12\x14.OnlineActionRequest\x1a\x15.OnlineActionResponse\"\x00\x12\x33\n\x0e_remote_reload\x12\x0e.ReloadRequest\x1a\x0f.ReloadResponse\"\x00\x12<\n\r_health_check\x12\x13.HealthCheckRequest\x1a\x14.HealthCheckResponse\"\x00\x32\xc7\x01\n\x12\x42\x61tchActionHandler\x12>\n\x0f_remote_execute\x12\x13.BatchActionRequest\x1a\x14.BatchActionResponse\"\x00\x12\x33\n\x0e_remote_reload\x12\x0e.ReloadRequest\x1a\x0f.ReloadResponse\"\x00\x12<\n\r_health_check\x12\x13.HealthCheckRequest\x1a\x14.HealthCheckResponse\"\x00\x62\x06proto3')
+)
+
+
+
+_HEALTHCHECKRESPONSE_STATUS = _descriptor.EnumDescriptor(
+  name='Status',
+  full_name='HealthCheckResponse.Status',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='OK', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NOK', index=1, number=1,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=390,
+  serialized_end=415,
+)
+_sym_db.RegisterEnumDescriptor(_HEALTHCHECKRESPONSE_STATUS)
+
+
+_ONLINEACTIONREQUEST = _descriptor.Descriptor(
+  name='OnlineActionRequest',
+  full_name='OnlineActionRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='message', full_name='OnlineActionRequest.message', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='params', full_name='OnlineActionRequest.params', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=17,
+  serialized_end=71,
+)
+
+
+_ONLINEACTIONRESPONSE = _descriptor.Descriptor(
+  name='OnlineActionResponse',
+  full_name='OnlineActionResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='message', full_name='OnlineActionResponse.message', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=73,
+  serialized_end=112,
+)
+
+
+_BATCHACTIONREQUEST = _descriptor.Descriptor(
+  name='BatchActionRequest',
+  full_name='BatchActionRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='params', full_name='BatchActionRequest.params', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=114,
+  serialized_end=150,
+)
+
+
+_BATCHACTIONRESPONSE = _descriptor.Descriptor(
+  name='BatchActionResponse',
+  full_name='BatchActionResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='message', full_name='BatchActionResponse.message', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=152,
+  serialized_end=190,
+)
+
+
+_RELOADREQUEST = _descriptor.Descriptor(
+  name='ReloadRequest',
+  full_name='ReloadRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='protocol', full_name='ReloadRequest.protocol', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='artifacts', full_name='ReloadRequest.artifacts', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=192,
+  serialized_end=244,
+)
+
+
+_RELOADRESPONSE = _descriptor.Descriptor(
+  name='ReloadResponse',
+  full_name='ReloadResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='message', full_name='ReloadResponse.message', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=246,
+  serialized_end=279,
+)
+
+
+_HEALTHCHECKREQUEST = _descriptor.Descriptor(
+  name='HealthCheckRequest',
+  full_name='HealthCheckRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='artifacts', full_name='HealthCheckRequest.artifacts', index=0,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=281,
+  serialized_end=320,
+)
+
+
+_HEALTHCHECKRESPONSE = _descriptor.Descriptor(
+  name='HealthCheckResponse',
+  full_name='HealthCheckResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='status', full_name='HealthCheckResponse.status', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+    _HEALTHCHECKRESPONSE_STATUS,
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=322,
+  serialized_end=415,
+)
+
+_HEALTHCHECKRESPONSE.fields_by_name['status'].enum_type = _HEALTHCHECKRESPONSE_STATUS
+_HEALTHCHECKRESPONSE_STATUS.containing_type = _HEALTHCHECKRESPONSE
+DESCRIPTOR.message_types_by_name['OnlineActionRequest'] = _ONLINEACTIONREQUEST
+DESCRIPTOR.message_types_by_name['OnlineActionResponse'] = _ONLINEACTIONRESPONSE
+DESCRIPTOR.message_types_by_name['BatchActionRequest'] = _BATCHACTIONREQUEST
+DESCRIPTOR.message_types_by_name['BatchActionResponse'] = _BATCHACTIONRESPONSE
+DESCRIPTOR.message_types_by_name['ReloadRequest'] = _RELOADREQUEST
+DESCRIPTOR.message_types_by_name['ReloadResponse'] = _RELOADRESPONSE
+DESCRIPTOR.message_types_by_name['HealthCheckRequest'] = _HEALTHCHECKREQUEST
+DESCRIPTOR.message_types_by_name['HealthCheckResponse'] = _HEALTHCHECKRESPONSE
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+OnlineActionRequest = _reflection.GeneratedProtocolMessageType('OnlineActionRequest', (_message.Message,), dict(
+  DESCRIPTOR = _ONLINEACTIONREQUEST,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:OnlineActionRequest)
+  ))
+_sym_db.RegisterMessage(OnlineActionRequest)
+
+OnlineActionResponse = _reflection.GeneratedProtocolMessageType('OnlineActionResponse', (_message.Message,), dict(
+  DESCRIPTOR = _ONLINEACTIONRESPONSE,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:OnlineActionResponse)
+  ))
+_sym_db.RegisterMessage(OnlineActionResponse)
+
+BatchActionRequest = _reflection.GeneratedProtocolMessageType('BatchActionRequest', (_message.Message,), dict(
+  DESCRIPTOR = _BATCHACTIONREQUEST,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:BatchActionRequest)
+  ))
+_sym_db.RegisterMessage(BatchActionRequest)
+
+BatchActionResponse = _reflection.GeneratedProtocolMessageType('BatchActionResponse', (_message.Message,), dict(
+  DESCRIPTOR = _BATCHACTIONRESPONSE,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:BatchActionResponse)
+  ))
+_sym_db.RegisterMessage(BatchActionResponse)
+
+ReloadRequest = _reflection.GeneratedProtocolMessageType('ReloadRequest', (_message.Message,), dict(
+  DESCRIPTOR = _RELOADREQUEST,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:ReloadRequest)
+  ))
+_sym_db.RegisterMessage(ReloadRequest)
+
+ReloadResponse = _reflection.GeneratedProtocolMessageType('ReloadResponse', (_message.Message,), dict(
+  DESCRIPTOR = _RELOADRESPONSE,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:ReloadResponse)
+  ))
+_sym_db.RegisterMessage(ReloadResponse)
+
+HealthCheckRequest = _reflection.GeneratedProtocolMessageType('HealthCheckRequest', (_message.Message,), dict(
+  DESCRIPTOR = _HEALTHCHECKREQUEST,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:HealthCheckRequest)
+  ))
+_sym_db.RegisterMessage(HealthCheckRequest)
+
+HealthCheckResponse = _reflection.GeneratedProtocolMessageType('HealthCheckResponse', (_message.Message,), dict(
+  DESCRIPTOR = _HEALTHCHECKRESPONSE,
+  __module__ = 'actions_pb2'
+  # @@protoc_insertion_point(class_scope:HealthCheckResponse)
+  ))
+_sym_db.RegisterMessage(HealthCheckResponse)
+
+
+
+_ONLINEACTIONHANDLER = _descriptor.ServiceDescriptor(
+  name='OnlineActionHandler',
+  full_name='OnlineActionHandler',
+  file=DESCRIPTOR,
+  index=0,
+  options=None,
+  serialized_start=418,
+  serialized_end=620,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='_remote_execute',
+    full_name='OnlineActionHandler._remote_execute',
+    index=0,
+    containing_service=None,
+    input_type=_ONLINEACTIONREQUEST,
+    output_type=_ONLINEACTIONRESPONSE,
+    options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='_remote_reload',
+    full_name='OnlineActionHandler._remote_reload',
+    index=1,
+    containing_service=None,
+    input_type=_RELOADREQUEST,
+    output_type=_RELOADRESPONSE,
+    options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='_health_check',
+    full_name='OnlineActionHandler._health_check',
+    index=2,
+    containing_service=None,
+    input_type=_HEALTHCHECKREQUEST,
+    output_type=_HEALTHCHECKRESPONSE,
+    options=None,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_ONLINEACTIONHANDLER)
+
+DESCRIPTOR.services_by_name['OnlineActionHandler'] = _ONLINEACTIONHANDLER
+
+
+_BATCHACTIONHANDLER = _descriptor.ServiceDescriptor(
+  name='BatchActionHandler',
+  full_name='BatchActionHandler',
+  file=DESCRIPTOR,
+  index=1,
+  options=None,
+  serialized_start=623,
+  serialized_end=822,
+  methods=[
+  _descriptor.MethodDescriptor(
+    name='_remote_execute',
+    full_name='BatchActionHandler._remote_execute',
+    index=0,
+    containing_service=None,
+    input_type=_BATCHACTIONREQUEST,
+    output_type=_BATCHACTIONRESPONSE,
+    options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='_remote_reload',
+    full_name='BatchActionHandler._remote_reload',
+    index=1,
+    containing_service=None,
+    input_type=_RELOADREQUEST,
+    output_type=_RELOADRESPONSE,
+    options=None,
+  ),
+  _descriptor.MethodDescriptor(
+    name='_health_check',
+    full_name='BatchActionHandler._health_check',
+    index=2,
+    containing_service=None,
+    input_type=_HEALTHCHECKREQUEST,
+    output_type=_HEALTHCHECKRESPONSE,
+    options=None,
+  ),
+])
+_sym_db.RegisterServiceDescriptor(_BATCHACTIONHANDLER)
+
+DESCRIPTOR.services_by_name['BatchActionHandler'] = _BATCHACTIONHANDLER
+
+try:
+  # THESE ELEMENTS WILL BE DEPRECATED.
+  # Please use the generated *_pb2_grpc.py files instead.
+  import grpc
+  from grpc.beta import implementations as beta_implementations
+  from grpc.beta import interfaces as beta_interfaces
+  from grpc.framework.common import cardinality
+  from grpc.framework.interfaces.face import utilities as face_utilities
+
+
+  class OnlineActionHandlerStub(object):
+    """Client-side stub for the OnlineActionHandler gRPC service.
+
+    Generated by protoc (DO NOT EDIT by hand — regenerate from actions.proto).
+    """
+    pass
+
+    def __init__(self, channel):
+      """Constructor.
+
+      Args:
+        channel: A grpc.Channel.
+      """
+      self._remote_execute = channel.unary_unary(
+          '/OnlineActionHandler/_remote_execute',
+          request_serializer=OnlineActionRequest.SerializeToString,
+          response_deserializer=OnlineActionResponse.FromString,
+          )
+      self._remote_reload = channel.unary_unary(
+          '/OnlineActionHandler/_remote_reload',
+          request_serializer=ReloadRequest.SerializeToString,
+          response_deserializer=ReloadResponse.FromString,
+          )
+      self._health_check = channel.unary_unary(
+          '/OnlineActionHandler/_health_check',
+          request_serializer=HealthCheckRequest.SerializeToString,
+          response_deserializer=HealthCheckResponse.FromString,
+          )
+
+
+  class OnlineActionHandlerServicer(object):
+    """Server-side interface for OnlineActionHandler; override the RPC methods.
+
+    Generated by protoc (DO NOT EDIT by hand — regenerate from actions.proto).
+    """
+    pass
+
+    def _remote_execute(self, request, context):
+      """Abstract RPC handler: reports UNIMPLEMENTED until overridden."""
+      pass
+      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+      context.set_details('Method not implemented!')
+      raise NotImplementedError('Method not implemented!')
+
+    def _remote_reload(self, request, context):
+      """Abstract RPC handler: reports UNIMPLEMENTED until overridden."""
+      pass
+      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+      context.set_details('Method not implemented!')
+      raise NotImplementedError('Method not implemented!')
+
+    def _health_check(self, request, context):
+      """Abstract RPC handler: reports UNIMPLEMENTED until overridden."""
+      pass
+      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+      context.set_details('Method not implemented!')
+      raise NotImplementedError('Method not implemented!')
+
+
+  def add_OnlineActionHandlerServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        '_remote_execute': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_execute,
+            request_deserializer=OnlineActionRequest.FromString,
+            response_serializer=OnlineActionResponse.SerializeToString,
+        ),
+        '_remote_reload': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_reload,
+            request_deserializer=ReloadRequest.FromString,
+            response_serializer=ReloadResponse.SerializeToString,
+        ),
+        '_health_check': grpc.unary_unary_rpc_method_handler(
+            servicer._health_check,
+            request_deserializer=HealthCheckRequest.FromString,
+            response_serializer=HealthCheckResponse.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        'OnlineActionHandler', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+  class BatchActionHandlerStub(object):
+    # missing associated documentation comment in .proto file
+    pass
+
+    def __init__(self, channel):
+      """Constructor.
+
+      Args:
+        channel: A grpc.Channel.
+      """
+      self._remote_execute = channel.unary_unary(
+          '/BatchActionHandler/_remote_execute',
+          request_serializer=BatchActionRequest.SerializeToString,
+          response_deserializer=BatchActionResponse.FromString,
+          )
+      self._remote_reload = channel.unary_unary(
+          '/BatchActionHandler/_remote_reload',
+          request_serializer=ReloadRequest.SerializeToString,
+          response_deserializer=ReloadResponse.FromString,
+          )
+      self._health_check = channel.unary_unary(
+          '/BatchActionHandler/_health_check',
+          request_serializer=HealthCheckRequest.SerializeToString,
+          response_deserializer=HealthCheckResponse.FromString,
+          )
+
+
+  class BatchActionHandlerServicer(object):
+    # missing associated documentation comment in .proto file
+    pass
+
+    def _remote_execute(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+      context.set_details('Method not implemented!')
+      raise NotImplementedError('Method not implemented!')
+
+    def _remote_reload(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+      context.set_details('Method not implemented!')
+      raise NotImplementedError('Method not implemented!')
+
+    def _health_check(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.set_code(grpc.StatusCode.UNIMPLEMENTED)
+      context.set_details('Method not implemented!')
+      raise NotImplementedError('Method not implemented!')
+
+
+  def add_BatchActionHandlerServicer_to_server(servicer, server):
+    rpc_method_handlers = {
+        '_remote_execute': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_execute,
+            request_deserializer=BatchActionRequest.FromString,
+            response_serializer=BatchActionResponse.SerializeToString,
+        ),
+        '_remote_reload': grpc.unary_unary_rpc_method_handler(
+            servicer._remote_reload,
+            request_deserializer=ReloadRequest.FromString,
+            response_serializer=ReloadResponse.SerializeToString,
+        ),
+        '_health_check': grpc.unary_unary_rpc_method_handler(
+            servicer._health_check,
+            request_deserializer=HealthCheckRequest.FromString,
+            response_serializer=HealthCheckResponse.SerializeToString,
+        ),
+    }
+    generic_handler = grpc.method_handlers_generic_handler(
+        'BatchActionHandler', rpc_method_handlers)
+    server.add_generic_rpc_handlers((generic_handler,))
+
+
+  class BetaOnlineActionHandlerServicer(object):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This class was generated
+    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+    # missing associated documentation comment in .proto file
+    pass
+    def _remote_execute(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+    def _remote_reload(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+    def _health_check(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+
+  class BetaOnlineActionHandlerStub(object):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This class was generated
+    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+    # missing associated documentation comment in .proto file
+    pass
+    def _remote_execute(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+      # missing associated documentation comment in .proto file
+      pass
+      raise NotImplementedError()
+    _remote_execute.future = None
+    def _remote_reload(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+      # missing associated documentation comment in .proto file
+      pass
+      raise NotImplementedError()
+    _remote_reload.future = None
+    def _health_check(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+      # missing associated documentation comment in .proto file
+      pass
+      raise NotImplementedError()
+    _health_check.future = None
+
+
+  def beta_create_OnlineActionHandler_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This function was
+    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+    request_deserializers = {
+      ('OnlineActionHandler', '_health_check'): HealthCheckRequest.FromString,
+      ('OnlineActionHandler', '_remote_execute'): OnlineActionRequest.FromString,
+      ('OnlineActionHandler', '_remote_reload'): ReloadRequest.FromString,
+    }
+    response_serializers = {
+      ('OnlineActionHandler', '_health_check'): HealthCheckResponse.SerializeToString,
+      ('OnlineActionHandler', '_remote_execute'): OnlineActionResponse.SerializeToString,
+      ('OnlineActionHandler', '_remote_reload'): ReloadResponse.SerializeToString,
+    }
+    method_implementations = {
+      ('OnlineActionHandler', '_health_check'): face_utilities.unary_unary_inline(servicer._health_check),
+      ('OnlineActionHandler', '_remote_execute'): face_utilities.unary_unary_inline(servicer._remote_execute),
+      ('OnlineActionHandler', '_remote_reload'): face_utilities.unary_unary_inline(servicer._remote_reload),
+    }
+    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
+    return beta_implementations.server(method_implementations, options=server_options)
+
+
+  def beta_create_OnlineActionHandler_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This function was
+    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+    request_serializers = {
+      ('OnlineActionHandler', '_health_check'): HealthCheckRequest.SerializeToString,
+      ('OnlineActionHandler', '_remote_execute'): OnlineActionRequest.SerializeToString,
+      ('OnlineActionHandler', '_remote_reload'): ReloadRequest.SerializeToString,
+    }
+    response_deserializers = {
+      ('OnlineActionHandler', '_health_check'): HealthCheckResponse.FromString,
+      ('OnlineActionHandler', '_remote_execute'): OnlineActionResponse.FromString,
+      ('OnlineActionHandler', '_remote_reload'): ReloadResponse.FromString,
+    }
+    cardinalities = {
+      '_health_check': cardinality.Cardinality.UNARY_UNARY,
+      '_remote_execute': cardinality.Cardinality.UNARY_UNARY,
+      '_remote_reload': cardinality.Cardinality.UNARY_UNARY,
+    }
+    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
+    return beta_implementations.dynamic_stub(channel, 'OnlineActionHandler', cardinalities, options=stub_options)
+
+
+  class BetaBatchActionHandlerServicer(object):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This class was generated
+    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+    # missing associated documentation comment in .proto file
+    pass
+    def _remote_execute(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+    def _remote_reload(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+    def _health_check(self, request, context):
+      # missing associated documentation comment in .proto file
+      pass
+      context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
+
+
+  class BetaBatchActionHandlerStub(object):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This class was generated
+    only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0."""
+    # missing associated documentation comment in .proto file
+    pass
+    def _remote_execute(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+      # missing associated documentation comment in .proto file
+      pass
+      raise NotImplementedError()
+    _remote_execute.future = None
+    def _remote_reload(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+      # missing associated documentation comment in .proto file
+      pass
+      raise NotImplementedError()
+    _remote_reload.future = None
+    def _health_check(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
+      # missing associated documentation comment in .proto file
+      pass
+      raise NotImplementedError()
+    _health_check.future = None
+
+
+  def beta_create_BatchActionHandler_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This function was
+    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+    request_deserializers = {
+      ('BatchActionHandler', '_health_check'): HealthCheckRequest.FromString,
+      ('BatchActionHandler', '_remote_execute'): BatchActionRequest.FromString,
+      ('BatchActionHandler', '_remote_reload'): ReloadRequest.FromString,
+    }
+    response_serializers = {
+      ('BatchActionHandler', '_health_check'): HealthCheckResponse.SerializeToString,
+      ('BatchActionHandler', '_remote_execute'): BatchActionResponse.SerializeToString,
+      ('BatchActionHandler', '_remote_reload'): ReloadResponse.SerializeToString,
+    }
+    method_implementations = {
+      ('BatchActionHandler', '_health_check'): face_utilities.unary_unary_inline(servicer._health_check),
+      ('BatchActionHandler', '_remote_execute'): face_utilities.unary_unary_inline(servicer._remote_execute),
+      ('BatchActionHandler', '_remote_reload'): face_utilities.unary_unary_inline(servicer._remote_reload),
+    }
+    server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
+    return beta_implementations.server(method_implementations, options=server_options)
+
+
+  def beta_create_BatchActionHandler_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
+    """The Beta API is deprecated for 0.15.0 and later.
+
+    It is recommended to use the GA API (classes and functions in this
+    file not marked beta) for all further purposes. This function was
+    generated only to ease transition from grpcio<0.15.0 to grpcio>=0.15.0"""
+    request_serializers = {
+      ('BatchActionHandler', '_health_check'): HealthCheckRequest.SerializeToString,
+      ('BatchActionHandler', '_remote_execute'): BatchActionRequest.SerializeToString,
+      ('BatchActionHandler', '_remote_reload'): ReloadRequest.SerializeToString,
+    }
+    response_deserializers = {
+      ('BatchActionHandler', '_health_check'): HealthCheckResponse.FromString,
+      ('BatchActionHandler', '_remote_execute'): BatchActionResponse.FromString,
+      ('BatchActionHandler', '_remote_reload'): ReloadResponse.FromString,
+    }
+    cardinalities = {
+      '_health_check': cardinality.Cardinality.UNARY_UNARY,
+      '_remote_execute': cardinality.Cardinality.UNARY_UNARY,
+      '_remote_reload': cardinality.Cardinality.UNARY_UNARY,
+    }
+    stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
+    return beta_implementations.dynamic_stub(channel, 'BatchActionHandler', cardinalities, options=stub_options)
+except ImportError:
+  pass
+# @@protoc_insertion_point(module_scope)
diff --git a/marvin_python_toolbox/engine_base/stubs/actions_pb2_grpc.py b/marvin_python_toolbox/engine_base/stubs/actions_pb2_grpc.py
new file mode 100644
index 0000000..1821456
--- /dev/null
+++ b/marvin_python_toolbox/engine_base/stubs/actions_pb2_grpc.py
@@ -0,0 +1,158 @@
+# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+import grpc
+
+# from .actions_pb2 import * /python3
+# import actions_pb2 as actions__pb2 /python2
+from ..stubs import actions_pb2 as actions__pb2
+
+
class OnlineActionHandlerStub(object):
  # missing associated documentation comment in .proto file
  pass

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    # Each attribute is a grpc UnaryUnaryMultiCallable bound to one RPC of
    # the OnlineActionHandler service, with protobuf (de)serialization
    # attached; call it like a function: stub._remote_execute(request).
    self._remote_execute = channel.unary_unary(
        '/OnlineActionHandler/_remote_execute',
        request_serializer=actions__pb2.OnlineActionRequest.SerializeToString,
        response_deserializer=actions__pb2.OnlineActionResponse.FromString,
        )
    self._remote_reload = channel.unary_unary(
        '/OnlineActionHandler/_remote_reload',
        request_serializer=actions__pb2.ReloadRequest.SerializeToString,
        response_deserializer=actions__pb2.ReloadResponse.FromString,
        )
    self._health_check = channel.unary_unary(
        '/OnlineActionHandler/_health_check',
        request_serializer=actions__pb2.HealthCheckRequest.SerializeToString,
        response_deserializer=actions__pb2.HealthCheckResponse.FromString,
        )
+
+
class OnlineActionHandlerServicer(object):
  # missing associated documentation comment in .proto file
  pass

  # Abstract base servicer: every method answers grpc UNIMPLEMENTED until a
  # subclass overrides it with real behavior.
  def _remote_execute(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def _remote_reload(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def _health_check(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
+
+
def add_OnlineActionHandlerServicer_to_server(servicer, server):
  # Register the servicer's three unary-unary RPC handlers with the given
  # grpc.Server under the 'OnlineActionHandler' service name.
  rpc_method_handlers = {
      '_remote_execute': grpc.unary_unary_rpc_method_handler(
          servicer._remote_execute,
          request_deserializer=actions__pb2.OnlineActionRequest.FromString,
          response_serializer=actions__pb2.OnlineActionResponse.SerializeToString,
      ),
      '_remote_reload': grpc.unary_unary_rpc_method_handler(
          servicer._remote_reload,
          request_deserializer=actions__pb2.ReloadRequest.FromString,
          response_serializer=actions__pb2.ReloadResponse.SerializeToString,
      ),
      '_health_check': grpc.unary_unary_rpc_method_handler(
          servicer._health_check,
          request_deserializer=actions__pb2.HealthCheckRequest.FromString,
          response_serializer=actions__pb2.HealthCheckResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'OnlineActionHandler', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
+
+
class BatchActionHandlerStub(object):
  # missing associated documentation comment in .proto file
  pass

  def __init__(self, channel):
    """Constructor.

    Args:
      channel: A grpc.Channel.
    """
    # Each attribute is a grpc UnaryUnaryMultiCallable bound to one RPC of
    # the BatchActionHandler service, with protobuf (de)serialization
    # attached.
    self._remote_execute = channel.unary_unary(
        '/BatchActionHandler/_remote_execute',
        request_serializer=actions__pb2.BatchActionRequest.SerializeToString,
        response_deserializer=actions__pb2.BatchActionResponse.FromString,
        )
    self._remote_reload = channel.unary_unary(
        '/BatchActionHandler/_remote_reload',
        request_serializer=actions__pb2.ReloadRequest.SerializeToString,
        response_deserializer=actions__pb2.ReloadResponse.FromString,
        )
    self._health_check = channel.unary_unary(
        '/BatchActionHandler/_health_check',
        request_serializer=actions__pb2.HealthCheckRequest.SerializeToString,
        response_deserializer=actions__pb2.HealthCheckResponse.FromString,
        )
+
+
class BatchActionHandlerServicer(object):
  # missing associated documentation comment in .proto file
  pass

  # Abstract base servicer: every method answers grpc UNIMPLEMENTED until a
  # subclass overrides it with real behavior.
  def _remote_execute(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def _remote_reload(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')

  def _health_check(self, request, context):
    # missing associated documentation comment in .proto file
    pass
    context.set_code(grpc.StatusCode.UNIMPLEMENTED)
    context.set_details('Method not implemented!')
    raise NotImplementedError('Method not implemented!')
+
+
def add_BatchActionHandlerServicer_to_server(servicer, server):
  # Register the servicer's three unary-unary RPC handlers with the given
  # grpc.Server under the 'BatchActionHandler' service name.
  rpc_method_handlers = {
      '_remote_execute': grpc.unary_unary_rpc_method_handler(
          servicer._remote_execute,
          request_deserializer=actions__pb2.BatchActionRequest.FromString,
          response_serializer=actions__pb2.BatchActionResponse.SerializeToString,
      ),
      '_remote_reload': grpc.unary_unary_rpc_method_handler(
          servicer._remote_reload,
          request_deserializer=actions__pb2.ReloadRequest.FromString,
          response_serializer=actions__pb2.ReloadResponse.SerializeToString,
      ),
      '_health_check': grpc.unary_unary_rpc_method_handler(
          servicer._health_check,
          request_deserializer=actions__pb2.HealthCheckRequest.FromString,
          response_serializer=actions__pb2.HealthCheckResponse.SerializeToString,
      ),
  }
  generic_handler = grpc.method_handlers_generic_handler(
      'BatchActionHandler', rpc_method_handlers)
  server.add_generic_rpc_handlers((generic_handler,))
diff --git a/marvin_python_toolbox/extras/marvin_bash_completion b/marvin_python_toolbox/extras/marvin_bash_completion
new file mode 100644
index 0000000..4ab247b
--- /dev/null
+++ b/marvin_python_toolbox/extras/marvin_bash_completion
@@ -0,0 +1,8 @@
# Bash completion for the "marvin" CLI. Click provides the completions
# itself: re-invoking the command with _MARVIN_COMPLETE=complete makes it
# print candidate words instead of running, and those fill COMPREPLY.
_marvin_completion() {
    COMPREPLY=( $( env COMP_WORDS="${COMP_WORDS[*]}" \
                   COMP_CWORD=$COMP_CWORD \
                   _MARVIN_COMPLETE=complete $1 ) )
    return 0
}

# -o default: fall back to filename completion when the CLI offers nothing.
complete -F _marvin_completion -o default marvin;
diff --git a/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py b/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py
new file mode 100644
index 0000000..b5f7de0
--- /dev/null
+++ b/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py
@@ -0,0 +1,109 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
def marvin_code_export(model, **kwargs):
    """Jupyter pre-save hook that exports marked notebook cells into the
    engine's action classes.

    For every code cell tagged with ``metadata.marvin_cell``, the cell
    source is PEP8-formatted, re-indented as a method body, references to
    the marvin artifacts are rewritten as ``self.`` attributes, and the
    result replaces the body of the matching action class's ``execute``
    method on disk.

    Args:
        model: Jupyter contents model; only ``type == 'notebook'`` is handled.
        **kwargs: Extra arguments supplied by the contents manager (ignored).
    """

    import autopep8
    import inspect
    import re
    from marvin_python_toolbox.common.config import Config

    print("Executing the marvin export hook script...")

    if model['type'] != 'notebook':
        return

    cells = model['content']['cells']

    # Artifact names that must become instance attributes in the exported
    # code (e.g. "marvin_dataset" -> "self.marvin_dataset").
    artifacts = {
        'marvin_initial_dataset': re.compile(r"(\bmarvin_initial_dataset\b)"),
        'marvin_dataset': re.compile(r"(\bmarvin_dataset\b)"),
        'marvin_model': re.compile(r"(\bmarvin_model\b)"),
        'marvin_metrics': re.compile(r"(\bmarvin_metrics\b)")
    }

    # Raw strings here: the original non-raw patterns relied on invalid
    # escape sequences ("\s"), which newer Python 3 versions warn on and
    # will eventually reject.
    batch_exec_pattern = re.compile(r"(def\s+execute\s*\(\s*self\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")
    online_exec_pattern = re.compile(r"(def\s+execute\s*\(\s*self\s*,\s*input_message\s*,\s*params\s*,\s*\*\*kwargs\s*\)\s*:)")

    # Maps the cell tag to the engine class whose source file is rewritten.
    CLAZZES = {
        "acquisitor": "AcquisitorAndCleaner",
        "tpreparator": "TrainingPreparator",
        "trainer": "Trainer",
        "evaluator": "MetricsEvaluator",
        "ppreparator": "PredictionPreparator",
        "predictor": "Predictor",
        "feedback": "Feedback"
    }

    for cell in cells:
        if cell['cell_type'] == 'code' and cell["metadata"].get("marvin_cell", False):
            source = cell["source"]
            new_source = autopep8.fix_code(source, options={'max_line_length': 160})

            marvin_action = cell["metadata"]["marvin_cell"]
            marvin_action_clazz = getattr(__import__(Config.get("package")), CLAZZES[marvin_action])
            source_path = inspect.getsourcefile(marvin_action_clazz)

            # Re-indent each line to method-body depth and rewrite artifact
            # references, skipping import/from/print lines.
            fnew_source_lines = []
            for new_line in new_source.split("\n"):
                fnew_line = "        " + new_line + "\n" if new_line.strip() else "\n"

                if not new_line.startswith("import") and not new_line.startswith("from") and not new_line.startswith("print"):
                    for artifact in artifacts.keys():
                        fnew_line = re.sub(artifacts[artifact], 'self.' + artifact, fnew_line)

                fnew_source_lines.append(fnew_line)

            # Online actions get an explicit trailing return and match the
            # (input_message, params) execute signature; everything else is
            # a batch action with the (params) signature.
            if marvin_action == "predictor":
                fnew_source_lines.append("        return final_prediction\n")
                exec_pattern = online_exec_pattern

            elif marvin_action == "ppreparator":
                fnew_source_lines.append("        return input_message\n")
                exec_pattern = online_exec_pattern

            elif marvin_action == "feedback":
                fnew_source_lines.append("        return \"Thanks for the feedback!\"\n")
                exec_pattern = online_exec_pattern

            else:
                exec_pattern = batch_exec_pattern

            fnew_source = "".join(fnew_source_lines)

            # Copy the file back up to and including the matched
            # "def execute" line, write the new body, and truncate whatever
            # followed the old body.
            with open(source_path, 'r+') as fp:
                lines = fp.readlines()
                fp.seek(0)
                for line in lines:
                    if re.findall(exec_pattern, line):
                        fp.write(line)
                        fp.write(fnew_source)
                        fp.truncate()

                        break
                    else:
                        fp.write(line)

            print("File {} updated!".format(source_path))

    print("Finished the marvin export hook script...")


# Install the hook so every notebook save triggers the export ("c" is the
# Jupyter config object provided by jupyter_notebook_config.py's context).
c.FileContentsManager.pre_save_hook = marvin_code_export
diff --git a/marvin_python_toolbox/extras/notebook_extensions/main.js b/marvin_python_toolbox/extras/notebook_extensions/main.js
new file mode 100644
index 0000000..dea722b
--- /dev/null
+++ b/marvin_python_toolbox/extras/notebook_extensions/main.js
@@ -0,0 +1,152 @@
+//    Copyright [2017] [B2W Digital]
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
// Notebook front-end extension: adds a toolbar drop-down plus a toolbar
// button that mark/unmark code cells as marvin action cells. The mark is
// stored in cell.metadata.marvin_cell and consumed by the pre-save hook.
define([
    'base/js/namespace',
    'base/js/events',
], function(
    Jupyter,
    events
) {
	"use strict";

	var log_prefix = '[MARVIN]';

    function load_ipython_extension() {

    	// Appends the action <select> drop-down to the notebook toolbar.
    	var add_marvin_options = function () {
    		var select = $('<select/>').attr("class", "form-control select-xs").attr("id", "marvin_action");
    		select.append($('<option/>'));
    		select.append($('<option/>').attr('value', 'acquisitor').attr("id", "acquisitor").text('Acquisitor and Cleaner'));
    		select.append($('<option/>').attr('value', 'tpreparator').attr("id", "tpreparator").text('Training Preparator'));
    		select.append($('<option/>').attr('value', 'trainer').attr("id", "trainer").text('Trainer'));
    		select.append($('<option/>').attr('value', 'evaluator').attr("id", "evaluator").text('Metrics Evaluator'));
    		select.append($('<option/>').attr('value', 'ppreparator').attr("id", "ppreparator").text('Prediction Preparator'));
    		select.append($('<option/>').attr('value', 'predictor').attr("id", "predictor").text('Predictor'));
            select.append($('<option/>').attr('value', 'feedback').attr("id", "feedback").text('Feedback'));
    		Jupyter.toolbar.element.append(select);
    	};

    	// Toolbar-button handler: toggles the marvin_cell mark on the
    	// currently selected code cell. Each action can be assigned to at
    	// most one cell (its option is disabled while in use).
    	var handler = function () {

    		var cell = Jupyter.notebook.get_selected_cell();
    		if (cell.cell_type != "code"){
    			alert("This is not a python code cell !!!");
    			return;
    		}

    		var marvin_action = document.getElementById("marvin_action");
			if (marvin_action.selectedIndex == 0 && !cell.metadata.marvin_cell){
				marvin_action.focus();
				return;
			}

			if(cell.metadata.marvin_cell){
				// Unmark: re-enable the action in the drop-down.
				marvin_action.options.namedItem(cell.metadata.marvin_cell).disabled = false;
				update_div(cell, cell.metadata.marvin_cell);
				delete cell.metadata.marvin_cell;
				console.log(log_prefix, "Unmark cell %s as marvin_cell!", cell.cell_id);

			}else{
				// Mark: record the selected action and disable its option.
				marvin_action.selectedOptions[0].disabled = true;
				cell.metadata.marvin_cell = marvin_action.selectedOptions[0].value;
				update_div(cell, cell.metadata.marvin_cell);
				console.log(log_prefix, "Mark cell %s as marvin_cell!", cell.cell_id);
			}
        };

        // Toggles the visual mark on a cell: a green border plus a small
        // label with the action name (or removes both when unmarking).
        var update_div = function (cell, action){

        	if(cell.input[0].style.borderRight.length == 0){
        		cell.input[0].style.borderRight = "10px solid lightgreen";

        		var action_text = document.getElementById("marvin_action").options.namedItem(cell.metadata.marvin_cell).text;

        		var label = $('<div/>').attr("id", "marvin_" + cell.cell_id).text(action_text);
        		label[0].style = "font-size: smaller; color: gray;";

        		cell.input[0].parentNode.insertBefore(label[0], cell.input[0].nextSibling)

        	}else{
        		cell.input[0].style.borderRight = "";
        		document.getElementById("marvin_" + cell.cell_id).remove();
			}

        	console.log(log_prefix, "updating cell %d", cell.cell_id);
        };

        // Restores marks/labels for already-tagged cells when a notebook
        // that was saved with marks is (re)loaded.
        var initialize_marvin_cells = function () {
	        console.log(log_prefix, 'updating all marvin cells');
	        var cells = Jupyter.notebook.get_cells();

	        for (var i = 0; i < cells.length; i++) {
	            var cell = cells[i];
	            if (cell.cell_type == "code" && cell.metadata.marvin_cell) {
	            	document.getElementById("marvin_action").options.namedItem(cell.metadata.marvin_cell).disabled = true;
	            	update_div(cell, cell.metadata.marvin_cell);
	            }
	        }

	        var cell = Jupyter.notebook.get_selected_cell();
	        if (cell.cell_type != "code"){
        		$('button[data-jupyter-action="marvin_extension:export"]')[0].disabled = true;
        	}

	    };

        // Keep the button state and drop-down selection in sync with the
        // currently selected cell.
        events.on("select.Cell", function(event, params){
        	var cell = params.cell;

        	if (cell.cell_type != "code"){
        		$('button[data-jupyter-action="marvin_extension:export"]')[0].disabled = true;
        	}else{
        		$('button[data-jupyter-action="marvin_extension:export"]')[0].disabled = false;
        	}

        	if (cell.cell_type == "code" && cell.metadata.marvin_cell){
        		document.getElementById("marvin_action").value = cell.metadata.marvin_cell;
        	}else{
        		document.getElementById("marvin_action").value = "";
        	}
        });

        // Free the action for reuse when its marked cell is deleted.
        events.on("delete.Cell", function(event, params){
        	var cell = params.cell;

        	if (cell.cell_type == "code" && cell.metadata.marvin_cell){
        		document.getElementById("marvin_action").options.namedItem(cell.metadata.marvin_cell).disabled = false;
        	}
        });

        var action = {
            icon: 'fa-code',
            help    : 'Mark and Unmark cell as marvin action. To export code save the notebook.',
            help_index : 'mm',
            handler : handler
        };

        var full_action_name = Jupyter.actions.register(action, 'export', 'marvin_extension');

        add_marvin_options();
        Jupyter.toolbar.add_buttons_group([full_action_name]);

        if (Jupyter.notebook !== undefined && Jupyter.notebook._fully_loaded) {
            initialize_marvin_cells();
        }
    }

    return {
        load_ipython_extension: load_ipython_extension
    };
});
\ No newline at end of file
diff --git a/marvin_python_toolbox/loader.py b/marvin_python_toolbox/loader.py
new file mode 100644
index 0000000..7e54db3
--- /dev/null
+++ b/marvin_python_toolbox/loader.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import imp
+from inspect import getmembers
+import click
+
+
def load_commands_from_file(path):
    """Load user-defined click commands from the Python file at ``path``.

    The file is executed as a module named ``custom_commands`` and every
    module-level ``click.core.Command`` instance found in it is collected.

    Args:
        path: Filesystem path of the Python source file to load.

    Returns:
        list: The click Command objects declared in the file.
    """
    try:
        # The "imp" module is deprecated since Python 3.4 and removed in
        # 3.12; prefer importlib when available.
        import importlib.util
        import sys
        spec = importlib.util.spec_from_file_location('custom_commands', path)
        module = importlib.util.module_from_spec(spec)
        # Mirror imp.load_source's side effect of registering the module.
        sys.modules['custom_commands'] = module
        spec.loader.exec_module(module)
    except ImportError:
        # Python 2 fallback: keep the original behavior.
        module = imp.load_source('custom_commands', path)
    commands = [obj for name, obj in getmembers(module) if isinstance(obj, click.core.Command)]
    return commands
diff --git a/marvin_python_toolbox/manage.py b/marvin_python_toolbox/manage.py
new file mode 100644
index 0000000..2fc817a
--- /dev/null
+++ b/marvin_python_toolbox/manage.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from .management import *
diff --git a/marvin_python_toolbox/management/__init__.py b/marvin_python_toolbox/management/__init__.py
new file mode 100644
index 0000000..a4ce0ca
--- /dev/null
+++ b/marvin_python_toolbox/management/__init__.py
@@ -0,0 +1,142 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+import click
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from .pkg import cli as cli_pkg
+from .test import cli as cli_test
+from .notebook import cli as cli_notebook
+from .hive import cli as cli_hive
+from .engine import cli as cli_engine
+
+from ..config import parse_ini
+from ..loader import load_commands_from_file
+
+
+__all__ = ['create_cli']
+
+
+logger = get_logger('management')
+
# Commands that only make sense inside a generated engine project, hidden
# when the CLI is built for the toolbox itself.
TOOL_EXCLUDE = ['engine-server', 'engine-dryrun', 'engine-httpserver', 'engine-grpcserver', 'engine-deploy', 'engine-httpserver-remote', 'pkg-showversion']
# Development-only commands, additionally hidden from production tool
# installs (no ".dev" marker file next to the package).
PROD_EXCLUDE = ['test', 'test-tdd', 'test-tox', 'test-checkpep8', 'lab', 'notebook', 'pkg-bumpversion', 'pkg-createtag', 'pkg-showchanges', 'pkg-showinfo', 'pkg-updatedeps']

# Default exclude list per managed package type (see create_cli).
EXCLUDE_BY_TYPE = {
    'python-engine': ['engine-generate', 'engine-generateenv'],
    'tool': TOOL_EXCLUDE
}


# ASCII-art banner shown by "marvin --version" and in the group help;
# %(version)s is filled in via click.version_option / string formatting.
VERSION_MSG = '''
  __  __            _____ __      __ _____  _   _       
 |  \/  |    /\    |  __ \\\ \    / /|_   _|| \ | |
 | \  / |   /  \   | |__) |\ \  / /   | |  |  \| | 
 | |\/| |  / /\ \  |  _  /  \ \/ /    | |  | . ` | 
 | |  | | / ____ \ | | \ \   \  /    _| |_ | |\  | 
 |_|  |_|/_/    \_\|_|  \_\   \/    |_____||_| \_| 
            _    _             _                 _  _                                                              
           | |  | |           | |               | || |                                                             
           | |_ | |__    ___  | |_  ___    ___  | || |__    ___ __  __                                             
           | __|| '_ \  / _ \ | __|/ _ \  / _ \ | || '_ \  / _ \\ \/ /                                             
  _  _  _  | |_ | | | ||  __/ | |_| (_) || (_) || || |_) || (_) |>  <                                              
 (_)(_)(_)  \__||_| |_| \___|  \__|\___/  \___/ |_||_.__/  \___//_/\_\ v%(version)s
'''
+
+
def create_cli(package_name, package_path, type_=None, exclude=None, config=None):
    """Build the marvin click CLI for a managed package.

    Assembles a base click group, attaches the toolbox's internal commands
    (pkg/test/notebook/engine/hive) minus the excluded ones, then any custom
    commands found in the managed project, and finally wires up the version
    banner and help text.

    Args:
        package_name: Name of the package being managed.
        package_path: Filesystem path of that package's directory.
        type_: Package type key used to pick defaults from EXCLUDE_BY_TYPE
            (e.g. 'python-engine' or 'tool').
        exclude: Explicit list of command names to hide; defaults to the
            type-based list, possibly extended with PROD_EXCLUDE.
        config: Pre-parsed configuration dict; when None, marvin.ini next to
            the package is parsed (empty dict if absent).

    Returns:
        The fully assembled click group.
    """
    base_path = os.path.abspath(os.path.join(package_path, '..'))

    if exclude is None:
        exclude = EXCLUDE_BY_TYPE.get(type_, [])

    # Production tool installs (no ".dev" marker file) also hide the
    # development-only commands.
    mode_file = os.path.join(base_path, '.dev')
    if type_ == 'tool' and not os.path.exists(mode_file):
        exclude = exclude + PROD_EXCLUDE

    if config is None:
        # Find the ini directory
        inifilename = 'marvin.ini'
        inidir = base_path

        # Load the ini file
        inipath = os.path.join(inidir, inifilename)
        config_defaults = {
            'inidir': inidir,
            'marvin_packagedir': '{inidir}/{marvin_package}',
        }
        if os.path.exists(inipath):
            config = parse_ini(inipath, config_defaults)
        else:
            config = {}

    # The ini file may override the exclude list; it comes back as a
    # comma-separated string, so normalize to a list.
    exclude = config.get('marvin_exclude', ','.join(exclude))
    if isinstance(exclude, str):
        exclude = exclude.split(',')

    @click.group('custom')
    @click.option('--debug', is_flag=True, help='Enable debug mode.')
    @click.pass_context
    def cli(ctx, debug):
        # Shared state handed to every subcommand through ctx.obj.
        ctx.obj = {
            'debug': debug,
            'package_name': package_name,
            'package_path': package_path,
            'base_path': base_path,
            'type': type_,
            'config': config,
        }

    # Load internal commands
    commands = {}
    commands.update(cli_pkg.commands)
    commands.update(cli_test.commands)
    commands.update(cli_notebook.commands)
    commands.update(cli_engine.commands)
    commands.update(cli_hive.commands)

    for name, command in commands.items():
        if name not in exclude:
            cli.add_command(command, name=name)

    # Load custom commands from project been managed
    # (only the first existing candidate file wins — note the break below).
    commands_file_paths = [
        config.get('marvin_commandsfile'),
        os.path.join(base_path, 'marvin_commands.py'),
        os.path.join(base_path, 'commands.py')
    ]

    for commands_file_path in commands_file_paths:
        if commands_file_path and os.path.exists(commands_file_path):
            commands = load_commands_from_file(commands_file_path)
            for command in commands:
                cli.add_command(command)
            break

    # Add version and help messages
    from .. import __version__
    cli = click.version_option(version=__version__,
                               message=VERSION_MSG.replace('\n', '\n  '))(cli)

    cli.help = '\b{}\n'.format(VERSION_MSG % {'version': __version__})

    return cli
diff --git a/marvin_python_toolbox/management/engine.py b/marvin_python_toolbox/management/engine.py
new file mode 100644
index 0000000..f15dbeb
--- /dev/null
+++ b/marvin_python_toolbox/management/engine.py
@@ -0,0 +1,728 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import click
+import json
+import os
+import sys
+import time
+import os.path
+import re
+import shutil
+import subprocess
+import jinja2
+import six
+from unidecode import unidecode
+import multiprocessing
+from marvin_python_toolbox.common.profiling import profiling
+from marvin_python_toolbox.common.data import MarvinData
+from marvin_python_toolbox.common.config import Config
+from .._compatibility import iteritems
+from .._logging import get_logger
+
+
+logger = get_logger('management.engine')
+
+
@click.group('engine')
def cli():
    # Click group that namespaces all "engine-*" subcommands; the individual
    # commands are attached below via @cli.command decorators.
    pass
+
+
@cli.command('engine-dryrun', help='Marvin Dryrun Utility - Run marvin engines in a standalone way')
@click.option(
    '--action',
    '-a',
    default='all',
    type=click.Choice(['all', 'acquisitor', 'tpreparator', 'trainer', 'evaluator', 'ppreparator', 'predictor', 'feedback']),
    help='Marvin engine action name')
@click.option('--initial-dataset', '-id', help='Initial dataset file path', type=click.Path(exists=True))
@click.option('--dataset', '-d', help='Dataset file path', type=click.Path(exists=True))
@click.option('--model', '-m', help='Engine model file path', type=click.Path(exists=True))
@click.option('--metrics', '-me', help='Engine Metrics file path', type=click.Path(exists=True))
@click.option('--params-file', '-pf', default='engine.params', help='Marvin engine params file path', type=click.Path(exists=True))
@click.option('--messages-file', '-mf', default='engine.messages', help='Marvin engine predictor input messages file path', type=click.Path(exists=True))
@click.option('--feedback-file', '-ff', default='feedback.messages', help='Marvin engine feedback input messages file path', type=click.Path(exists=True))
@click.option('--response', '-r', default=True, is_flag=True, help='If enable, print responses from engine online actions (ppreparator and predictor)')
@click.option('--profiling', default=False, is_flag=True, help='Enable execute method profiling')
@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
@click.pass_context
def dryrun_cli(ctx, action, params_file, messages_file, feedback_file, initial_dataset, dataset, model, metrics, response, spark_conf, profiling):
    # Thin CLI wrapper; the actual logic lives in dryrun() so it can also be
    # called programmatically without click.  (Fixed "stadalone" typo in help.)
    dryrun(ctx, action, params_file, messages_file, feedback_file, initial_dataset, dataset, model, metrics, response, spark_conf, profiling)
+
+
def dryrun(ctx, action, params_file, messages_file, feedback_file, initial_dataset, dataset, model, metrics, response, spark_conf, profiling):
    """Run one engine action (or the whole pipeline) locally, outside any server.

    Loads params/messages from JSON files, validates that the online actions
    have input messages, then executes each pipeline step via MarvinDryRun.
    """
    print(chr(27) + "[2J")  # ANSI escape: clear the terminal screen

    # setting spark configuration directory
    # NOTE(review): raises KeyError when neither --spark-conf nor SPARK_HOME is
    # set -- confirm that is the intended failure mode.
    os.environ["SPARK_CONF_DIR"] = spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")
    os.environ["YARN_CONF_DIR"] = os.environ["SPARK_CONF_DIR"]

    # Load file contents into new names instead of clobbering the *_file path
    # parameters (the original reassigned messages_file/feedback_file).
    params = read_file(params_file)
    messages = read_file(messages_file)
    feedback_messages = read_file(feedback_file)

    if action in ['all', 'ppreparator', 'predictor'] and not messages:
        print('Please, set the input message to be used by the dry run process. Use --messages-file flag to inform it in a valid json form.')
        sys.exit("Stopping process!")

    if action in ['all', 'feedback'] and not feedback_messages:
        print('Please, set the feedback input message to be used by the dry run process. Use --feedback-file flag to inform it in a valid json form.')
        sys.exit("Stopping process!")

    if action == 'all':
        pipeline = ['acquisitor', 'tpreparator', 'trainer', 'evaluator', 'ppreparator', 'predictor', 'feedback']
    else:
        pipeline = [action]

    _dryrun = MarvinDryRun(ctx=ctx, messages=[messages, feedback_messages], print_response=response)

    initial_start_time = time.time()

    for step in pipeline:
        _dryrun.execute(clazz=CLAZZES[step], params=params, initial_dataset=initial_dataset, dataset=dataset, model=model, metrics=metrics,
                        profiling_enabled=profiling)

    print("Total Time : {:.2f}s".format(time.time() - initial_start_time))

    print("\n")
+
+
# Maps each CLI action name to the engine class implementing that pipeline step;
# the class is resolved at runtime as "<package_name>.<class_name>".
CLAZZES = {
    "acquisitor": "AcquisitorAndCleaner",
    "tpreparator": "TrainingPreparator",
    "trainer": "Trainer",
    "evaluator": "MetricsEvaluator",
    "ppreparator": "PredictionPreparator",
    "predictor": "Predictor",
    "feedback": "Feedback"
}
+
+
class MarvinDryRun(object):
    """Executes engine action classes locally, printing timing and responses.

    `messages` is a 2-item list: [predictor input messages, feedback messages].
    """

    def __init__(self, ctx, messages, print_response):
        self.predictor_messages = messages[0]
        self.feedback_messages = messages[1]
        # Messages produced by the PredictionPreparator, consumed by Predictor.
        self.pmessages = []
        self.package_name = ctx.obj['package_name']
        # Constructor kwargs are built lazily on the first execute() call.
        self.kwargs = None
        self.print_response = print_response

    def execute(self, clazz, params, initial_dataset, dataset, model, metrics, profiling_enabled=False):
        """Instantiate engine class *clazz* and run its execute() method."""
        self.print_start_step(clazz)

        _Step = dynamic_import("{}.{}".format(self.package_name, clazz))

        if not self.kwargs:
            self.kwargs = generate_kwargs(_Step, params, initial_dataset, dataset, model, metrics)

        step = _Step(**self.kwargs)

        def call_online_actions(step, msg, msg_idx):
            def print_message(result):
                try:
                    print(json.dumps(result, indent=4, sort_keys=True))
                except TypeError:
                    print("Unable to serialize the object returned!")

            if self.print_response:
                print("\nMessage {} :\n".format(msg_idx))
                print_message(msg)

            if profiling_enabled:
                # The profiling context manager owns the profiler lifecycle;
                # the original's bare `prof.disable` statement was a no-op and
                # has been removed.
                with profiling(output_path=".profiling", uid=clazz) as prof:
                    result = step.execute(input_message=msg, params=params)
                print("\nProfile images created in {}\n".format(prof.image_path))
            else:
                result = step.execute(input_message=msg, params=params)

            if self.print_response:
                print("\nResult for Message {} :\n".format(msg_idx))
                print_message(result)

            return result

        if clazz == 'PredictionPreparator':
            for idx, msg in enumerate(self.predictor_messages):
                self.pmessages.append(call_online_actions(step, msg, idx))

        elif clazz == 'Feedback':
            for idx, msg in enumerate(self.feedback_messages):
                self.pmessages.append(call_online_actions(step, msg, idx))

        elif clazz == 'Predictor':
            # Run the preparator first so prepared messages feed the predictor.
            self.execute("PredictionPreparator", params, initial_dataset, dataset, model, metrics)

            # BUGFIX: the original referenced the nonexistent `self.messages`
            # here (AttributeError); fall back to the raw predictor messages
            # when preparation yielded none.
            self.pmessages = self.predictor_messages if not self.pmessages else self.pmessages

            for idx, msg in enumerate(self.pmessages):
                call_online_actions(step, msg, idx)

        else:
            if profiling_enabled:
                with profiling(output_path=".profiling", uid=clazz) as prof:
                    step.execute(params=params)
                print("\nProfile images created in {}\n".format(prof.image_path))
            else:
                step.execute(params=params)

        self.print_finish_step()

    def print_finish_step(self):
        # Reports elapsed time since the matching print_start_step() call.
        print("\n                                               STEP TAKES {:.4f} (seconds) ".format((time.time() - self.start_time)))

    def print_start_step(self, name):
        print("\n------------------------------------------------------------------------------")
        print("MARVIN DRYRUN - STEP [{}]".format(name))
        print("------------------------------------------------------------------------------\n")
        self.start_time = time.time()
+
+
def dynamic_import(clazz):
    """Resolve a dotted path such as 'package.module.Class' to the named object."""
    parts = clazz.split('.')
    obj = __import__(parts[0])
    for attribute in parts[1:]:
        obj = getattr(obj, attribute)
    return obj
+
+
def read_file(filename):
    """Load a JSON engine file.

    Returns the parsed JSON content, or an empty dict (after printing a
    warning) when *filename* does not exist.  The original wrapped the path in
    a pointless os.path.join("", filename); also fixed "doesn't exists" grammar
    in the warning message.
    """
    if os.path.exists(filename):
        print("Engine file {} loaded!".format(filename))
        with open(filename, 'r') as fp:
            return json.load(fp)
    else:
        print("Engine file {} doesn't exist...".format(filename))
        return {}
+
+
def generate_kwargs(clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None):
    """Build the constructor kwargs for an engine action class.

    Artifact arguments (dataset/model/metrics/initial_dataset) are loaded via
    clazz.retrieve_obj; falsy arguments are omitted entirely.  Local-mode
    persistence settings are always present.
    """
    kwargs = {
        "persistence_mode": 'local',
        # assumes MARVIN_DATA_PATH is set -- os.getenv returns None otherwise
        "default_root_path": os.path.join(os.getenv('MARVIN_DATA_PATH'), '.artifacts'),
        "is_remote_calling": True,
    }

    if params:
        kwargs["params"] = params
    if dataset:
        kwargs["dataset"] = clazz.retrieve_obj(dataset)
    if initial_dataset:
        kwargs["initial_dataset"] = clazz.retrieve_obj(initial_dataset)
    if model:
        kwargs["model"] = clazz.retrieve_obj(model)
    if metrics:
        kwargs["metrics"] = clazz.retrieve_obj(metrics)

    return kwargs
+
+
class MarvinEngineServer(object):
    """Factory for gRPC servers, each exposing one engine action."""

    @classmethod
    def create(cls, ctx, action, port, workers, rpc_workers, params, initial_dataset, dataset, model, metrics, pipeline):
        """Instantiate *action*, chain its pipeline predecessors, and start its gRPC server.

        Fixed the classmethod's first parameter being named `self` instead of
        the conventional `cls`.
        """
        package_name = ctx.obj['package_name']

        def create_object(act):
            # Build the engine action object for pipeline step name *act*.
            clazz = CLAZZES[act]
            _Action = dynamic_import("{}.{}".format(package_name, clazz))
            kwargs = generate_kwargs(_Action, params, initial_dataset, dataset, model, metrics)
            return _Action(**kwargs)

        root_obj = create_object(action)
        previous_object = root_obj

        # Chain steps in reverse so each action can reach its predecessor via
        # the private _previous_step link.
        if pipeline:
            for step in list(reversed(pipeline)):
                previous_object._previous_step = create_object(step)
                previous_object = previous_object._previous_step

        server = root_obj._prepare_remote_server(port=port, workers=workers, rpc_workers=rpc_workers)

        print("Starting GRPC server [{}] for {} Action".format(port, action))
        server.start()

        return server
+
+
@cli.command('engine-grpcserver', help='Marvin gRPC engine action server starts')
@click.option(
    '--action',
    '-a',
    default='all',
    type=click.Choice(['all', 'acquisitor', 'tpreparator', 'trainer', 'evaluator', 'predictor', 'feedback']),
    help='Marvin engine action name')
@click.option('--initial-dataset', '-id', help='Initial dataset file path', type=click.Path(exists=True))
@click.option('--dataset', '-d', help='Dataset file path', type=click.Path(exists=True))
@click.option('--model', '-m', help='Engine model file path', type=click.Path(exists=True))
@click.option('--metrics', '-me', help='Engine Metrics file path', type=click.Path(exists=True))
@click.option('--params-file', '-pf', default='engine.params', help='Marvin engine params file path', type=click.Path(exists=True))
@click.option('--metadata-file', '-mf', default='engine.metadata', help='Marvin engine metadata file path', type=click.Path(exists=True))
@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration path to be used')
@click.option('--max-workers', '-w', default=multiprocessing.cpu_count(), help='Max number of grpc threads workers per action')
@click.option('--max-rpc-workers', '-rw', default=multiprocessing.cpu_count(), help='Max number of grpc workers per action')
@click.pass_context
def engine_server(ctx, action, params_file, metadata_file, initial_dataset, dataset, model, metrics, spark_conf, max_workers, max_rpc_workers):
    """Start one gRPC server per requested action and block until Ctrl-C."""

    print("Starting server ...")

    # setting spark configuration directory
    os.environ["SPARK_CONF_DIR"] = spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")
    os.environ["YARN_CONF_DIR"] = os.environ["SPARK_CONF_DIR"]

    params = read_file(params_file)
    metadata = read_file(metadata_file)
    # Map action name -> metadata entry (port, pipeline, ...).  Renamed the
    # comprehension variable, which shadowed the `action` parameter.
    default_actions = {act['name']: act for act in metadata['actions']}

    if action == 'all':
        actions = default_actions
    else:
        actions = {action: default_actions[action]}

    servers = []
    for action_name in actions.keys():
        # initializing server configuration.  Renamed the local from
        # `engine_server`, which shadowed this function's own name.
        server = MarvinEngineServer.create(
            ctx=ctx,
            action=action_name,
            port=actions[action_name]["port"],
            workers=max_workers,
            rpc_workers=max_rpc_workers,
            params=params,
            initial_dataset=initial_dataset,
            dataset=dataset,
            model=model,
            metrics=metrics,
            pipeline=actions[action_name]["pipeline"]
        )

        servers.append(server)

    try:
        # Block the main thread; the gRPC servers run on their own threads.
        while True:
            time.sleep(100)

    except KeyboardInterrupt:
        print("Terminating server ...")
        for server in servers:
            server.stop(0)
+
+
# Scaffold template locations on disk, keyed by project type.
TEMPLATE_BASES = {
    'python-engine': os.path.join(os.path.dirname(__file__), 'templates', 'python-engine')
}

# (template_dir_name, jinja_template_for_new_name) pairs renamed after copying.
RENAME_DIRS = [
    ('project_package', '{{project.package}}'),
]

# Directory names whose files are copied verbatim instead of jinja-rendered.
IGNORE_DIRS = [
    # Ignore service internal templates
    'templates'
]


# Keep a reference to the builtin `type`; generate() rebinds `type` locally.
_orig_type = type
+
+
@cli.command('engine-generateenv', help='Generate a new marvin engine environment and install default requirements.')
@click.argument('engine-path', type=click.Path(exists=True))
@click.option('--python', '-p', default='python', help='The Python interpreter to use to create the new environment')
def generate_env(engine_path, python):
    """Create a virtualenv for an existing engine project and run `make marvin` in it."""
    project_dir = os.path.basename(os.path.abspath(engine_path))
    env_name = _create_virtual_env(project_dir, engine_path, python)
    _call_make_env(env_name)

    print('\nDone!!!!')
    print('Now to workon in the new engine project use: workon {}'.format(env_name))
+
+
@cli.command('engine-generate', help='Generate a new marvin engine project and install default requirements.')
@click.option('--name', '-n', prompt='Project name', help='Project name')
# NOTE(review): the '-d' short flag is declared both here and on --dest below,
# making it ambiguous -- confirm which option should own it.
@click.option('--description', '-d', prompt='Short description', default='Marvin engine', help='Library short description')
@click.option('--mantainer', '-m', prompt='Mantainer name', default='Marvin AI Community', help='Mantainer name')
@click.option('--email', '-e', prompt='Mantainer email', default='marvin-ai@googlegroups.com', help='Mantainer email')
@click.option('--package', '-p', default='', help='Package name')
@click.option('--dest', '-d', envvar='MARVIN_HOME', type=click.Path(exists=True), help='Root folder path for the creation')
@click.option('--no-env', is_flag=True, default=False, help='Don\'t create the virtual enviroment')
@click.option('--no-git', is_flag=True, default=False, help='Don\'t initialize the git repository')
@click.option('--python', '-py', default='python', help='The Python interpreter to use to create the new environment')
def generate(name, description, mantainer, email, package, dest, no_env, no_git, python):
    """Scaffold a new engine project from the python-engine template.

    Derives package/directory names from *name*, renders the template files,
    then optionally creates a virtualenv and initializes a git repository.
    On any failure the partially-created project folder is removed.
    """
    type_ = 'python-engine'
    # NOTE(review): this local rebinding of the builtin `type` appears unused
    # in the function body -- TODO confirm it can be removed.
    type = _orig_type

    # Process package name

    package = _slugify(package or name)

    # Make sure package name starts with "marvin"
    if not package.startswith('marvin'):
        package = 'marvin_{}'.format(package)

    # Remove "lib" prefix from package name
    if type_ == 'lib' and package.endswith('lib'):
        package = package[:-3]
    # Custom strip to remove underscores
    package = package.strip('_')

    # Append project type to services

    if type_ == 'python-engine' and not package.endswith('engine'):
        package = '{}_engine'.format(package)

    # Process directory/virtualenv name

    # Directory name should use '-' instead of '_'
    dir_ = package.replace('_', '-')

    # Remove "marvin" prefix from directory
    if dir_.startswith('marvin'):
        dir_ = dir_[6:]
    dir_ = dir_.strip('-')

    # Append "lib" to directory name if creating a lib
    if type_ == 'lib' and not dir_.endswith('lib'):
        dir_ = '{}-lib'.format(dir_)

    dest = os.path.join(dest, dir_)

    if type_ not in TEMPLATE_BASES:
        print('[ERROR] Could not found template files for "{type}".'.format(type=type_))
        sys.exit(1)

    # Context rendered into the jinja2 templates.
    project = {
        'name': _slugify(name),
        'description': description,
        'package': package,
        'toolbox_version': os.getenv('TOOLBOX_VERSION'),
        'type': type_
    }

    mantainer = {
        'name': mantainer,
        'email': email,
    }

    context = {
        'project': project,
        'mantainer': mantainer,
    }

    folder_created = False

    try:
        _copy_scaffold_structure(TEMPLATE_BASES[type_], dest)

        folder_created = True

        _copy_processed_files(TEMPLATE_BASES[type_], dest, context)
        _rename_dirs(dest, RENAME_DIRS, context)
        _make_data_link(dest)

        venv_name = None
        if not no_env:
            venv_name = _create_virtual_env(dir_, dest, python)
            _call_make_env(venv_name)

        if not no_git:
            _call_git_init(dest)

        print('\nDone!!!!')

        if not no_env:
            print('Now to workon in the new engine project use: workon {}'.format(venv_name))

    except Exception as e:
        logger.info(e)
        # remove project if created
        if os.path.exists(dest) and folder_created:
            shutil.rmtree(dest)
+
+
# Characters treated as word separators when slugifying project/package names.
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
+
+
def _slugify(text, delim='_'):
    """Lowercase *text*, split on punctuation/whitespace, transliterate to ASCII and join with *delim*."""
    words = [w
             for chunk in _punct_re.split(text.lower())
             for w in unidecode(chunk).split()]
    return six.u(delim.join(words))
+
+
+def _copy_scaffold_structure(src, dest):
+    os.mkdir(dest)
+
+    for root, dirs, files in os.walk(src):
+        for dir_ in dirs:
+            dirname = os.path.join(root, dir_)
+            dirname = '{dest}{dirname}'.format(dest=dest, dirname=dirname.replace(src, ''))  # get dirname without source path
+
+            os.mkdir(dirname)
+
+
def _copy_processed_files(src, dest, context):
    """Copy template files from *src* to *dest*, rendering them with jinja2.

    Files whose root falls under IGNORE_DIRS are copied verbatim instead of
    rendered; .DS_Store and *.pyc files are skipped entirely.
    """
    env = jinja2.Environment(loader=jinja2.FileSystemLoader(src))

    print('Processing template files...')

    for root, dirs, files in os.walk(src):

        dirname = root.replace(src, '')[1:]  # get dirname without source path
        to_dirname = os.path.join(dest, dirname)

        # NOTE(review): `root` is a path that begins with *src*, so it never
        # starts with a bare entry like 'templates' -- this looks like it was
        # meant to test `dirname`; confirm intended behavior.
        should_process = not any(root.startswith(dir_) for dir_ in IGNORE_DIRS)

        for file in files:

            # Ignore trash
            if file == '.DS_Store' or file.endswith('.pyc'):
                continue

            from_ = os.path.join(dirname, file)
            to_ = os.path.join(to_dirname, file)

            print('Copying "{0}" to "{1}"...'.format(from_, to_))

            if not should_process:
                shutil.copy(os.path.join(src, from_), to_)
            else:
                # Render the template with the project/mantainer context.
                template = env.get_template(from_)
                output = template.render(**context)

                with open(to_, 'w') as file:
                    file.write(output)
+
+
def _rename_dirs(base, dirs, context):
    """Rename each (current_name, jinja_name_template) directory under *base*.

    The new name is produced by rendering the jinja template with *context*.
    """
    for current_name, name_template in dirs:
        src_path = os.path.join(base, current_name)
        rendered = jinja2.Template(name_template).render(**context)
        dst_path = os.path.join(base, rendered)

        shutil.move(src_path, dst_path)

        print('Renaming {0} as {1}'.format(src_path, dst_path))
+
+
def _create_virtual_env(name, dest, python):
    """Create a virtualenvwrapper env named '<name>-env' bound to *dest*.

    Returns the environment name; exits the process with status 1 on failure.
    """
    venv_name = '{}-env'.format(name).replace('_', '-')
    print('Creating virtualenv: {0}...'.format(venv_name))

    command = ['bash', '-c', '. virtualenvwrapper.sh; mkvirtualenv -p {0} -a {1} {2};'.format(python, dest, venv_name)]

    try:
        result = subprocess.Popen(command, env=os.environ).wait()
    except Exception:
        # BUGFIX: the original bare `except:` also caught the SystemExit
        # raised for a non-zero exit status, logging a misleading
        # "could not create" traceback before exiting.
        logger.exception('Could not create the virtualenv!')
        sys.exit(1)

    # A non-zero mkvirtualenv exit status is fatal but not an exception.
    if result > 0:
        sys.exit(1)

    return venv_name
+
+
def _call_make_env(venv_name):
    """Run `make marvin` inside the given virtualenvwrapper environment; exits on failure."""
    command = ['bash', '-c', '. virtualenvwrapper.sh; workon {}; make marvin'.format(venv_name)]

    try:
        subprocess.Popen(command, env=os.environ).wait()
    except Exception:  # narrowed from bare except: don't swallow SystemExit/KeyboardInterrupt
        logger.exception('Could not call make marvin!')
        sys.exit(1)
+
+
def _call_git_init(dest):
    """Initialize a git repository at *dest*; failure is non-fatal."""
    # BUGFIX: run git with an argument list instead of interpolating the path
    # into a `bash -c` string, which broke (and was shell-injectable) for
    # destinations containing spaces or shell metacharacters.
    command = ['/usr/bin/git', 'init', dest]
    print('Initializing git repository...')
    try:
        subprocess.Popen(command, env=os.environ).wait()
    except OSError:
        print('WARNING: Could not initialize repository!')
+
+
+def _make_data_link(dest):
+    data_path = os.environ['MARVIN_DATA_PATH']
+    data_link = os.path.join(dest, 'notebooks/data')
+    os.symlink(data_path, data_link)
+
+
@cli.command('engine-httpserver', help='Marvin http api server starts')
@click.option(
    '--action',
    '-a',
    default='all',
    type=click.Choice(['all', 'acquisitor', 'tpreparator', 'trainer', 'evaluator', 'ppreparator', 'predictor', 'feedback']),
    help='Marvin engine action name')
@click.option('--initial-dataset', '-id', help='Initial dataset file path', type=click.Path(exists=True))
@click.option('--dataset', '-d', help='Dataset file path', type=click.Path(exists=True))
@click.option('--model', '-m', help='Engine model file path', type=click.Path(exists=True))
@click.option('--metrics', '-me', help='Engine Metrics file path', type=click.Path(exists=True))
@click.option('--protocol', '-pr', default='', help='Marvin protocol to be loaded during initialization.')
@click.option('--params-file', '-pf', default='engine.params', help='Marvin engine params file path', type=click.Path(exists=True))
@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
@click.option('--http-host', '-h', default='localhost', help='Engine executor http bind host')
@click.option('--http-port', '-p', default=8000, help='Engine executor http port')
@click.option('--executor-path', '-e', help='Marvin engine executor jar path', type=click.Path(exists=True))
@click.option('--max-workers', '-w', default=multiprocessing.cpu_count(), help='Max number of grpc threads workers per action')
@click.option('--max-rpc-workers', '-rw', default=multiprocessing.cpu_count(), help='Max number of grpc workers per action')
@click.option('--extra-executor-parameters', '-jvm', help='Use to send extra JVM parameters to engine executor process')
@click.pass_context
def engine_httpserver_cli(ctx, action, params_file, initial_dataset, dataset,
                          model, metrics, protocol, spark_conf, http_host, http_port,
                          executor_path, max_workers, max_rpc_workers, extra_executor_parameters):
    # Thin CLI wrapper; the server logic lives in engine_httpserver() so it
    # can also be invoked programmatically.
    engine_httpserver(
        ctx, action, params_file, initial_dataset, dataset,
        model, metrics, protocol, spark_conf, http_host, http_port,
        executor_path, max_workers, max_rpc_workers, extra_executor_parameters
    )
+
+
def engine_httpserver(ctx, action, params_file, initial_dataset, dataset, model, metrics, protocol, spark_conf, http_host,
                      http_port, executor_path, max_workers, max_rpc_workers, extra_executor_parameters):
    """Start the gRPC action server and the Java HTTP executor, blocking until Ctrl-C.

    Spawns `marvin engine-grpcserver` as a subprocess, then launches the engine
    executor jar (downloading it first when no local path is given).  Both
    children are terminated on KeyboardInterrupt.
    """
    logger.info("Starting http and grpc servers ...")

    grpcserver = None
    httpserver = None

    def _params(**kwargs):
        # Convert keyword arguments into "-flag value" CLI pairs, skipping Nones.
        params = []
        if kwargs is not None:
            for key, value in iteritems(kwargs):
                if value is not None:
                    params.append("-{0}".format(str(key)))
                    params.append(str(value))
        return params

    try:
        optional_args = _params(id=initial_dataset, d=dataset, m=model, me=metrics, pf=params_file, c=spark_conf)
        grpcserver = subprocess.Popen(['marvin', 'engine-grpcserver', '-a', action, '-w', str(max_workers), '-rw', str(max_rpc_workers)] + optional_args)

        # Give the grpc server a moment to bind before the executor connects.
        time.sleep(3)

    except Exception:  # narrowed from bare except: don't swallow KeyboardInterrupt/SystemExit
        logger.exception("Could not start grpc server!")
        sys.exit(1)

    try:
        if not (executor_path and os.path.exists(executor_path)):
            executor_url = Config.get("executor_url", section="marvin")
            executor_path = MarvinData.download_file(executor_url, force=False)

        command_list = ['java']
        command_list.append('-DmarvinConfig.engineHome={}'.format(ctx.obj['config']['inidir']))
        command_list.append('-DmarvinConfig.ipAddress={}'.format(http_host))
        command_list.append('-DmarvinConfig.port={}'.format(http_port))
        command_list.append('-DmarvinConfig.protocol={}'.format(protocol))

        if extra_executor_parameters:
            command_list.append(extra_executor_parameters)

        command_list.append('-jar')
        command_list.append(executor_path)

        httpserver = subprocess.Popen(command_list)

    except Exception:
        logger.exception("Could not start http server!")
        # Replaced the `x.terminate() if x else None` expression-statement
        # idiom with plain conditionals.
        if grpcserver:
            grpcserver.terminate()
        sys.exit(1)

    try:
        while True:
            time.sleep(100)

    except KeyboardInterrupt:
        logger.info("Terminating http and grpc servers...")
        if grpcserver:
            grpcserver.terminate()
        if httpserver:
            httpserver.terminate()
        logger.info("Http and grpc servers terminated!")
        sys.exit(0)
+
+
@cli.command('engine-deploy', help='Engine provisioning and deployment command')
@click.option('--provision', is_flag=True, default=False, help='Forces provisioning')
@click.option('--package', is_flag=True, default=False, help='Creates engine package')
@click.option('--skip-clean', is_flag=True, default=False, help='Skips make clean')
def engine_deploy(provision, package, skip_clean):
    """Deploy (or package) the engine via Fabric tasks.

    Deduplicated the four nearly-identical subprocess.Popen blocks into one
    local helper; the fab tasks invoked are unchanged.
    """
    TOOLBOX_VERSION = os.getenv('TOOLBOX_VERSION')

    def _fab(task):
        # Run a single fabric task, inheriting the current environment.
        subprocess.Popen(["fab", task], env=os.environ).wait()

    if provision:
        _fab("provision")
        _fab("deploy:version={version}".format(version=TOOLBOX_VERSION))
    elif package:
        _fab("package:version={version}".format(version=TOOLBOX_VERSION))
    elif skip_clean:
        _fab("deploy:version={version},skip_clean=True".format(version=TOOLBOX_VERSION))
    else:
        _fab("deploy:version={version}".format(version=TOOLBOX_VERSION))
+
+
@cli.command('engine-httpserver-remote', help='Remote HTTP server control command')
@click.option('--http_host', '-h', default='0.0.0.0', help='Engine executor http bind host')
@click.option('--http_port', '-p', default=8000, help='Engine executor http port')
@click.argument('command', type=click.Choice(['start', 'stop', 'status']))
def engine_httpserver_remote(command, http_host, http_port):
    """Control the remote engine HTTP server through Fabric tasks."""
    fab_tasks = {
        'start': "engine_start:{host},{port}".format(host=http_host, port=http_port),
        'stop': "engine_stop",
        'status': "engine_status",
    }

    task = fab_tasks.get(command)
    if task is not None:
        subprocess.Popen(["fab", task], env=os.environ).wait()
    else:
        # Unreachable via click.Choice; kept for parity with the original.
        print("Usage: marvin engine-httpserver-remote [ start | stop | status ]")
diff --git a/marvin_python_toolbox/management/hive.py b/marvin_python_toolbox/management/hive.py
new file mode 100644
index 0000000..b137846
--- /dev/null
+++ b/marvin_python_toolbox/management/hive.py
@@ -0,0 +1,709 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import click
+import time
+import os
+import re
+import sys
+import json
+from paramiko import SSHClient, AutoAddPolicy
+from pyhive import hive
+from slugify import slugify
+import hashlib
+
+from .._logging import get_logger
+
+from .._compatibility import six
+
+
+logger = get_logger('management.hive')
+
+
+# Click group that aggregates all hive-* subcommands defined below.
+@click.group('hive')
+def cli():
+    # Intentionally empty: the group only exists to attach subcommands.
+    pass
+
+
+@cli.command('hive-generateconf', help='Generate default configuration file')
+@click.pass_context
+def hive_generateconf_cli(ctx):
+    """CLI entry point; delegates to the testable hive_generateconf."""
+    hive_generateconf(ctx)
+
+
+def hive_generateconf(ctx):
+    default_conf = [{
+        "origin_host": "xxx_host_name",
+        "origin_db": "xxx_db_name",
+        "origin_queue": "marvin",
+        "target_table_name": "xxx_table_name",
+        "sample_sql": "SELECT * FROM XXX",
+        "sql_id": "1"
+    }]
+
+    with open('hive_dataimport.conf', 'w') as outfile:
+        json.dump(default_conf, outfile, indent=2)
+
+    print("Done!!!")
+
+
+@cli.command('hive-resetremote', help='Drop all remote tables from informed engine on host')
+@click.option('--host', '-h', default='marvin-hadoop')
+@click.option('--queue', '-h', default='default')
+@click.option('--engine', default=(os.path.relpath(".", "..")), help='Marvin engine name (default is the current folder)')
+@click.pass_context
+def hive_resetremote_cli(ctx, host, engine, queue):
+    hive_resetremote(ctx, host, engine, queue)
+
+
+def hive_resetremote(ctx, host, engine, queue):
+    """Drop all remote temp tables created by ``engine`` on ``host``.
+
+    Builds a HiveDataImporter with only the origin-side fields populated
+    (the sample/destination fields are irrelevant for a reset) and runs
+    its reset_remote_tables procedure.
+    """
+    # NOTE(review): the hard-coded vagrant credentials and hdfs root
+    # assume the default toolbox VM layout — confirm for other setups.
+    hdi = HiveDataImporter(
+        engine=engine,
+        origin_host=host,
+        origin_queue=queue,
+        origin_db=None,
+        target_table_name=None,
+        sample_sql=None,
+        max_query_size=None,
+        destination_host=None,
+        destination_port=None,
+        destination_host_username='vagrant',
+        destination_host_password='vagrant',
+        destination_hdfs_root_path='/user/hive/warehouse/',
+        sql_id=None
+    )
+    hdi.reset_remote_tables()
+
+
+@cli.command(
+    'hive-dataimport',
+    help='Export and import data samples from a hive databse to the hive running in this toolbox, cloning same data structure (db and table).')
+@click.option('--destination-hdfs-root-path', '-hdfs', default='/user/hive/warehouse/')
+@click.option('--destination-host-password', '-p', default='vagrant')
+@click.option('--destination-host-username', '-u', default='vagrant')
+@click.option('--destination-host', '-dh', default='marvin-hadoop')
+@click.option('--destination-port', '-dp', default=22)
+@click.option('--max-query-size', '-s', default=(50 * 1024 * 1024), help='Max query size in bytes')
+@click.option('--force', is_flag=True, help='Force table creation even table already exists in destination')
+@click.option('--force-remote', is_flag=True, help='Force remote temp table creation even table already exists in origin')
+@click.option('--validate', is_flag=True, help='Validate the query sample')
+@click.option('--force-copy-files', is_flag=True, help='Force the hdfs files copy procedure')
+@click.option('--skip-remote-preparation', is_flag=True, help='Skip the creation of remote temp table')
+@click.option('--engine', default=(os.path.relpath(".", "..")), help='Marvin engine name (default is the current folder)')
+@click.option('--sql-id', '-q', help='If informed the process will be applied exclusivelly for this sample sql')
+@click.option('--conf', '-c', default='hive_dataimport.conf', help='Hive data import configuration file')
+@click.pass_context
+def hive_dataimport_cli(
+    ctx, conf, sql_id, engine, skip_remote_preparation, force_copy_files, validate, force,
+    force_remote, max_query_size, destination_host, destination_port, destination_host_username,
+    destination_host_password, destination_hdfs_root_path
+):
+    """CLI entry point; forwards all options to hive_dataimport."""
+    hive_dataimport(
+        ctx, conf, sql_id, engine, skip_remote_preparation, force_copy_files, validate, force,
+        force_remote, max_query_size, destination_host, destination_port, destination_host_username,
+        destination_host_password, destination_hdfs_root_path
+    )
+
+def hive_dataimport(
+    ctx, conf, sql_id, engine, skip_remote_preparation, force_copy_files, validate, force,
+    force_remote, max_query_size, destination_host, destination_port, destination_host_username,
+    destination_host_password, destination_hdfs_root_path
+):
+
+    initial_start_time = time.time()
+
+    confs = read_config(filename=conf)
+
+    if confs:
+        print(chr(27) + "[2J")
+
+        if sql_id:
+            confs = [x for x in confs if x['sql_id'] == sql_id]
+
+        for conf in confs:
+            hdi = HiveDataImporter(
+                max_query_size=max_query_size,
+                destination_host=destination_host,
+                destination_port=destination_port,
+                destination_host_username=destination_host_username,
+                destination_host_password=destination_host_password,
+                destination_hdfs_root_path=destination_hdfs_root_path,
+                engine=engine,
+                **conf)
+
+            if force:
+                table_exists = False
+
+            else:
+                table_exists = hdi.table_exists(host=hdi.destination_host, db=hdi.origin_db, table=hdi.target_table_name)
+
+            if not table_exists:
+                hdi.import_sample(
+                    create_temp_table=(not skip_remote_preparation),
+                    copy_files=force_copy_files,
+                    validate_query=validate,
+                    force_create_remote_table=force_remote,
+                )
+
+            else:
+                print ("Table {} already exists, skiping data import. Use --force flag to force data importation".format(hdi.full_table_name))
+
+        print("Total Time : {:.2f}s".format(time.time() - initial_start_time))
+
+        print("\n")
+
+
+def read_config(filename):
+    fname = os.path.join("", filename)
+    if os.path.exists(fname):
+        with open(fname, 'r') as fp:
+            return json.load(fp)
+    else:
+        print("Configuration file {} doesn't exists...".format(filename))
+        return {}
+
+
+class HiveDataImporter():
+    def __init__(
+        self, origin_host, origin_db, origin_queue, target_table_name, sample_sql, engine,
+        max_query_size, destination_host, destination_port, destination_host_username, destination_host_password,
+        destination_hdfs_root_path, sql_id
+    ):
+
+        self.sql_id = sql_id
+        self.origin_host = origin_host
+        self.origin_db = origin_db
+        self.origin_queue = origin_queue
+        self.target_table_name = target_table_name
+        self.sample_sql = sample_sql
+        self.engine = engine
+        self.destination_host = destination_host
+        self.destination_port = destination_port
+        self.destination_host_username = destination_host_username
+        self.destination_host_password = destination_host_password
+        self.destination_hdfs_root_path = destination_hdfs_root_path
+
+        self.temp_db_name = 'marvin'
+        self.max_query_size = max_query_size
+
+        self.supported_format_types = {
+            'TextInputFormat': 'TEXTFILE',
+            'SequenceFileInputFormat': 'SEQUENCEFILE',
+            'OrcInputFormat': 'ORC',
+            'MapredParquetInputFormat': 'PARQUET',
+            'AvroContainerInputFormat': 'AVRO',
+            'RCFileInputFormat': 'RCFILE'
+        }
+
+        print("\n------------------------------------------------------------------------------")
+        print("Initializing process for sql_id [{}]:".format(self.sql_id))
+        print("     Origin -->")
+        print("         Host:       [{}]".format(self.origin_host))
+        print("         DataBase:   [{}]".format(self.origin_db))
+        print("         Table Name: [{}]".format(self.target_table_name))
+        print("         Sample SQL: [{}]".format(self.sample_sql))
+        print("\n")
+        print("     Destination -->")
+        print("         Host:       [{}]".format(self.destination_host))
+        print("         DataBase:   [{}]".format(self.origin_db))
+        print("         Table Name: [{}]".format(self.target_table_name))
+        print("\n")
+
+    def validade_query(self):
+        # creating connections
+        print("Connecting with {} database on {} .. ".format(self.origin_db, self.origin_host))
+        conn_origin = self.get_connection(host=self.origin_host, db=self.origin_db, queue=self.origin_queue)
+
+        print("Counting sample sql ...")
+        total_rows = self.count_rows(conn=conn_origin, sql=self.sample_sql)
+        print("Found [{}] rows!".format(total_rows))
+
+        print("Retrieve data sample for query estimation reasons...")
+        data_sample = self.retrieve_data_sample(conn=conn_origin, full_table_name=self.full_table_name)
+        print("Calculated [{}] bytes per row!".format(data_sample['estimate_query_mean_per_line']))
+
+        estimated_size = data_sample['estimate_query_mean_per_line'] * total_rows
+
+        print ("Estimated query size is : {} bytes".format(estimated_size))
+        print ("Max permited query size is: {} bytes".format(self.max_query_size))
+
+        return estimated_size <= self.max_query_size
+
+    def table_exists(self, host, db, table):
+        print("Verifiying if table {}.{} exists on {} ...".format(db, table, host))
+        local_conn = self.get_connection(host=host)
+        cursor = local_conn.cursor()
+
+        cursor.execute("SHOW DATABASES LIKE '{}'".format(db))
+        dbs = cursor.fetchall()
+        self.show_log(cursor)
+
+        if not len(dbs) == 1:
+            table_exists = False
+        else:
+            cursor.execute("USE {} ".format(db))
+
+            cursor.execute("SHOW TABLES LIKE '{}'".format(table))
+            tbs = cursor.fetchall()
+            self.show_log(cursor)
+
+            if not len(tbs) == 1:
+                table_exists = False
+            else:
+                table_exists = True
+
+        cursor.close()
+        return table_exists
+
+    def reset_remote_tables(self):
+        self.print_start_step(name="Reset Remote Tables for {}".format(self.temp_table_prefix), step_number=1, total_steps=1)
+
+        print("Connecting with {} database on {} .. ".format(self.temp_db_name, self.origin_host))
+        remote_temp_db_conn = self.get_connection(host=self.origin_host, db=self.temp_db_name, queue=self.origin_queue)
+
+        cursor = remote_temp_db_conn.cursor()
+        cursor.execute("SHOW TABLES LIKE '{}*'".format(self.temp_table_prefix))
+        tbs = cursor.fetchall()
+        self.show_log(cursor)
+        cursor.close()
+
+        valid_tbs = [tb[0] for tb in tbs]
+
+        if valid_tbs:
+            print("Found {} tables for deletion....".format(len(tbs)))
+
+            for tb in valid_tbs:
+                table_name = "{}.{}".format(self.temp_db_name, tb)
+                print("Dropping table {} on {} .. ".format(table_name, self.origin_host))
+                self.drop_table(conn=remote_temp_db_conn, table_name=table_name)
+
+                hdfs_location = self.generate_table_location(self.destination_hdfs_root_path, self.origin_host, self.temp_db_name + '.db', tb)
+                print("Removing hdfs files from {} .. ".format(hdfs_location))
+
+                ssh = self._get_ssh_client(self.origin_host, self.destination_host_username, self.destination_host_password)
+                self.delete_files(ssh, hdfs_location)
+
+        else:
+            print("No table found! Skiping reset remote tables process!!")
+
+        self.print_finish_step()
+
+    def print_finish_step(self):
+        print("\n                                               STEP TAKES {:.4f} (seconds) ".format((time.time() - self.start_time)))
+
+    def print_start_step(self, name, step_number, total_steps):
+        print("\n------------------------------------------------------------------------------")
+        print("MARVIN DATA IMPORT - STEP ({}) of ({}) - [{}]".format(step_number, total_steps, name))
+        print("------------------------------------------------------------------------------\n")
+        self.start_time = time.time()
+
+    def import_sample(self, create_temp_table=True, copy_files=True, validate_query=True, force_create_remote_table=False):
+        #
+        #################################################################################
+        # Step 1 - Query validation
+        self.print_start_step(name="Query Validation", step_number=1, total_steps=6)
+
+        is_valid = self.validade_query() if validate_query else True
+
+        if not is_valid:
+            print("Informed sample query is not valid!")
+            self.print_finish_step()
+            return
+
+        self.print_finish_step()
+
+        #
+        ##################################################################################
+        # Step 2 - Testing remote connecitons and getting table schema
+        self.print_start_step(name="Table Schema Achievement", step_number=2, total_steps=6)
+
+        # creating connections
+        print("Connecting with {} database on {} .. ".format(self.origin_db, self.origin_host))
+        conn_origin = self.get_connection(host=self.origin_host, db=self.origin_db, queue=self.origin_queue)
+
+        print("Connecting with {} database on {} .. ".format(self.temp_db_name, self.origin_host))
+        remote_temp_db_conn = self.get_connection(host=self.origin_host, db=self.temp_db_name, queue=self.origin_queue)
+
+        # getting ddl from real table
+        print("Getting DDL from {} table ".format(self.target_table_name))
+        ddl = self.get_createtable_ddl(conn=conn_origin, origin_table_name=self.target_table_name, dest_table_name=self.temp_table_name)
+
+        # validanting if partitions is used in query statement
+        partitions = self.get_partitions(ddl)
+
+        if validate_query and self.has_partitions(self.sample_sql, [p['cols'] for p in partitions]):
+            print("Informed sample query doesn't have valid partitions in the clausule where!!!! Informe at lest one partition.")
+            print("To disable the query validation use --skip-validation flag.")
+            self.print_finish_step()
+            return
+
+        print("Connecting with DEFAULT database on {} .. ".format(self.destination_host))
+        local_conn = self.get_connection(host=self.destination_host)
+
+        # creating databases if not exists
+        print("Creating database {} ...".format(self.origin_db))
+        self.create_database(conn=local_conn, db=self.origin_db)
+
+        print("Connecting with {} database on {} .. ".format(self.origin_db, self.destination_host))
+        local_conn = self.get_connection(host=self.destination_host, db=self.origin_db)
+
+        # creating databases if not exists
+        print("Creating database {} ...".format(self.temp_db_name))
+        self.create_database(conn=local_conn, db=self.temp_db_name)
+
+        print("Connecting with {} database on {} .. ".format(self.temp_db_name, self.destination_host))
+        local_temp_db_conn = self.get_connection(host=self.destination_host, db=self.temp_db_name)
+
+        self.print_finish_step()
+
+        #
+        ##################################################################################
+        # Step 3 - Remote Table Preparation
+        self.print_start_step(name="Remote Table Preparation", step_number=3, total_steps=6)
+
+        if create_temp_table:
+
+            if force_create_remote_table:
+                remote_table_exists = False
+
+            else:
+                remote_table_exists = self.table_exists(host=self.origin_host, db=self.temp_db_name, table=self.temp_table_name)
+
+            # verify if remote table alredy exists
+            if not remote_table_exists:
+                print("Dropping table {} on {} .. ".format(self.full_temp_table_name, self.origin_host))
+                self.drop_table(conn=remote_temp_db_conn, table_name=self.full_temp_table_name)
+
+                print("Creating table {} on {} .. ".format(self.full_temp_table_name, self.origin_host))
+                self.create_table(conn=remote_temp_db_conn, table_name=self.full_temp_table_name, ddl=ddl)
+
+                # insert from select
+                print("Populating table {} on {} using informed sample sql.. ".format(self.full_temp_table_name, self.origin_host))
+                self.populate_table(conn=conn_origin, table_name=self.full_temp_table_name, partitions=partitions, sql=self.sample_sql)
+
+            else:
+                print("Table {} on {} already exists ...".format(self.full_temp_table_name, self.origin_host))
+
+        self.print_finish_step()
+
+        #
+        ##################################################################################
+        # Step 4 - Copying remote hdfs files
+        self.print_start_step(name="Copying HDFS Files", step_number=4, total_steps=6)
+
+        # get temp location
+        print("Getting hdfs files location from {} table ...".format(self.full_temp_table_name))
+        temp_table_location = self.get_table_location(conn=remote_temp_db_conn, table_name=self.full_temp_table_name)
+
+        # copy hdfs files for local hdfs
+        external_table_location = self.generate_table_location(
+            host=self.destination_host,
+            root_path=self.destination_hdfs_root_path,
+            db_name=self.temp_db_name, table_name=self.temp_table_name)
+
+        print("Copying files from [{}] to [{}]".format(temp_table_location, external_table_location))
+        self.hdfs_dist_copy(force=copy_files,
+                            hdfs_host=self.destination_host,
+                            hdfs_port=self.destination_port,
+                            origin=temp_table_location,
+                            dest=external_table_location,
+                            password=self.destination_host_password,
+                            username=self.destination_host_username)
+
+        self.print_finish_step()
+        #
+        ##################################################################################
+        # Step 5 - External table creation using hdfs files
+        self.print_start_step(name="Local Temporary Table Creation", step_number=5, total_steps=6)
+
+        # creating external table using parquet files in hdfs
+        print("Dropping temp table {} on {} .. ".format(self.full_temp_table_name, self.destination_host))
+        self.drop_table(conn=local_temp_db_conn, table_name=self.full_temp_table_name)
+
+        # create temp table
+        print("Creating temp table {} using imported hdfs files from [{}] ...".format(self.full_temp_table_name, external_table_location))
+        self.create_external_table(conn=local_temp_db_conn,
+                                   temp_table_name=self.full_temp_table_name,
+                                   ddl=ddl,
+                                   parquet_file_location=external_table_location)
+
+        print("Refreshing table {} partitions on {} ..".format(self.full_temp_table_name, self.destination_host))
+        self.refresh_partitions(conn=local_temp_db_conn, table_name=self.full_temp_table_name)
+
+        self.print_finish_step()
+
+        #
+        ##################################################################################
+        # Step 6 - Destination table creation from external table
+        self.print_start_step(name="Table population", step_number=6, total_steps=6)
+
+        # create view
+        print("Dropping table view {} on {} .. ".format(self.full_table_name, self.destination_host))
+        self.drop_view(conn=local_conn, view_name=self.full_table_name)
+
+        print("Creating table view {} ... ".format(self.full_table_name, self.destination_host))
+        self.create_view(conn=local_conn, view_name=self.full_table_name, table_name=self.full_temp_table_name)
+
+        self.print_finish_step()
+
+        print("Procedure done!!!!")
+
+    @property
+    def temp_table_prefix(self):
+        return "{}".format(slugify(self.engine).replace('-', '_'))
+
+    @property
+    def temp_table_name(self):
+        return "{}_{}_{}_{}".format(
+            self.temp_table_prefix,
+            self.origin_db,
+            self.target_table_name,
+            hashlib.sha1(slugify(self.sample_sql).encode('utf-8')).hexdigest()
+        )
+
+    @property
+    def full_table_name(self):
+        return "{}.{}".format(self.origin_db, self.target_table_name)
+
+    @property
+    def full_temp_table_name(self):
+        return "{}.{}".format(self.temp_db_name, self.temp_table_name)
+
+    def generate_table_location(self, root_path, host, db_name, table_name):
+        return "hdfs://{}:8020{}".format(host, os.path.join(root_path, db_name, table_name))
+
+    def clean_ddl(self, ddl, remove_formats=True, remove_general=True):
+        if remove_general:
+            # Removing LOCATION statement
+            regex = "(LOCATION\s+'(.*?)')"
+            result = re.search(regex, ddl)
+            ddl = ddl.replace(result.group(1), " ") if result else ddl
+
+            # Removing TBLPROPERTIES statement
+            regex = "(TBLPROPERTIES\s+(.*?)\))"
+            result = re.search(regex, ddl)
+            ddl = ddl.replace(result.group(1), " ") if result else ddl
+
+            # Removing WITH SERDEPROPERTIES statement
+            regex = "(WITH SERDEPROPERTIES\s+(.*?)\))"
+            result = re.search(regex, ddl)
+            ddl = ddl.replace(result.group(1), " ") if result else ddl
+
+        if remove_formats:
+            # Removing STORED AS INPUTFORMAT statement
+            regex = "(STORED AS INPUTFORMAT\s+'(.*?)')"
+            result = re.search(regex, ddl)
+            ddl = ddl.replace(result.group(1), " ") if result else ddl
+
+            # Removing OUTPUTFORMAT statement
+            regex = "(OUTPUTFORMAT\s+'(.*?)')"
+            result = re.search(regex, ddl)
+            ddl = ddl.replace(result.group(1), " ") if result else ddl
+
+        return ddl
+
+    def get_table_format(self, ddl):
+        regex = "(STORED AS INPUTFORMAT\s+'(.*?)')"
+        result = re.search(regex, ddl)
+        input_format = result.group(2)
+        return self.supported_format_types[input_format.split(".")[-1]]
+
+    def get_database_info(self, ddl):
+        regex = "CREATE TABLE `((.*?)\.)?(.*?)`\("
+        result = re.search(regex, ddl)
+        if result:
+            groups = result.groups()
+            if groups[0]:
+                # found db name
+                return {'db': groups[1], 'table': groups[2]}
+            else:
+                {'db': None, 'table': groups[2]}
+        return {'db': None, 'table': None}
+
+    def get_createtable_ddl(self, conn, origin_table_name, dest_table_name):
+        cursor = conn.cursor()
+        cursor.execute("SHOW CREATE TABLE " + origin_table_name)
+        _lines = [_line[0] for _line in cursor.fetchall()]
+        ddl = ''.join(_lines)
+        ddl = self.clean_ddl(ddl, remove_formats=False, remove_general=True)
+        ddl = ddl.replace(origin_table_name, dest_table_name)
+        cursor.close()
+        return ddl
+
+    def create_database(self, conn, db):
+        self._execute_db_command(conn, "CREATE DATABASE IF NOT EXISTS " + db)
+
+    def drop_table(self, conn, table_name):
+        self._execute_db_command(conn, 'DROP TABLE IF EXISTS ' + table_name)
+
+    def drop_view(self, conn, view_name):
+        self._execute_db_command(conn, 'DROP VIEW ' + view_name)
+
+    def create_table(self, conn, table_name, ddl):
+        self._execute_db_command(conn, ddl)
+
+    def _execute_db_command(self, conn, command):
+        cursor = conn.cursor()
+        cursor.execute(command)
+        self.show_log(cursor)
+        cursor.close()
+
+    def get_connection(self, host, db='DEFAULT', queue='default'):
+        return hive.connect(host=host,
+                            database=db,
+                            configuration={'mapred.job.queue.name': queue,
+                                           ' hive.exec.dynamic.partition.mode': 'nonstrict'})
+
+    def retrieve_data_sample(self, conn, full_table_name, sample_limit=100):
+        cursor = conn.cursor()
+
+        sql = "SELECT * FROM {} TABLESAMPLE ({} ROWS)".format(full_table_name, sample_limit)
+
+        cursor.execute(sql)
+        data_header = [{'col': line[0].split('.')[1], 'table': line[0].split('.')[0], 'type': line[1]} for line in cursor.description]
+        data = [row for row in cursor.fetchall()]
+        self.show_log(cursor)
+        cursor.close()
+        return {'data_header': data_header,
+                'total_lines': len(data),
+                'data': data,
+                'estimate_query_size': sys.getsizeof(data),
+                'estimate_query_mean_per_line': sys.getsizeof(data) / len(data)}
+
+    def count_rows(self, conn, sql):
+        cursor = conn.cursor()
+        cursor.execute("SELECT COUNT(1) " + sql[sql.upper().rfind("FROM"):])
+        size = cursor.fetchone()[0]
+        self.show_log(cursor)
+        cursor.close()
+        return size
+
+    def show_log(self, cursor):
+        for l in cursor.fetch_logs():
+            logger.debug(l)
+
+    def save_data(self, conn, table, data):
+        cursor = conn.cursor()
+        print('Inserting {} rows in {} table...'.format(data['total_lines'], table))
+        cols = [v['col'] for v in data['data_header']]
+        dml = "INSERT INTO {0} ({1}) VALUES ({2})".format(table, ", ".join(cols), ", ".join(['%s' for col in cols]))
+        cursor.executemany(dml, [(v,) for v in data['data'][1:10]])
+        self.show_log(cursor)
+        cursor.close()
+
+    def get_partitions(self, ddl):
+        regex = "(PARTITIONED BY\s+\((.*?)\))"
+        result = re.search(regex, ddl)
+        if result:
+            p_cols = result.group(2).strip().replace('`', '').split(",")
+            return [{'col': p_col.split()[0], 'type': p_col.split()[1]} for p_col in p_cols]
+        else:
+            return []
+
+    def has_partitions(self, sql, partitions):
+        regex = "WHERE(.*?)(" + "|".join(partitions).upper() + ")"
+        result = re.search(regex, sql.upper())
+
+        if result:
+            return True
+        else:
+            return False
+
+    def populate_table(self, conn, table_name, partitions, sql):
+        partitions = [p['col'] for p in partitions]
+        partitions_statement = "PARTITION ({})".format(", ".join(partitions)) if partitions else ""
+        dml = "INSERT OVERWRITE TABLE {0} {1} {2}".format(table_name, partitions_statement, sql)
+        self._execute_db_command(conn, dml)
+
+    def create_view(self, conn, view_name, table_name):
+        dml = "CREATE VIEW {0} AS SELECT * FROM {1}".format(view_name, table_name)
+        self._execute_db_command(conn, dml)
+
+    def refresh_partitions(self, conn, table_name):
+        refresh_statement = "MSCK REPAIR TABLE {0}".format(table_name)
+        self._execute_db_command(conn, refresh_statement)
+
+    def get_table_location(self, conn, table_name):
+        cursor = conn.cursor()
+        cursor.execute("DESCRIBE FORMATTED {}".format(table_name))
+        location = [key[1].strip() for key in cursor.fetchall() if key[0] and key[0].strip().upper() == 'LOCATION:']
+        location = location[0].replace('hdfs://', 'hftp://')
+        cursor.close()
+        return location
+
+    def delete_files(self, ssh, url):
+        cmd = "hdfs dfs -rm -R '{}'".format(url)
+        self._hdfs_commands(ssh, cmd)
+
+    def copy_files(self, ssh, origin, dest):
+        cmd = "hadoop distcp --update '{}' '{}'".format(origin, dest)
+        return self._hdfs_commands(ssh, cmd)
+
+    def _hdfs_commands(self, ssh, cmd):
+        logger.debug("Executing remote command: {}".format(cmd))
+        i, o, e = ssh.exec_command(cmd)
+        errors = e.readlines()
+        output = o.readlines()
+        logger.debug(output)
+        logger.debug(errors)
+        return output, errors
+
+    def _get_ssh_client(self, hdfs_host, hdfs_port, username, password):
+        ssh = SSHClient()
+        ssh.set_missing_host_key_policy(AutoAddPolicy())
+        ssh.connect(hostname=hdfs_host, port=hdfs_port, username=username, password=password, )
+        return ssh
+
+    def hdfs_dist_copy(self, force, hdfs_host, hdfs_port, origin, dest, username=None, password=None):
+        # connecting with hdfs host
+        ssh = self._get_ssh_client(hdfs_host, hdfs_port, username, password)
+
+        if force:
+            print("Removing old hdfs files if necessary. To force copy remote files use --force-copy-files flag.")
+
+            # delete files from dest
+            self.delete_files(ssh, dest)
+
+        else:
+            print("Using old hdfs files to complete the procedure. If necessary to copy files again use --force-copy-files flag.")
+
+        # copy files from origin to destination
+        _, copy_errors = self.copy_files(ssh, origin, dest)
+
+        # validate copy
+        cmd_template = "hdfs dfs -ls -R '{}' | grep -E '^-' | wc -l"
+        cmd = cmd_template.format(origin)
+        result1, _ = self._hdfs_commands(ssh, cmd)
+
+        cmd = cmd_template.format(dest)
+        result2, _ = self._hdfs_commands(ssh, cmd)
+
+        if result1 == result2:
+            print("Files {} successfully transferred!!".format(result1))
+        else:
+            print("Errors during hdfs files copy process!!")
+            for e_l in copy_errors:
+                logger.debug(e_l)
+            sys.exit("Stoping process!")
+
+    def create_external_table(self, conn, temp_table_name, ddl, parquet_file_location):
+        format_type = self.get_table_format(ddl)
+        ddl = self.clean_ddl(ddl, remove_formats=True, remove_general=False)
+        ddl = ddl.replace("CREATE TABLE", "CREATE EXTERNAL TABLE")
+        ddl = "{} STORED AS {} LOCATION '{}'".format(ddl, format_type, parquet_file_location)
+        self.create_table(conn=conn, table_name=temp_table_name, ddl=ddl)
diff --git a/marvin_python_toolbox/management/notebook.py b/marvin_python_toolbox/management/notebook.py
new file mode 100644
index 0000000..eb4e761
--- /dev/null
+++ b/marvin_python_toolbox/management/notebook.py
@@ -0,0 +1,82 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+import sys
+import click
+
+
+@click.group('notebook')
+def cli():
+    pass
+
+
+@cli.command('notebook', help='Start the Jupyter notebook server.')
+@click.option('--port', '-p', default=8888, help='Jupyter server port')
+@click.option('--enable-security', is_flag=True, help='Enable jupyter notebook token security.')
+@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
+@click.option('--allow-root', is_flag=True, help='Run notebook from root user.')
+@click.pass_context
+def notebook_cli(ctx, port, enable_security, spark_conf, allow_root):
+    notebook(ctx, port, enable_security, spark_conf, allow_root)
+
+
+def notebook(ctx, port, enable_security, spark_conf, allow_root):
+    notebookdir = os.path.join(ctx.obj['base_path'], 'notebooks')
+    command = [
+        "SPARK_CONF_DIR={0} YARN_CONF_DIR={0}".format(spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")),
+        'jupyter', 'notebook',
+        '--notebook-dir', notebookdir,
+        '--ip', '0.0.0.0',
+        '--port', str(port),
+        '--no-browser',
+        '--config', os.path.join(os.environ["MARVIN_TOOLBOX_PATH"], 'extras', 'notebook_extensions', 'jupyter_notebook_config.py')
+    ]
+
+    command.append("--NotebookApp.token=") if not enable_security else None
+    command.append("--allow-root") if allow_root else None
+
+    ret = os.system(' '.join(command))
+    sys.exit(ret)
+
+
+@cli.command('lab', help='Start the JupyterLab server.')
+@click.option('--port', '-p', default=8888, help='JupyterLab server port')
+@click.option('--enable-security', is_flag=True, help='Enable jupyterlab token security.')
+@click.option('--spark-conf', '-c', envvar='SPARK_CONF_DIR', type=click.Path(exists=True), help='Spark configuration folder path to be used in this session')
+@click.pass_context
+def lab_cli(ctx, port, enable_security, spark_conf):
+    lab(ctx, port, enable_security, spark_conf)
+
+
+def lab(ctx, port, enable_security, spark_conf):
+    notebookdir = os.path.join(ctx.obj['base_path'], 'notebooks')
+    command = [
+        "SPARK_CONF_DIR={0} YARN_CONF_DIR={0}".format(spark_conf if spark_conf else os.path.join(os.environ["SPARK_HOME"], "conf")),
+        'jupyter-lab',
+        '--notebook-dir', notebookdir,
+        '--ip', '0.0.0.0',
+        '--port', str(port),
+        '--no-browser',
+    ]
+
+    command.append("--NotebookApp.token=") if not enable_security else None
+
+    ret = os.system(' '.join(command))
+    sys.exit(ret)
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/pkg.py b/marvin_python_toolbox/management/pkg.py
new file mode 100644
index 0000000..d126f68
--- /dev/null
+++ b/marvin_python_toolbox/management/pkg.py
@@ -0,0 +1,417 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import os
+import pip
+from distutils.version import LooseVersion
+import sys
+import subprocess
+import click
+import re
+import os.path
+import errno
+import shutil
+from .._compatibility import urlparse
+import multiprocessing
+
+__all__ = ['copy']
+
+
+@click.group('pkg')
+def cli():
+    pass
+
+
+@cli.command('pkg-showversion', help='Show the package version.')
+@click.pass_context
+def version(ctx):
+    print(get_version(ctx.obj['package_path']))
+
+
+@cli.command('pkg-showchanges', help='Show the package changelog.')
+@click.pass_context
+def log(ctx):
+    os.system('less {}'.format(os.path.join(ctx.obj['base_path'], 'CHANGES.md')))
+
+
+@cli.command('pkg-showinfo', help='Show information about the package.')
+@click.pass_context
+def info(ctx):
+    version = get_version(ctx.obj['package_path'])
+    repo = get_git_repository_url(ctx.obj['base_path'])
+    branch = get_git_branch(ctx.obj['base_path'])
+    commit = get_git_commit(ctx.obj['base_path'])
+    tag = get_git_tag(ctx.obj['base_path'])
+    tag_commit = get_git_commit(ctx.obj['base_path'], tag=tag)
+    # tags = utils.get_git_tags(ctx.obj['base_path'])
+    tagged = 'yes' if (tag[1:] == version) else 'no'
+    clean = is_git_clean(ctx.obj['base_path'])
+    status = 'clean' if clean else 'dirty'
+    updated = ('' if (commit == tag_commit and clean) else
+               '(dev)' if (not tag[1:] == version) else
+               '(should be bumped)')
+    pip = 'git+ssh://{repo}@{tag}#egg={pkg}'.format(
+        repo=repo[:-4].replace(':', '/'), tag=tag,
+        pkg=ctx.obj['package_name'])
+
+    print('')
+    print('package: {name}'.format(name=ctx.obj['package_name']))
+    print('type:    {type_}'.format(type_=(ctx.obj['type'] or 'unknown')))
+    print('version: {version} {updated}'.format(version=version,
+                                                updated=updated))
+    print('')
+    print('branch:  {branch}'.format(branch=branch))
+    print('status:  {status}'.format(status=status))
+    print('commit:  {commit}'.format(commit=commit))
+    print('repo:    {repo}'.format(repo=repo))
+    print('')
+    print('tagged:  {tagged}'.format(tagged=tagged))
+    print('current: {tag} ({tag_commit})'.format(tag=tag,
+                                                 tag_commit=tag_commit))
+    print('pip url: {pip}'.format(pip=pip))
+    print('')
+
+
+@cli.command('pkg-updatedeps', help='Show information about the package.')
+@click.pass_context
+def deps(ctx):
+    repos = get_repos_from_requirements(ctx.obj['base_path'])
+    required_versions = get_tag_from_repo_url(repos)
+    latest_versions = get_latest_tags_from_repos(repos)
+    installed_pkgs = pip.get_installed_distributions()
+    click.echo('')
+    for repo in repos:
+        status = 'outdated'
+        required = required_versions[repo]
+        latest = latest_versions[repo]
+        try:
+            repo_small = repo.split('@')[1]
+            pkg_name = repo.split('egg=')[1]
+        except IndexError:
+            continue
+        pkg_name_normalized = pkg_name.lower().replace('_', '-')
+        installed = 'unknown'
+        installed_list = [
+            pkg.version
+            for pkg in installed_pkgs
+            if pkg.key in [pkg_name_normalized, pkg_name_normalized + '-lib']
+        ]
+        if installed_list:
+            installed = 'v{}'.format(installed_list[0])
+
+        if latest is None or installed is None:
+            continue
+
+        if LooseVersion(installed) > LooseVersion(latest):
+            status = 'develop'
+        elif LooseVersion(installed) < LooseVersion(required):
+            status = 'up-to-date (old version installed)'
+        elif required == latest:
+            status = 'up-to-date'
+        msg = '{pkg_name}: {status} (required: {required}, installed: {installed}, latest: {latest})'.format(
+            repo=repo_small, pkg_name=pkg_name_normalized, status=status, required=required, installed=installed, latest=latest)
+        if status == 'up-to-date' or (status == 'develop' and installed == required):
+            color = 'green'
+        elif status in ('develop', 'up-to-date (old version installed)') or installed == latest:
+            color = 'yellow'
+        else:
+            color = 'red'
+        click.echo(click.style(msg, fg=color))
+
+
+@cli.command('pkg-bumpversion', help='Bump the package version.')
+@click.argument('part', default='patch')
+@click.option('--allow-dirty', is_flag=True,
+              help='Allow dirty')
+@click.option('--force', '-f', is_flag=True,
+              help='Alias for --allow-dirty')
+@click.option('--yes', '-y', is_flag=True,
+              help='Answer yes to all prompts')
+@click.pass_context
+def bumpversion(ctx, part, allow_dirty, force, yes):
+    args = [part]
+    allow_dirty = allow_dirty or force
+
+    is_clean = is_git_clean(ctx.obj['base_path'])
+    if not is_clean and not allow_dirty:
+        print('')
+        print('ERROR: Git working directory is not clean.')
+        print('')
+        print('You can use --allow-dirty or --force if you know what '
+              'you\'re doing.')
+        exitcode = 1
+    else:
+        if allow_dirty:
+            args.append('--allow-dirty')
+        command = ['bumpversion'] + args
+
+        old_version = get_version(ctx.obj['package_path'])
+        exitcode = subprocess.call(command, cwd=ctx.obj['base_path'])
+        new_version = get_version(ctx.obj['package_path'])
+
+        if exitcode == 0:
+            print('Bump version from {old} to {new}'.format(
+                old=old_version, new=new_version))
+        if yes or click.confirm('Do you want to edit CHANGES.md?'):
+            click.edit(filename=os.path.join(ctx.obj['base_path'], 'CHANGES.md'))
+    sys.exit(exitcode)
+
+
+@cli.command('pkg-createtag', help='Create git tag using the package version.')
+@click.pass_context
+def tag(ctx):
+    tag = 'v{}'.format(get_version(ctx.obj['package_path']))
+    print('Creating git tag {}'.format(tag))
+    command = ['git', 'tag', '-m', '"version {}"'.format(tag), tag]
+    sys.exit(subprocess.call(command))
+
+
+@cli.command('pkg-updatedeps', help='Update requirements.txt.')
+@click.option('--install', '-i', is_flag=True)
+@click.option('--install-all', '-a', is_flag=True)
+@click.pass_context
+def update(ctx, install, install_all):
+    base_path = ctx.obj['base_path']
+    repos = get_repos_from_requirements(base_path)
+    required_versions = get_tag_from_repo_url(repos)
+    latest_versions = get_latest_tags_from_repos(repos)
+    installed_pkgs = pip.get_installed_distributions()
+    install_list = ['-e .']
+    click.echo('')
+    for repo in repos:
+        latest = latest_versions[repo]
+        required = required_versions[repo]
+        try:
+            pkg_name = repo.split('egg=')[1]
+        except IndexError:
+            continue
+        pkg_name_normalized = pkg_name.lower().replace('_', '-')
+        installed = 'unknown'
+        installed_list = [
+            pkg.version
+            for pkg in installed_pkgs
+            if pkg.key in [pkg_name_normalized, pkg_name_normalized + '-lib']
+        ]
+        if installed_list:
+            installed = 'v{}'.format(installed_list[0])
+
+        if LooseVersion(required) < LooseVersion(latest):
+            click.echo('Updating {} from {} to {}...'.format(pkg_name, required, latest))
+            new_repo = update_repo_tag(repo, latest, path=base_path)
+            if LooseVersion(installed) < LooseVersion(latest):
+                install_list.append(new_repo)
+        elif LooseVersion(installed) < LooseVersion(required):
+            install_list.append(repo)
+    if install_all:
+        install = True
+        install_list = ['-r requirements.txt']
+    if install:
+        for new_repo in install_list:
+            new_repo = new_repo.strip()
+            click.echo('')
+            click.echo('Running `pip install -U {}` ...'.format(new_repo))
+            command = ['pip', 'install', '-U'] + new_repo.split(' ')
+            exitcode = subprocess.call(command, cwd=base_path)
+            if exitcode == 0:
+                click.echo('Done.')
+            else:
+                click.echo('Failed.')
+                sys.exit(exitcode)
+
+
+def copy(src, dest, ignore=('.git', '.pyc', '__pycache__')):
+    try:
+        shutil.copytree(src, dest, ignore=shutil.ignore_patterns(*ignore))
+    except OSError as e:
+        if e.errno == errno.ENOTDIR:
+            shutil.copy(src, dest)
+        else:
+            print('Directory not copied. Error: %s' % e)
+
+
+def get_version(path):
+    """Return the project version from VERSION file."""
+
+    with open(os.path.join(path, 'VERSION'), 'rb') as f:
+        version = f.read().decode('ascii').strip()
+    return version.strip()
+
+
+def get_repos_from_requirements(path):
+    if path is None:
+        path = os.path.curdir
+    with open(os.path.join(path, 'requirements.txt'), 'r') as fp:
+        repos = [line.strip() for line in fp if 'git@' in line and not line.strip().startswith('#')]
+    return repos
+
+
+def get_tag_from_repo_url(repos):
+    tags = {}
+    for repo in repos:
+        if '@' in repo:
+            repo_parsed = urlparse(repo)
+            repo_path = repo_parsed.path
+            tags[repo] = repo_path.split('@')[1]
+        else:
+            tags[repo] = None
+    return tags
+
+
+def _clone(repo):
+    return repo, git_clone(repo, checkout=False, depth=1)
+
+
+def get_latest_tags_from_repos(repos):
+    tags = {}
+    if not repos:
+        return tags
+
+    pool = multiprocessing.Pool(len(repos))
+
+    repos_ = pool.map(_clone, repos)
+    for repo, path in repos_:
+        if path:
+            tag = get_git_tag(path)
+        else:
+            tag = None
+        tags[repo] = tag
+    return tags
+
+
+def update_repo_tag(repo, tag, path=None):
+    if path is None:
+        path = os.path.curdir
+    ret = ''
+    content = ''
+    with open(os.path.join(path, 'requirements.txt'), 'r') as fp:
+        for line in fp:
+            if repo in line:
+                line = re.sub(r'@v[0-9]+\.[0-9]+\.[0-9]+', '@{}'.format(tag), line)
+                ret += line
+            content += line
+
+    with open(os.path.join(path, 'requirements.txt'), 'w') as fp:
+        fp.write(content)
+
+    return ret
+
+
+repo_re = re.compile(r':(\w+)\/(.*)\.git')
+
+
+def git_clone(repo, dest=None, checkout=True, depth=None, branch=None, single_branch=False):
+    if '#egg' in repo:
+        repo_parsed = urlparse(repo)
+        repo_path = repo_parsed.path
+        if '@' in repo_path:
+            repo_path = repo_path.split('@')[0]
+        repo_path = repo_path.strip('/')
+        repo_team, repo_name = tuple(repo_path.split('/'))
+        repo = repo_parsed.netloc + ':' + repo_path
+    else:
+        repo_info = re.search(repo_re, repo)
+        if not repo_info:
+            return None
+        repo_team = repo_info.group(1)
+        repo_name = repo_info.group(2)
+    if dest is None:
+        path = os.path.join(os.path.expanduser('~'), '.marvin-python-toolbox', 'repos')
+        dest = os.path.join(path, repo_team, repo_name)
+
+    opts = ''
+    if not checkout:
+        opts += ' -n'
+    if depth:
+        opts += ' --depth ' + str(depth)
+    if branch:
+        opts += ' --branch ' + branch
+    if single_branch:
+        opts += ' --single-branch'
+
+    if not os.path.exists(dest):
+        os.makedirs(dest)
+        command = 'git clone {opts} {repo} {dest}'.format(
+            opts=opts, repo=repo, dest=dest)
+        print(command)
+        subprocess.Popen(command.split(), stdout=subprocess.PIPE).wait()
+
+    opts = ''
+    if depth:
+        opts += ' --depth ' + str(depth)
+    print('Fetching latest version from {} repository'.format(repo_name))
+    try:
+        subprocess.Popen(('git fetch --tags ' + opts).split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, cwd=dest).wait()
+    except OSError:
+        print('Could not fetch tags from {}'.format(repo_name))
+        dest = None
+
+    return dest
+
+
+def get_git_branch(path=None):
+    if path is None:
+        path = os.path.curdir
+    command = 'git rev-parse --abbrev-ref HEAD'.split()
+    branch = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
+    return branch.strip().decode('utf-8')
+
+
+def get_git_tag(path=None):
+    if path is None:
+        path = os.path.curdir
+    command = 'git rev-list --tags --max-count=1'.split()
+    commit = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read().decode('utf-8')
+    command = 'git describe --tags {}'.format(commit).split()
+    tag = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read().decode('utf-8')
+    return tag.strip()
+
+
+def get_git_commit(path=None, tag=None):
+    if path is None:
+        path = os.path.curdir
+    if tag:
+        command = 'git rev-list -n 1 {tag}'.format(tag=tag).split()
+    else:
+        command = 'git rev-parse HEAD'.split()
+    commit = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
+    return commit.strip().decode('utf-8')
+
+
+def get_git_repository_url(path=None):
+    if path is None:
+        path = os.path.curdir
+    command = 'git config --get remote.origin.url'.split()
+    url = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
+    return url.strip().decode('utf-8')
+
+
+def get_git_tags(path=None):
+    if path is None:
+        path = os.path.curdir
+    command = 'git tag'.split()
+    tags = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
+    return sorted(tags.strip().split('\n'), reverse=True)
+
+
+def is_git_clean(path=None):
+    if path is None:
+        path = os.path.curdir
+    command = 'git diff --quiet HEAD'.split()
+    exit_code = subprocess.Popen(command, stdout=subprocess.PIPE, cwd=path).stdout.read()
+    return exit_code
diff --git a/marvin_python_toolbox/management/templates/python-engine/.bumpversion.cfg b/marvin_python_toolbox/management/templates/python-engine/.bumpversion.cfg
new file mode 100644
index 0000000..e54a5c8
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/.bumpversion.cfg
@@ -0,0 +1,5 @@
+[bumpversion]
+current_version = 0.0.1
+
+[bumpversion:file:{{project.package}}/VERSION]
+[bumpversion:file:README.md]
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/.coveragerc b/marvin_python_toolbox/management/templates/python-engine/.coveragerc
new file mode 100644
index 0000000..6ebe78e
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/.coveragerc
@@ -0,0 +1,22 @@
+[run]
+omit = tests/*
+branch = True
+
+[report]
+exclude_lines =
+    pragma: no cover
+
+    def __repr__
+    if self\.debug
+
+    raise AssertionError
+    raise NotImplementedError
+
+    if 0:
+    if __name__ == .__main__.:
+
+[html]
+directory = coverage_report
+
+[xml]
+output = coverage_report.xml
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/.gitignore b/marvin_python_toolbox/management/templates/python-engine/.gitignore
new file mode 100644
index 0000000..05dd2a2
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/.gitignore
@@ -0,0 +1,16 @@
+.cache
+.eggs
+.tox
+.testmondata
+.coverage
+.coverage.*
+coverage_report.xml
+coverage_report
+*.egg
+*.egg-info
+*.pyc
+tests/__pycache__
+.DS_Store
+.packages
+.profiling
+notebooks/data
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/CHANGES.md b/marvin_python_toolbox/management/templates/python-engine/CHANGES.md
new file mode 100644
index 0000000..4835f6f
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/CHANGES.md
@@ -0,0 +1,5 @@
+## Changes log
+
+### 0.0.1
+
+ - initial version
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/Dockerfile b/marvin_python_toolbox/management/templates/python-engine/Dockerfile
new file mode 100644
index 0000000..08deb81
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/Dockerfile
@@ -0,0 +1,122 @@
+FROM ubuntu:16.04
+
+MAINTAINER {{mantainer.email}}
+
+ENV SLEEP_MILLIS 0
+
+USER root
+
+##############################################################
+# Define all environment variables to be used 
+##############################################################
+
+ENV MARVIN_HOME=/opt/marvin
+ENV MARVIN_DATA_PATH=/marvin-data
+ENV MARVIN_ENGINE_HOME=$MARVIN_HOME/engine
+ENV MARVIN_ENGINE_ENV=marvin-engine-env
+ENV WORKON_HOME=$MARVIN_HOME/.virtualenvs
+ENV SPARK_HOME=/opt/spark
+ENV SPARK_CONF_DIR=$SPARK_HOME/conf
+ENV HADOOP_CONF_DIR=$SPARK_CONF_DIR
+ENV YARN_CONF_DIR=$SPARK_CONF_DIR
+
+
+##############################################################
+# Create all folders needed 
+##############################################################
+
+RUN mkdir -p $MARVIN_HOME && \
+    mkdir -p $MARVIN_DATA_PATH && \
+    mkdir -p $MARVIN_ENGINE_HOME && \
+    mkdir -p /var/log/marvin/engines && \
+    mkdir -p /var/run/marvin/engines
+
+
+##############################################################
+# Install the system dependencies for default installation 
+##############################################################
+
+RUN apt-get update -y && \
+    apt-get install -y build-essential && \
+    apt-get install -y maven git python cmake software-properties-common curl libstdc++6 && \
+    apt-get install -y git && \
+    apt-get install -y wget && \
+    apt-get install -y python2.7-dev && \
+    apt-get install -y python-pip && \
+    apt-get install -y ipython && \
+    apt-get install -y libffi-dev && \
+    apt-get install -y libssl-dev && \
+    apt-get install -y libxml2-dev && \
+    apt-get install -y libxslt1-dev && \
+    apt-get install -y libpng12-dev && \
+    apt-get install -y libfreetype6-dev && \
+    apt-get install -y python-tk && \
+    apt-get install -y libsasl2-dev && \
+    apt-get install -y python-pip && \
+    apt-get install -y graphviz && \
+    pip install --upgrade pip && \
+    apt-get clean
+
+RUN pip install virtualenvwrapper
+
+# Install Oracle JDK
+RUN add-apt-repository ppa:webupd8team/java -y && \
+    apt-get -qq update && \
+    echo debconf shared/accepted-oracle-license-v1-1 select true | debconf-set-selections && \
+    echo debconf shared/accepted-oracle-license-v1-1 seen true | debconf-set-selections && \
+    apt-get install -y oracle-java8-installer    
+
+
+##############################################################
+# Install Apache Spark
+#
+# Uncomment if you are using Spark; note that the Spark
+# configuration files are required for this to work correctly.
+##############################################################
+
+# RUN curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o /tmp/spark-2.1.1-bin-hadoop2.6.tgz && \
+#    tar -xf /tmp/spark-2.1.1-bin-hadoop2.6.tgz -C /opt/ && \
+#    ln -s /opt/spark-2.1.1-bin-hadoop2.6 /opt/spark
+
+# Add the b2w datalake config for Spark
+# ADD spark-conf.tar $SPARK_CONF_DIR
+RUN mkdir -p $SPARK_CONF_DIR
+
+##############################################################
+# Create the virtualenv configuration
+##############################################################
+
+RUN /bin/bash -c "cd $MARVIN_ENGINE_HOME && \
+    source /usr/local/bin/virtualenvwrapper.sh && \
+    mkvirtualenv $MARVIN_ENGINE_ENV"
+
+
+##############################################################
+#        <CUSTOM ENGINE INSTALLATION PROCEDURE HERE>         #
+##############################################################
+
+
+##############################################################
+# Copy and Install the marvin engine inside virtualenv
+##############################################################
+
+ADD build/engine.tar $MARVIN_ENGINE_HOME
+
+ADD build/marvin-engine-executor-assembly.jar $MARVIN_DATA_PATH 
+
+RUN /bin/bash -c "source /usr/local/bin/virtualenvwrapper.sh && \
+    workon $MARVIN_ENGINE_ENV && \
+    cd $MARVIN_ENGINE_HOME && \
+    pip install . --process-dependency-links"
+
+
+##############################################################
+# Starts the engine http server
+##############################################################
+
+EXPOSE 8000
+
+CMD /bin/bash -c "source /usr/local/bin/virtualenvwrapper.sh && \
+    workon $MARVIN_ENGINE_ENV && \
+    cd $MARVIN_ENGINE_HOME && \
+    marvin engine-httpserver -h 0.0.0.0 -p 8000"
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/INSTALL b/marvin_python_toolbox/management/templates/python-engine/INSTALL
new file mode 100644
index 0000000..fccdaf8
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/INSTALL
@@ -0,0 +1 @@
+REPLACE: Add here the detailed instructions to install this project
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/LICENSE b/marvin_python_toolbox/management/templates/python-engine/LICENSE
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/LICENSE
diff --git a/marvin_python_toolbox/management/templates/python-engine/MANIFEST.in b/marvin_python_toolbox/management/templates/python-engine/MANIFEST.in
new file mode 100644
index 0000000..4f5d4a8
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/MANIFEST.in
@@ -0,0 +1,9 @@
+include CHANGES.md
+include INSTALL
+include LICENSE
+include MANIFEST.in
+include README.md
+include {{project.package}}/VERSION
+recursive-include notebooks *
+prune notebooks/build
+recursive-include tests *
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/Makefile b/marvin_python_toolbox/management/templates/python-engine/Makefile
new file mode 100644
index 0000000..ba3aac7
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/Makefile
@@ -0,0 +1,76 @@
+.PHONY: help marvin marvin-prod update clean-pyc clean-build clean-reports clean-deps clean docker-build docker-push docker-run
+
+DOCKER_VERSION?=0.00.01
+DOCKER_REGISTRY_ADRESS?=docker.registry.io
+MARVIN_DATA_PATH?=$(HOME)/marvin/data
+MARVIN_ENGINE_NAME?={{project.name}}
+MARVIN_TOOLBOX_VERSION?={{project.toolbox_version}}
+
+help:
+	@echo "    marvin"
+	@echo "        Prepare project to be used as a marvin package."
+	@echo "    marvin-prod"
+	@echo "        Prepare project to be used in production environment."
+	@echo "    update"
+	@echo "        Reinstall requirements and setup.py dependencies."
+	@echo "    clean"
+	@echo "        Remove all generated artifacts."
+	@echo "    clean-pyc"
+	@echo "        Remove python artifacts."
+	@echo "    clean-build"
+	@echo "        Remove build artifacts."
+	@echo "    clean-reports"
+	@echo "        Remove coverage reports."
+	@echo "    clean-deps"
+	@echo "        Remove marvin setup.py dependencies."
+	@echo "    docker-build"
+	@echo "        Runs the docker build command with marvin env default parameters."
+	@echo "    docker-push"
+	@echo "        Runs the docker push command with marvin env default parameters."
+	@echo "    docker-run"
+	@echo "        Runs the docker run command with marvin env default parameters."
+
+marvin:
+	pip install -e ".[testing]" --process-dependency-links
+	marvin --help
+
+marvin-prod:
+	pip install . --process-dependency-links
+	marvin --help
+
+update:
+	pip install -e . -U --process-dependency-links
+
+clean-pyc:
+	find . -name '*.pyc' -exec rm -f {} +
+	find . -name '*.pyo' -exec rm -f {} +
+	find . -name '*~' -exec rm -f  {} +
+
+clean-build:
+	rm -rf *.egg-info
+	rm -rf .cache
+	rm -rf .eggs
+	rm -rf dist
+	rm -rf build
+
+clean-reports:
+	rm -rf coverage_report/
+	rm -f coverage.xml
+	rm -f .coverage
+
+clean-deps:
+	pip freeze | grep -v "^-e" | xargs pip uninstall -y
+
+clean: clean-build clean-pyc clean-reports clean-deps
+
+docker-build: clean-build
+	mkdir -p build
+	tar -cf build/engine.tar --exclude=*.log --exclude=*.pkl --exclude='build' --exclude='notebooks' --exclude=*.tar *
+	cp -f $(MARVIN_DATA_PATH)/marvin-engine-executor-assembly-$(MARVIN_TOOLBOX_VERSION).jar build/marvin-engine-executor-assembly.jar
+	sudo docker build -t $(DOCKER_REGISTRY_ADRESS)/$(MARVIN_ENGINE_NAME):$(DOCKER_VERSION) .
+
+docker-run:
+	sudo docker run --name=marvin-$(MARVIN_ENGINE_NAME)-$(DOCKER_VERSION) --mount type=bind,source=$(MARVIN_DATA_PATH),destination=/marvin-data -p 8000:8000 $(DOCKER_REGISTRY_ADRESS)/$(MARVIN_ENGINE_NAME):$(DOCKER_VERSION)
+
+docker-push:
+	sudo docker push $(DOCKER_REGISTRY_ADRESS)/$(MARVIN_ENGINE_NAME):$(DOCKER_VERSION)
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/README.md b/marvin_python_toolbox/management/templates/python-engine/README.md
new file mode 100644
index 0000000..77f7630
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/README.md
@@ -0,0 +1,143 @@
+# {{project.name}} v0.0.1
+
+## Overview
+
+{{project.description}}
+
+
+## Requirements
+
+_REPLACE: Add here the list of requirements. For example:_
+
+ - Python 2.7
+ - Numpy 1.11.0 or higher
+
+
+## Installation
+
+Use the Marvin toolbox to provision, deploy and start the remote HTTP server.
+
+First, edit the `marvin.ini` file, setting the options within the
+`ssh_deployment` section:
+
+1. `host`: the host IP address or name where the engine should be deployed. You
+can enable multi-host deployment using `,` to separate hosts
+2. `port`: the SSH connection port
+3. `user`: the SSH connection username. Currently, only a single user is
+supported. This user should be capable of *passwordless sudo*, although it can
+use password for the SSH connection
+
+Next, ensure that the remote servers are provisioned (all required software
+is installed):
+
+    marvin engine-deploy --provision
+
+Next, package your engine:
+
+    marvin engine-deploy --package
+
+This will create a compressed archive containing your engine code under the
+`.packages` directory.
+
+Next, deploy your engine to the remote servers:
+
+    marvin engine-deploy
+
+By default, a dependency clean will be executed at each deploy. You can skip it
+using:
+
+    marvin engine-deploy --skip-clean
+
+Next, you can start the HTTP server on the remote servers:
+
+    marvin engine-httpserver-remote start
+
+You can check if the HTTP server is running:
+
+    marvin engine-httpserver-remote status
+
+And stop it:
+
+    marvin engine-httpserver-remote stop
+
+After starting, you can test it by making a HTTP request to any endpoint, like:
+
+    curl -v http://example.com/predictor/health
+
+Under the hood, this engine uses Fabric to define provisioning and deployment
+process. Check the `fabfile.py` for more information. You can add new tasks or
+edit existing ones to match your provisioning and deployment pipeline.
+
+## Development
+
+### Getting started
+
+First, create a new virtualenv
+
+```
+mkvirtualenv {{project.package}}_env
+```
+
+Now install the development dependencies
+
+```
+make marvin
+```
+
+You are now ready to code.
+
+
+### Adding new dependencies
+
+It's very important: all development dependencies should be added to `setup.py`.
+
+### Running tests
+
+This project uses *[py.test](http://pytest.org/)* as test runner and *[Tox](https://tox.readthedocs.io)* to manage virtualenvs.
+
+To run all tests use the following command
+
+```
+marvin test
+```
+
+To run specific test
+
+```
+marvin test tests/test_file.py::TestClass::test_method
+```
+
+
+### Writing documentation
+
+The project documentation is written using *[Jupyter](http://jupyter.readthedocs.io/)* notebooks. 
+You can start the notebook server from the command line by running the following command
+
+```
+marvin notebook
+```
+
+Use notebooks to demonstrate how to use the lib features. It can also be useful to show some use cases.
+
+
+### Bumping version
+
+```
+marvin pkg-bumpversion [patch|minor|major]
+git add . && git commit -m "Bump version"
+```
+
+
+### Tagging version
+
+```
+marvin pkg-createtag
+git push origin master --follow-tags
+```
+
+
+### Logging
+
+The default log level is set to _WARNING_. You can change the log level at runtime setting another value to one of the following environment variable: `{{project.package|upper}}_LOG_LEVEL` or `LOG_LEVEL`. The available values are _CRITICAL_, _ERROR_, _WARNING_, _INFO_ and _DEBUG_.
+
+Be careful using `LOG_LEVEL`, it may affect another lib.
diff --git a/marvin_python_toolbox/management/templates/python-engine/docs.yaml b/marvin_python_toolbox/management/templates/python-engine/docs.yaml
new file mode 100644
index 0000000..6983f8d
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/docs.yaml
@@ -0,0 +1,567 @@
+openapi: "3.0.0"
+info:
+  version: 0.0.1
+  title: {{project.package}} API Doc
+  contact:
+      name: maintainer {{mantainer.name}}
+      email: {{mantainer.email}}
+      url: https://github.com/marvin-ai
+  license:
+    name: Apache License 2.0
+servers:
+  - url: http://localhost:8000
+  - url: http://0.0.0.0:8000
+tags:
+  - name: Docker
+    description: For Docker users, please use "make docker-build" and "make docker-run" commands in your engine virtualenv to start the server
+  - name: Acquisitor
+    description: Setup the initial_dataset with all cleaned data necessary to build your dataset in the next action
+  - name: Tpreparator
+    description: Setup the dataset with the transformed data that is compatible with the algorithm used to build the model in the next action
+  - name: Trainer
+    description: Setup the model with the result of the algorithm used to training
+  - name: Evaluator
+    description: Setup the metrics with the result of the algorithms used to test the model
+  - name: Predictor
+    description: Return the predicted value in a json parsable object format
+  - name: Feedback
+    description: Receive feedback message, user can manipulate this message for any use
+  - name: Pipeline
+    description: Perform all batch actions in your right order
+paths:
+  /acquisitor/health:
+    get:
+      summary: Get acquisitor's service health
+      operationId: getAcquisitorHealth
+      tags:
+        - Acquisitor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /acquisitor/status:
+    get:
+      summary: Get acquisitor's service status
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: getAcquisitorStatus
+      tags:
+        - Acquisitor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /acquisitor:
+    post:
+      summary: Run acquisitor
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: acquisitor
+      tags:
+        - Acquisitor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /tpreparator/health:
+    get:
+      summary: Get trainer preparator's service health
+      operationId: getTPreparatorHealth
+      tags:
+        - Tpreparator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /tpreparator/status:
+    get:
+      summary: Get trainer preparator's service status
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: getTPreparatorStatus
+      tags:
+        - Tpreparator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /tpreparator/reload:
+    put:
+      summary: Reload artifact for trainer preparator
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: reloadTPreparator
+      tags:
+        - Tpreparator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /tpreparator:
+    post:
+      summary: Run trainer preparator
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: tpreparator
+      tags:
+        - Tpreparator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /trainer/health:
+    get:
+      summary: Get trainer's service health
+      operationId: getTrainerHealth
+      tags:
+        - Trainer
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /trainer/status:
+    get:
+      summary: Get trainer's service status
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: getTrainerStatus
+      tags:
+        - Trainer
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /trainer/reload:
+    put:
+      summary: Reload artifact for trainer
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: reloadTrainer
+      tags:
+        - Trainer
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /trainer:
+    post:
+      summary: Run trainer
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: trainer
+      tags:
+        - Trainer
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /evaluator/health:
+    get:
+      summary: Get evaluator's service health
+      operationId: getEvaluatorHealth
+      tags:
+        - Evaluator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /evaluator/status:
+    get:
+      summary: Get evaluator's service status
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: getEvaluatorStatus
+      tags:
+        - Evaluator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /evaluator/metrics:
+    get:
+      summary: Get metrics's value
+      parameters: 
+        - in: query
+          name: protocol
+          schema: 
+            type: string
+          required: true
+          description: Metrics protocol value
+      operationId: getMetrics
+      tags:
+        - Evaluator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /evaluator/reload:
+    put:
+      summary: Reload artifact for evaluator
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: reloadEvaluator
+      tags:
+        - Evaluator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /evaluator:
+    post:
+      summary: Run evaluator
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: evaluator
+      tags:
+        - Evaluator
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /predictor/health:
+    get:
+      summary: Get predictor's service health
+      operationId: getPredictorHealth
+      tags:
+        - Predictor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /predictor/status:
+    get:
+      summary: Get predictor's service status
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: getPredictorStatus
+      tags:
+        - Predictor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /predictor/reload:
+    put:
+      summary: Reload artifact for predictor
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: reloadPredictor
+      tags:
+        - Predictor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /predictor:
+    post:
+      summary: Run predictor
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: predictor
+      tags:
+        - Predictor
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /feedback/health:
+    get:
+      summary: Get feedback's service health
+      operationId: getFeedbackHealth
+      tags:
+        - Feedback
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /feedback/status:
+    get:
+      summary: Get feedback's service status
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      operationId: getFeedbackStatus
+      tags:
+        - Feedback
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /feedback/reload:
+    put:
+      summary: Reload artifact for feedback
+      parameters: 
+        - in: query
+          name: protocol
+          schema:
+            type: string
+          required: true
+          description: The Protocol value generated from last action
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: reloadFeedback
+      tags:
+        - Feedback
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /feedback:
+    post:
+      summary: Run feedback
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: feedback
+      tags:
+        - Feedback
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
+  /pipeline:
+    post:
+      summary: Do all batch actions (from Acquisitor to Evaluator)
+      requestBody:
+        description: The default value for body is an empty json object
+        required: true
+        content:
+          application/json:
+            schema:
+              type: object
+      operationId: pipeline
+      tags:
+        - Pipeline
+      responses:
+        '200':
+          description: Result Message / Success
+        '400':
+          description: Bad Request / Illegal Argument / Missing Parameters
+        '500':
+          description: Internal Server Error / Timeout
+        '503':
+          description: Service Unavailable
diff --git a/marvin_python_toolbox/management/templates/python-engine/engine.messages b/marvin_python_toolbox/management/templates/python-engine/engine.messages
new file mode 100644
index 0000000..5ff7ec4
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/engine.messages
@@ -0,0 +1,3 @@
+[{
+	"msg1": "Hello from marvin engine!"
+}]
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/engine.metadata b/marvin_python_toolbox/management/templates/python-engine/engine.metadata
new file mode 100644
index 0000000..88ecc2f
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/engine.metadata
@@ -0,0 +1,62 @@
+{
+	"name": "{{project.name.lower()}}",
+	"version": "v0.0.1",
+	"engineType": "python",
+	"artifactsRemotePath": "/tmp/marvin",
+	"artifactManagerType": "FS",
+	"onlineActionTimeout": 1000,
+	"metricsTimeout": 10000,
+	"healthCheckTimeout": 2000,
+	"reloadTimeout": 600000,
+	"batchActionTimeout": 600000,
+	"pipelineActions": ["acquisitor", "tpreparator", "trainer", "evaluator"],
+	"actions": [{
+		"name": "acquisitor",
+		"actionType": "batch",
+		"port": 50051,
+		"host": "localhost",
+		"artifactsToPersist": ["initialdataset"],
+		"artifactsToLoad": [],
+		"pipeline": []
+	}, {
+		"name": "tpreparator",
+		"actionType": "batch",
+		"port": 50052,
+		"host": "localhost",
+		"artifactsToPersist": ["dataset"],
+		"artifactsToLoad": ["initialdataset"],
+		"pipeline": []
+	}, {
+		"name": "trainer",
+		"actionType": "batch",
+		"port": 50053,
+		"host": "localhost",
+		"artifactsToPersist": ["model"],
+		"artifactsToLoad": ["dataset"],
+		"pipeline": []
+	}, {
+		"name": "evaluator",
+		"actionType": "batch",
+		"port": 50054,
+		"host": "localhost",
+		"artifactsToPersist": ["metrics"],
+		"artifactsToLoad": ["dataset", "model"],
+		"pipeline": []
+	}, {
+		"name": "predictor",
+		"actionType": "online",
+		"port": 50055,
+		"host": "localhost",
+		"artifactsToPersist": [],
+		"artifactsToLoad": ["model", "metrics"],
+		"pipeline": ["ppreparator"]
+	}, {
+		"name": "feedback",
+		"actionType": "online",
+		"port": 50056,
+		"host": "localhost",
+		"artifactsToPersist": [],
+		"artifactsToLoad": [],
+		"pipeline": []
+	}]
+}
diff --git a/marvin_python_toolbox/management/templates/python-engine/engine.params b/marvin_python_toolbox/management/templates/python-engine/engine.params
new file mode 100644
index 0000000..410f586
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/engine.params
@@ -0,0 +1,3 @@
+{
+	"PARAM_1" : "VALUE_OF_PARAM_1"
+}
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/fabfile.py b/marvin_python_toolbox/management/templates/python-engine/fabfile.py
new file mode 100644
index 0000000..1da9c50
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/fabfile.py
@@ -0,0 +1,188 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+
+from fabric.api import env
+from fabric.api import run
+from fabric.api import execute
+from fabric.api import cd
+from fabric.api import local
+from fabric.api import put
+from fabric.api import sudo
+from fabric.state import output
+from marvin_python_toolbox import __version__ as TOOLBOX_VERSION
+from marvin_python_toolbox.common.config import Config
+
+_host = Config.get("host", section="ssh_deployment").split(",")  # comma-separated host list from the [ssh_deployment] config section
+_port = Config.get("port", section="ssh_deployment")
+_user = Config.get("user", section="ssh_deployment")
+
+for h in _host:
+    env.hosts.append("{user}@{host}:{port}".format(user=_user, host=h, port=_port))  # register every deployment target with fabric
+
+output["everything"] = False  # silence fabric's verbose output...
+output["running"] = True  # ...but keep the "Executing task" lines
+
+env.package = "{{project.package}}"
+env.margin_engine_executor_prefix = "/opt/marvin/engine-executor"  # NOTE(review): "margin" looks like a typo for "marvin"; kept, all uses are internally consistent
+env.margin_engine_executor_jar = "marvin-engine-executor-assembly-{version}.jar".format(version=TOOLBOX_VERSION)  # assumes the executor jar version tracks the toolbox version — TODO confirm
+env.marvin_engine_executor_path = env.margin_engine_executor_prefix + "/" + env.margin_engine_executor_jar
+
+
+def install_oracle_jdk():  # Install Oracle JDK 8 via the WebUpd8 PPA, non-interactively.
+    sudo("add-apt-repository ppa:webupd8team/java -y")
+    sudo("apt-get -qq update")
+    run("echo debconf shared/accepted-oracle-license-v1-1 select true | sudo debconf-set-selections")  # pre-accept the Oracle license so apt does not prompt
+    run("echo debconf shared/accepted-oracle-license-v1-1 seen true | sudo debconf-set-selections")
+    sudo("apt-get install -y oracle-java8-installer")
+
+
+def install_virtualenvwrapper():  # Install virtualenvwrapper and wire it into the login profile.
+    run("pip install virtualenvwrapper")
+    run("echo 'export WORKON_HOME=${HOME}/.virtualenvs' >> ${HOME}/.profile")  # takes effect in subsequent login shells only
+    run("echo 'source /usr/local/bin/virtualenvwrapper.sh' >> ${HOME}/.profile")
+
+
+def install_apache_spark():  # Download Spark 2.1.1 and expose it as /opt/spark.
+    run("curl https://d3kbcqa49mib13.cloudfront.net/spark-2.1.1-bin-hadoop2.6.tgz -o /tmp/spark-2.1.1-bin-hadoop2.6.tgz")  # NOTE(review): this legacy CloudFront mirror may no longer serve files — verify URL
+    sudo("tar -xf /tmp/spark-2.1.1-bin-hadoop2.6.tgz -C /opt/")
+    sudo("ln -s /opt/spark-2.1.1-bin-hadoop2.6 /opt/spark")  # version-neutral symlink, matches SPARK_HOME below
+    run("echo 'export SPARK_HOME=/opt/spark' >> ${HOME}/.profile")
+
+
+def install_required_packages():
+    sudo("apt-get update -y")
+    sudo("apt-get install -y git")
+    sudo("apt-get install -y wget")
+    sudo("apt-get install -y python2.7-dev")
+    sudo("apt-get install -y python-pip")
+    sudo("apt-get install -y ipython")
+    sudo("apt-get install -y libffi-dev")
+    sudo("apt-get install -y libssl-dev")
+    sudo("apt-get install -y libxml2-dev")
+    sudo("apt-get install -y libxslt1-dev")
+    sudo("apt-get install -y libpng12-dev")
+    sudo("apt-get install -y libfreetype6-dev")
+    sudo("apt-get install -y python-tk")
+    sudo("apt-get install -y libsasl2-dev")
+    sudo("apt-get install -y python-pip")
+    sudo("apt-get install -y graphviz")
+    sudo("pip install --upgrade pip")
+
+
+def install_marvin_engine_executor():  # Fetch the engine-executor jar from S3 into its install prefix.
+    sudo("mkdir -p {prefix}".format(prefix=env.margin_engine_executor_prefix))
+    with cd("{prefix}".format(prefix=env.margin_engine_executor_prefix)):
+        sudo("wget https://s3.amazonaws.com/marvin-engine-executor/{jar}".format(jar=env.margin_engine_executor_jar))
+
+
+def create_marvin_engines_prefix():  # Create engine install/log/pid directories owned by the deploy user.
+    sudo("mkdir -p /opt/marvin/engines")
+    sudo("chown {user}:{user} /opt/marvin/engines".format(user=env.user))
+    sudo("mkdir -p /var/log/marvin/engines")
+    sudo("chown {user}:{user} /var/log/marvin/engines".format(user=env.user))
+    sudo("mkdir -p /var/run/marvin/engines")
+    sudo("chown {user}:{user} /var/run/marvin/engines".format(user=env.user))
+
+
+def configure_marvin_environment():  # Persist MARVIN_HOME/MARVIN_DATA_PATH in .profile and create the dirs.
+    run("echo 'export MARVIN_HOME=${HOME}/marvin' >> ${HOME}/.profile")
+    run("echo 'export MARVIN_DATA_PATH=${MARVIN_HOME}/data' >> ${HOME}/.profile")
+    run("mkdir -p ${MARVIN_HOME}")  # NOTE(review): relies on the remote shell having sourced .profile — confirm run() uses a login shell
+    run("mkdir -p ${MARVIN_DATA_PATH}")
+
+
+def provision():  # One-shot host setup: OS packages, virtualenvwrapper, JDK, Spark, executor jar, marvin dirs/env.
+    execute(install_required_packages)
+    execute(install_virtualenvwrapper)
+    execute(install_oracle_jdk)
+    execute(install_apache_spark)
+    execute(install_marvin_engine_executor)
+    execute(create_marvin_engines_prefix)
+    execute(configure_marvin_environment)
+
+
+def package(version):  # Build .packages/<package>-<version>.tar.gz from the current working tree.
+    package = env.package  # NOTE: local name shadows this function; harmless here
+    local("mkdir -p .packages")
+    local("tar czvf .packages/{package}-{version}.tar.gz --exclude='.packages' .".format(
+          package=package, version=version))
+
+
+def deploy(version, skip_clean=False):
+    execute(engine_stop)
+    package = env.package
+    put(local_path=".packages/{package}-{version}.tar.gz".format(
+        package=package, version=version), remote_path="/tmp/")
+    run("mkdir -p /opt/marvin/engines/{package}/{version}".format(
+        package=package, version=version))
+    with cd("/opt/marvin/engines/{package}/{version}".format(
+            package=package, version=version)):
+        run("tar xzvf /tmp/{package}-{version}.tar.gz".format(
+            package=package, version=version))
+    with cd("/opt/marvin/engines/{package}".format(package=package)):
+        symlink_exists = run("stat current", quiet=True).succeeded
+        if (symlink_exists):
+            run("rm current")
+        run("ln -s {version} current".format(version=version))
+    with cd("/opt/marvin/engines/{package}/current".format(package=package)):
+        run("mkvirtualenv {package}_env".format(package=package))
+        run("setvirtualenvproject")
+        if skip_clean:
+            run("workon {package}_env && make marvin".format(
+                package=package))
+        else:
+            run("workon {package}_env && make clean && make marvin".format(
+                package=package))
+    execute(engine_start)
+
+
+def engine_start(http_host, http_port):  # Launch the engine HTTP server in the background on the given host/port.
+    package = env.package
+
+    command = (
+        "workon {package}_env &&"
+        " (marvin engine-httpserver"
+        " -h {http_host}"
+        " -p {http_port}"
+        " -e {executor}"
+        " 1> /var/log/marvin/engines/{package}.out"
+        " 2> /var/log/marvin/engines/{package}.err"
+        " & echo $! > /var/run/marvin/engines/{package}.pid)"  # record the server pid for engine_stop/engine_status
+    ).format(
+        package=package,
+        http_host=http_host,
+        http_port=http_port,
+        executor=env.marvin_engine_executor_path
+    )
+
+    with cd("/opt/marvin/engines/{package}/current".format(package=package)):
+        run(command, pty=False)  # pty=False so the backgrounded server survives the fabric session
+
+
+def engine_stop():  # Kill the engine and its child processes, then remove the pid file.
+    package = env.package
+
+    pid_file_exists = run("cat /var/run/marvin/engines/{package}.pid".format(
+        package=package), quiet=True)  # quiet=True: a missing pid file just means "not running"
+    if pid_file_exists.succeeded:
+        with cd("/opt/marvin/engines/{package}/current".format(package=package)):
+            children_pids = run("ps --ppid $(cat /var/run/marvin/engines/{package}.pid) -o pid --no-headers |xargs echo".format(
+                package=package))  # collect direct children so they die together with the parent
+            run("kill $(cat /var/run/marvin/engines/{package}.pid) {children_pids}".format(
+                package=package, children_pids=children_pids))
+            run("rm /var/run/marvin/engines/{package}.pid".format(package=package))
+
+
+def engine_status():  # Report whether the engine process recorded in the pid file is alive.
+    package = env.package
+    pid_file_exists = run("cat /var/run/marvin/engines/{package}.pid".format(
+        package=package), quiet=True)  # quiet=True: a missing pid file is an expected outcome, not an error
+    if pid_file_exists.succeeded:
+        is_running = run("ps $(cat /var/run/marvin/engines/{package}.pid)".format(package=package), quiet=True)  # ps exits non-zero when the pid is gone
+        if is_running.succeeded:
+            print "Your engine is running :)"
+        else:
+            print "Your engine is not running :("
+    else:
+        print "Your engine is not running :("
diff --git a/marvin_python_toolbox/management/templates/python-engine/feedback.messages b/marvin_python_toolbox/management/templates/python-engine/feedback.messages
new file mode 100644
index 0000000..5ff7ec4
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/feedback.messages
@@ -0,0 +1,3 @@
+[{
+	"msg1": "Hello from marvin engine!"
+}]
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/marvin.ini b/marvin_python_toolbox/management/templates/python-engine/marvin.ini
new file mode 100644
index 0000000..0bd2f05
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/marvin.ini
@@ -0,0 +1,11 @@
+[marvin]
+package = {{project.package}}
+type = {{project.type}}
+executor_url = https://s3.amazonaws.com/marvin-engine-executor/marvin-engine-executor-assembly-{{project.toolbox_version}}.jar
+
+[ssh_deployment]
+# You can enable multi-host deployment like this
+# host = host1.com,host2.com,hostN.com
+host = host1.com
+port = 22
+user = marvin
diff --git a/marvin_python_toolbox/management/templates/python-engine/notebooks/sample.ipynb b/marvin_python_toolbox/management/templates/python-engine/notebooks/sample.ipynb
new file mode 100644
index 0000000..d4d6a30
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/notebooks/sample.ipynb
@@ -0,0 +1,50 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Documentation"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Sample"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "import {{project.package}}"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.5"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 0
+}
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/VERSION b/marvin_python_toolbox/management/templates/python-engine/project_package/VERSION
new file mode 100644
index 0000000..8acdd82
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/VERSION
@@ -0,0 +1 @@
+0.0.1
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/__init__.py b/marvin_python_toolbox/management/templates/python-engine/project_package/__init__.py
new file mode 100644
index 0000000..a45c1f1
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/__init__.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+import os.path
+
+from .data_handler import *  # re-export submodule names at package level
+from .prediction import *
+from .training import *
+
+
+# Get package version number from "VERSION" file
+with open(os.path.join(os.path.dirname(__file__), 'VERSION'), 'rb') as f:
+    __version__ = f.read().decode('ascii').strip()
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/_compatibility.py b/marvin_python_toolbox/management/templates/python-engine/project_package/_compatibility.py
new file mode 100644
index 0000000..4ce10fe
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/_compatibility.py
@@ -0,0 +1,19 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""Compatibility module.
+
+Import this module to help to write code compatible with Python 2 and 3.
+"""
+
+from __future__ import print_function
+from __future__ import division
+from __future__ import absolute_import
+from __future__ import unicode_literals
+
+import six
+
+__all__ = ['six']
+
+# Add here any code that have to differentiate between python 2 and 3.
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/_logging.py b/marvin_python_toolbox/management/templates/python-engine/project_package/_logging.py
new file mode 100644
index 0000000..2e45b2c
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/_logging.py
@@ -0,0 +1,78 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""Custom logging module.
+
+This module is responsible to manage log messages and log file.
+"""
+
+import sys
+import os
+import os.path
+import logging
+
+DEFAULT_LOG_LEVEL = logging.WARNING
+DEFAULT_LOG_DIR = '/tmp'
+
+
+class Logger(logging.getLoggerClass()):
+    """Custom logger class.
+
+    Use this class to customize the logger behavior or to intercept the
+    messages.
+    """
+    def error(self, msg, *args, **kwargs):
+        # Add here code to intercept the project error messages
+        super(Logger, self).error(msg, *args, **kwargs)
+
+    def critical(self, msg, *args, **kwargs):
+        # Add here code to intercept the project critical messages
+        super(Logger, self).critical(msg, *args, **kwargs)
+
+
+logging.setLoggerClass(Logger)
+
+
+def get_logger(name, namespace='{{project.package}}',
+               log_level=DEFAULT_LOG_LEVEL, log_dir=DEFAULT_LOG_DIR):
+    """Build a logger that outputs to a file and to the console."""
+
+    log_level = (os.getenv('{}_LOG_LEVEL'.format(namespace.upper())) or
+                 os.getenv('LOG_LEVEL', log_level))
+    log_dir = (os.getenv('{}_LOG_DIR'.format(namespace.upper())) or
+               os.getenv('LOG_DIR', log_dir))
+
+    logger = logging.getLogger('{}.{}'.format(namespace, name))
+    logger.setLevel(log_level)
+
+    formatter = logging.Formatter(
+        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+
+    # Create a console stream handler
+    console_handler = logging.StreamHandler()
+    console_handler.setLevel(log_level)
+    console_handler.setFormatter(formatter)
+    logger.addHandler(console_handler)
+
+    try:
+        if log_dir:
+            log_path = os.path.abspath(log_dir)
+            log_filename = '{name}.{pid}.log'.format(
+                name=namespace, pid=os.getpid())
+
+            file_path = str(os.path.join(log_path, log_filename))
+
+            if not os.path.exists(log_path):
+                os.makedirs(log_path, mode=0o774)
+
+            # Create a file handler
+            file_handler = logging.FileHandler(file_path)
+            file_handler.setLevel(log_level)
+            file_handler.setFormatter(formatter)
+            logger.addHandler(file_handler)
+    except OSError as e:
+        logger.error('Could not create log file {file}: {error}'.format(
+            file=file_path, error=e.strerror))
+
+    return logger
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/__init__.py b/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/__init__.py
new file mode 100644
index 0000000..6707e49
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/__init__.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+from .acquisitor_and_cleaner import AcquisitorAndCleaner
+from .training_preparator import TrainingPreparator
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/acquisitor_and_cleaner.py b/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/acquisitor_and_cleaner.py
new file mode 100644
index 0000000..b6da06a
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/acquisitor_and_cleaner.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""AcquisitorAndCleaner engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBaseDataHandler
+
+__all__ = ['AcquisitorAndCleaner']
+
+
+logger = get_logger('acquisitor_and_cleaner')
+
+
+class AcquisitorAndCleaner(EngineBaseDataHandler):
+
+    def __init__(self, **kwargs):
+        super(AcquisitorAndCleaner, self).__init__(**kwargs)
+
+    def execute(self, params, **kwargs):
+        """
+        Setup the initial_dataset with all cleaned data necessary to build your dataset in the next action.
+
+        Eg.
+
+            self.marvin_initial_dataset = {...}
+        """
+        self.marvin_initial_dataset = {}
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/training_preparator.py b/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/training_preparator.py
new file mode 100644
index 0000000..283e59d
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/data_handler/training_preparator.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""TrainingPreparator engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBaseDataHandler
+
+__all__ = ['TrainingPreparator']
+
+
+logger = get_logger('training_preparator')
+
+
+class TrainingPreparator(EngineBaseDataHandler):
+
+    def __init__(self, **kwargs):
+        super(TrainingPreparator, self).__init__(**kwargs)
+
+    def execute(self, params, **kwargs):
+        """
+        Setup the dataset with the transformed data that is compatible with the algorithm used to build the model in the next action.
+        Use the self.marvin_initial_dataset prepared in the last action as source of data.
+
+        Eg.
+
+            self.marvin_dataset = {...}
+        """
+        self.marvin_dataset = {}
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/__init__.py b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/__init__.py
new file mode 100644
index 0000000..aa6291d
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/__init__.py
@@ -0,0 +1,6 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+from .prediction_preparator import PredictionPreparator
+from .predictor import Predictor
+from .feedback import Feedback
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/feedback.py b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/feedback.py
new file mode 100644
index 0000000..d7a0ea6
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/feedback.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Feedback engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBasePrediction
+
+__all__ = ['Feedback']
+
+
+logger = get_logger('feedback')
+
+
+class Feedback(EngineBasePrediction):
+
+    def __init__(self, **kwargs):
+        super(Feedback, self).__init__(**kwargs)
+
+    def execute(self, input_message, params, **kwargs):
+        """
+        Receive feedback message, user can manipulate this message for any use.
+        Return "Done" to signal that the message is received and processed.
+        """
+        return {"message": "Done"}
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/prediction_preparator.py b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/prediction_preparator.py
new file mode 100644
index 0000000..f67a9b9
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/prediction_preparator.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""PredictionPreparator engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBasePrediction
+
+__all__ = ['PredictionPreparator']
+
+
+logger = get_logger('prediction_preparator')
+
+
+class PredictionPreparator(EngineBasePrediction):
+
+    def __init__(self, **kwargs):
+        super(PredictionPreparator, self).__init__(**kwargs)
+
+    def execute(self, input_message, params, **kwargs):
+        """
+        Return a prepared input_message compatible to the predict algorithm used by the model.
+        Use the self.model and self.metrics objects if necessary.
+        """
+        return input_message
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/predictor.py b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/predictor.py
new file mode 100644
index 0000000..f1c6b4e
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/prediction/predictor.py
@@ -0,0 +1,30 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""Predictor engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBasePrediction
+
+__all__ = ['Predictor']
+
+
+logger = get_logger('predictor')
+
+
+class Predictor(EngineBasePrediction):
+
+    def __init__(self, **kwargs):
+        super(Predictor, self).__init__(**kwargs)
+
+    def execute(self, input_message, params, **kwargs):
+        """
+        Return the predicted value in a json parsable object format.
+        Use the self.model and self.metrics objects if necessary.
+        """
+        return {"predicted_value": "mocked_return"}
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/training/__init__.py b/marvin_python_toolbox/management/templates/python-engine/project_package/training/__init__.py
new file mode 100644
index 0000000..e1723b7
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/training/__init__.py
@@ -0,0 +1,5 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+from .metrics_evaluator import MetricsEvaluator
+from .trainer import Trainer
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/training/metrics_evaluator.py b/marvin_python_toolbox/management/templates/python-engine/project_package/training/metrics_evaluator.py
new file mode 100644
index 0000000..9f4f09e
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/training/metrics_evaluator.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""MetricsEvaluator engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBaseTraining
+
+__all__ = ['MetricsEvaluator']
+
+
+logger = get_logger('metrics_evaluator')
+
+
+class MetricsEvaluator(EngineBaseTraining):
+
+    def __init__(self, **kwargs):
+        super(MetricsEvaluator, self).__init__(**kwargs)
+
+    def execute(self, params, **kwargs):
+        """
+        Setup the metrics with the result of the algorithms used to test the model.
+        Use the self.marvin_dataset and self.marvin_model prepared in the last actions.
+
+        Eg.
+
+            self.marvin_metrics = {...}
+        """
+        self.marvin_metrics = {}
diff --git a/marvin_python_toolbox/management/templates/python-engine/project_package/training/trainer.py b/marvin_python_toolbox/management/templates/python-engine/project_package/training/trainer.py
new file mode 100644
index 0000000..a5dd23a
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/project_package/training/trainer.py
@@ -0,0 +1,34 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+"""Trainer engine action.
+
+Use this module to add the project main code.
+"""
+
+from .._compatibility import six
+from .._logging import get_logger
+
+from marvin_python_toolbox.engine_base import EngineBaseTraining
+
+__all__ = ['Trainer']
+
+
+logger = get_logger('trainer')
+
+
+class Trainer(EngineBaseTraining):
+
+    def __init__(self, **kwargs):
+        super(Trainer, self).__init__(**kwargs)
+
+    def execute(self, params, **kwargs):
+        """
+        Setup the model with the result of the algorithm used to training.
+        Use the self.marvin_dataset prepared in the last action as source of data.
+
+        Eg.
+
+            self.marvin_model = {...}
+        """
+        self.marvin_model = {}
diff --git a/marvin_python_toolbox/management/templates/python-engine/pytest.ini b/marvin_python_toolbox/management/templates/python-engine/pytest.ini
new file mode 100644
index 0000000..cab8644
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+minversion    = 2.0
+norecursedirs = .git .tox .eggs .cache *.egg build dist tmp*
+python_files  = test*.py
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/templates/python-engine/setup.cfg b/marvin_python_toolbox/management/templates/python-engine/setup.cfg
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/setup.cfg
diff --git a/marvin_python_toolbox/management/templates/python-engine/setup.py b/marvin_python_toolbox/management/templates/python-engine/setup.py
new file mode 100644
index 0000000..b4c128e
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/setup.py
@@ -0,0 +1,175 @@
+from __future__ import print_function
+
+import os
+import shutil
+from os.path import dirname, join
+from setuptools import setup, find_packages
+from setuptools.command.test import test as TestCommand
+from setuptools.command.develop import develop as _develop
+from setuptools.command.install import install as _install
+
+
+REQUIREMENTS_TESTS = [
+    'pytest>=2.6.4',
+    'pytest-cov>=1.8.1',
+    'mock>=2.0.0',
+    'virtualenv>=15.0.1',
+    'tox>=2.2.0',
+]
+
+def _get_version():
+    """Return the project version from VERSION file."""
+
+    with open(join(dirname(__file__), '{{project.package}}/VERSION'), 'rb') as f:
+        version = f.read().decode('ascii').strip()
+    return version
+
+
+def _hooks(dir):
+    _set_autocomplete()
+    _install_notebook_extension()
+
+
+def _set_autocomplete():
+    import marvin_python_toolbox as toolbox
+    virtualenv = os.environ.get('VIRTUAL_ENV', None)
+
+    if virtualenv:
+        postactivate = os.path.join(virtualenv, 'bin', 'postactivate')
+
+        if os.path.exists(postactivate):
+            shutil.copy(
+                os.path.join(toolbox.__path__[0], 'extras', 'marvin_bash_completion'),
+                os.path.join(virtualenv, 'marvin_bash_completion')
+            )
+
+            command = 'source "{}"'.format(os.path.join(virtualenv, 'marvin_bash_completion'))
+
+            with open(postactivate, 'r+') as fp:
+                lines = fp.readlines()
+                fp.seek(0)
+                configured = False
+                for line in lines:
+                    if 'marvin_bash_completion' in line:
+                        # Replacing old autocomplete configuration
+                        fp.write(command)
+                        configured = True
+                    else:
+                        fp.write(line)
+
+                if not configured:
+                    fp.write(command)
+                    # 'Autocomplete was successfully configured'
+                fp.write('\n')
+                fp.truncate()
+
+
+def _install_notebook_extension():
+    import marvin_python_toolbox as toolbox
+
+    install_command = [
+        "jupyter",
+        "nbextension",
+        "install",
+        os.path.join(toolbox.__path__[0], 'extras', 'notebook_extensions', 'main.js'),
+        "--destination",
+        "marvin.js",
+        "--sys-prefix",
+        "--overwrite"
+    ]
+
+    os.system(' '.join(install_command))
+
+    enable_command = [
+        "jupyter",
+        "nbextension",
+        "enable",
+        "marvin",
+        "--sys-prefix"
+    ]
+
+    os.system(' '.join(enable_command))
+
+
+class develop(_develop):
+    def run(self):
+        _develop.run(self)
+        self.execute(_hooks, (self.install_lib,), msg="Running develop preparation task")
+
+
+class install(_install):
+    def run(self):
+        _install.run(self)
+        self.execute(_hooks, (self.install_lib,), msg="Running install preparation task")
+
+
+class Tox(TestCommand):
+    """Run the test cases using TOX command."""
+
+    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
+
+    def initialize_options(self):
+        TestCommand.initialize_options(self)
+        self.tox_args = None
+
+    def finalize_options(self):
+        TestCommand.finalize_options(self)
+        self.test_args = []
+        self.test_suite = True
+
+    def run_tests(self):
+        # Import here, cause outside the eggs aren't loaded
+        import tox
+        import shlex
+        import sys
+        args = self.tox_args
+        if args:
+            args = shlex.split(self.tox_args)
+        else:
+            # Run all tests by default
+            args = ['-c', join(dirname(__file__), 'tox.ini'), 'tests']
+        errno = tox.cmdline(args=args)
+        sys.exit(errno)
+
+
+setup(
+    name='{{project.package}}',
+    version=_get_version(),
+    url='{{project.url}}',
+    description='{{project.description}}',
+    long_description=open(join(dirname(__file__), 'README.md')).read(),
+    author='{{mantainer.name}}',
+    maintainer='{{mantainer.name}}',
+    maintainer_email='{{mantainer.email}}',
+    packages=find_packages(exclude=('tests', 'tests.*')),
+    include_package_data=True,
+    zip_safe=False,
+    classifiers=[
+        'Development Status :: 3 - Alpha',
+        'Intended Audience :: Developers',
+        'Programming Language :: Python',
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.3',
+        'Programming Language :: Python :: 3.4',
+        'Programming Language :: Python :: 3.5',
+        'Topic :: Software Development :: Libraries :: Python Modules',
+    ],
+    install_requires=[
+        'scikit-learn>=0.18.2',
+        'scipy>=0.19.1',
+        'numpy>=1.13.1',
+        'pandas>=0.20.3',
+        'matplotlib>=2.0.2',
+        'marvin-python-toolbox>=0.0.4',
+        'Fabric>=1.14.0',
+    ],
+    tests_require=REQUIREMENTS_TESTS,
+    extras_require={
+        'testing': REQUIREMENTS_TESTS,
+    },
+    cmdclass={
+        'test': Tox, 'develop': develop, 'install': install
+    },
+)
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/conftest.py b/marvin_python_toolbox/management/templates/python-engine/tests/conftest.py
new file mode 100644
index 0000000..903cfc8
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/conftest.py
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+import os
+import pytest
+
+os.environ['TESTING'] = 'True'
+
+
+@pytest.fixture
+def mocked_params():
+    return {'params': 1}
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_acquisitor_and_cleaner.py b/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_acquisitor_and_cleaner.py
new file mode 100644
index 0000000..7b48c68
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_acquisitor_and_cleaner.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.data_handler import AcquisitorAndCleaner
+
+
+class TestAcquisitorAndCleaner:
+    def test_execute(self, mocked_params):
+        ac = AcquisitorAndCleaner()
+        ac.execute(params=mocked_params)
+        assert not ac._params
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_training_preparator.py b/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_training_preparator.py
new file mode 100644
index 0000000..5a0965f
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/data_handler/test_training_preparator.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.data_handler import TrainingPreparator
+
+
+class TestTrainingPreparator:
+    def test_execute(self, mocked_params):
+        ac = TrainingPreparator()
+        ac.execute(params=mocked_params)
+        assert not ac._params
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_feedback.py b/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_feedback.py
new file mode 100644
index 0000000..5cdde72
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_feedback.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.prediction import Feedback
+
+
+class TestFeedback:
+    def test_execute(self, mocked_params):
+        fb = Feedback()
+        fb.execute(input_message="fake message", params=mocked_params)
+        assert not fb._params
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_prediction_preparator.py b/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_prediction_preparator.py
new file mode 100644
index 0000000..301f518
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_prediction_preparator.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.prediction import PredictionPreparator
+
+
+class TestPredictionPreparator:
+    def test_execute(self, mocked_params):
+        ac = PredictionPreparator()
+        ac.execute(input_message="fake message", params=mocked_params)
+        assert not ac._params
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_predictor.py b/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_predictor.py
new file mode 100644
index 0000000..e7b3e4a
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/prediction/test_predictor.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.prediction import Predictor
+
+
+class TestPredictor:
+    def test_execute(self, mocked_params):
+        ac = Predictor()
+        ac.execute(input_message="fake message", params=mocked_params)
+        assert not ac._params
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/training/test_metrics_evaluator.py b/marvin_python_toolbox/management/templates/python-engine/tests/training/test_metrics_evaluator.py
new file mode 100644
index 0000000..a18f188
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/training/test_metrics_evaluator.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.training import MetricsEvaluator
+
+
+class TestMetricsEvaluator:
+    def test_execute(self, mocked_params):
+        ac = MetricsEvaluator()
+        ac.execute(params=mocked_params)
+        assert not ac._params
diff --git a/marvin_python_toolbox/management/templates/python-engine/tests/training/test_trainer.py b/marvin_python_toolbox/management/templates/python-engine/tests/training/test_trainer.py
new file mode 100644
index 0000000..e959207
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tests/training/test_trainer.py
@@ -0,0 +1,18 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+try:
+    import mock
+
+except ImportError:
+    import unittest.mock as mock
+
+from {{project.package}}.training import Trainer
+
+
+class TestTrainer:
+    def test_execute(self, mocked_params):
+        ac = Trainer()
+        ac.execute(params=mocked_params)
+        assert not ac._params
+
diff --git a/marvin_python_toolbox/management/templates/python-engine/tox.ini b/marvin_python_toolbox/management/templates/python-engine/tox.ini
new file mode 100644
index 0000000..19da870
--- /dev/null
+++ b/marvin_python_toolbox/management/templates/python-engine/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py27
+
+[testenv]
+deps=pytest
+     pytest-cov
+     mock
+commands=py.test --cov={envsitepackagesdir}/{{project.package}} --cov-report html --cov-report xml {posargs}
\ No newline at end of file
diff --git a/marvin_python_toolbox/management/test.py b/marvin_python_toolbox/management/test.py
new file mode 100644
index 0000000..741e4af
--- /dev/null
+++ b/marvin_python_toolbox/management/test.py
@@ -0,0 +1,141 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+import sys
+import os
+import os.path
+import subprocess
+import shutil
+import tempfile
+import click
+
+from .pkg import copy
+
+
+@click.group('test')
+def cli():
+    pass
+
+
+@cli.command('test', help='Run tests.')
+@click.option('--cov/--no-cov', default=True)
+@click.option('--no-capture', is_flag=True)
+@click.option('--pdb', is_flag=True)
+@click.argument('args', default='')
+@click.pass_context
+def test(ctx, cov, no_capture, pdb, args):
+    os.environ['TESTING'] = 'true'
+
+    if args:
+        args = args.split(' ')
+    else:
+        args = ['tests']
+
+    if no_capture:
+        args += ['--capture=no']
+
+    if pdb:
+        args += ['--pdb']
+
+    cov_args = []
+    if cov:
+        cov_args += ['--cov', os.path.relpath(ctx.obj['package_path'],
+                                              start=ctx.obj['base_path']),
+                     '--cov-report', 'html',
+                     '--cov-report', 'xml',
+                     '--cov-report', 'term-missing',
+                     ]
+
+    command = ['py.test'] + cov_args + args
+    print(' '.join(command))
+    env = os.environ.copy()
+    exitcode = subprocess.call(command, cwd=ctx.obj['base_path'], env=env)
+    sys.exit(exitcode)
+
+
+@cli.command('test-tox', help='Run tests using a new virtualenv.')
+@click.argument('args', default='')
+@click.pass_context
+def tox(ctx, args):
+    os.environ['TESTING'] = 'true'
+
+    if args:
+        args = ['-a'] + args.split(' ')
+    else:
+        args = []
+    # Copy the project to a tmp dir
+    tmp_dir = tempfile.mkdtemp()
+    tox_dir = os.path.join(tmp_dir, ctx.obj['package_name'])
+    copy(ctx.obj['base_path'], tox_dir)
+    command = ['python', 'setup.py', 'test'] + args
+    env = os.environ.copy()
+    exitcode = subprocess.call(command, cwd=tox_dir, env=env)
+    shutil.rmtree(tmp_dir)
+    sys.exit(exitcode)
+
+
+@cli.command('test-tdd', help='Watch for changes to run tests automatically.')
+@click.option('--cov/--no-cov', default=False)
+@click.option('--no-capture', is_flag=True)
+@click.option('--pdb', is_flag=True)
+@click.option('--partial', is_flag=True)
+@click.argument('args', default='')
+@click.pass_context
+def tdd(ctx, cov, no_capture, pdb, partial, args):
+    os.environ['TESTING'] = 'true'
+
+    if args:
+        args = args.split(' ')
+    else:
+        args = [os.path.relpath(
+            os.path.join(ctx.obj['base_path'], 'tests'))]
+
+    if no_capture:
+        args += ['--capture=no']
+
+    if pdb:
+        args += ['--pdb']
+
+    if partial:
+        args += ['--testmon']
+
+    cov_args = []
+    if cov:
+        cov_args += ['--cov', os.path.relpath(ctx.obj['package_path'],
+                                              start=ctx.obj['base_path']),
+                     '--cov-report', 'html',
+                     '--cov-report', 'xml',
+                     '--cov-report', 'term-missing',
+                     ]
+
+    command = ['ptw', '-p', '--'] + cov_args + args
+    print(' '.join(command))
+    env = os.environ.copy()
+    exitcode = subprocess.call(command, cwd=ctx.obj['base_path'], env=env)
+    sys.exit(exitcode)
+
+
+@cli.command('test-checkpep8', help='Check python code style.')
+@click.pass_context
+def pep8(ctx):
+    command = ['pep8', ctx.obj['package_name']]
+    exitcode = subprocess.call(command, cwd=ctx.obj['base_path'])
+    if exitcode == 0:
+        print('Congratulations! Everything looks good.')
+    sys.exit(exitcode)
diff --git a/notebooks/count-db-tables-rows.ipynb b/notebooks/count-db-tables-rows.ipynb
new file mode 100644
index 0000000..a649288
--- /dev/null
+++ b/notebooks/count-db-tables-rows.ipynb
@@ -0,0 +1,150 @@
+{
+ "cells": [
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "# Count DB Tables Rows"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Getting spark session"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from marvin_python_toolbox.common.data_source_provider import get_spark_session"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "spark = get_spark_session(enable_hive=True)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Getting all hive local dbs"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "dbs = spark.sql(\"show databases\").collect()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Connecting with core db ...\n",
+      "   bsc_product [14953204]\n",
+      "   mis_product_hierarchy [5796251]\n",
+      "Connecting with default db ...\n",
+      "Connecting with marvin db ...\n",
+      "   simple_product_classification_engine_core_bsc_product_120374ac16e58cdf8f0c050d0f698addadf2c41c [14953204]\n",
+      "   simple_product_classification_engine_core_mis_product_hierarchy_0b8069f3ba31eedca44b30bc8a61130f5776d119 [5796251]\n"
+     ]
+    }
+   ],
+   "source": [
+    "for db in dbs:\n",
+    "    db_name = db['databaseName']\n",
+    "    print(\"Connecting with {} db ...\".format(db_name)) \n",
+    "    spark.sql(\"use {}\".format(db_name))\n",
+    "    tables = spark.sql(\"show tables\").collect()\n",
+    "    \n",
+    "    for table in tables:\n",
+    "        table_name = table['tableName']\n",
+    "        count = spark.sql(\"select 1 from {}\".format(table_name)).count()\n",
+    "    print(\"   {} [{}]\".format(table_name, count))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Stopping and releasing spark session"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {
+    "collapsed": true
+   },
+   "outputs": [],
+   "source": [
+    "spark.stop()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 6,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "14953204"
+      ]
+     },
+     "execution_count": 6,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "spark.sql(\"select * from core.bsc_product\").count()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 1
+}
diff --git a/notebooks/spark_data_source_test.ipynb b/notebooks/spark_data_source_test.ipynb
new file mode 100644
index 0000000..2c120b4
--- /dev/null
+++ b/notebooks/spark_data_source_test.ipynb
@@ -0,0 +1,49 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "+------------+\n",
+      "|databaseName|\n",
+      "+------------+\n",
+      "|     default|\n",
+      "+------------+\n",
+      "\n"
+     ]
+    }
+   ],
+   "source": [
+    "from marvin_python_toolbox.common.data_source_provider import get_spark_session\n",
+    "spark = get_spark_session()\n",
+    "spark.sql(\"show databases\").show()"
+   ]
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 2",
+   "language": "python",
+   "name": "python2"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 2
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython2",
+   "version": "2.7.6"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..cab8644
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,4 @@
+[pytest]
+minversion    = 2.0
+norecursedirs = .git .tox .eggs .cache *.egg build dist tmp*
+python_files  = test*.py
\ No newline at end of file
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/setup.cfg
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..fce8991
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,181 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import os.path
+import os
+import sys
+
+from setuptools import setup, find_packages
+from setuptools.command.test import test as TestCommand
+
+# Package basic info
+PACKAGE_NAME = 'marvin_python_toolbox'
+PACKAGE_DESCRIPTION = 'Marvin Python Toolbox'
+
+URL = 'https://github.com/marvin-ai/marvin-python-toolbox'
+
+AUTHOR_NAME = 'Daniel Takabayashi'
+AUTHOR_EMAIL = 'daniel.takabayashi@gmail.com'
+
+PYTHON_2 = True
+PYTHON_3 = True
+
+# Project status
+# (should be 'planning', 'pre-alpha', 'alpha', 'beta', 'stable', 'mature' or 'inactive').
+STATUS = 'stable'
+
+# Project topic
+# See https://pypi.python.org/pypi?%3Aaction=list_classifiers for a list
+TOPIC = 'Topic :: Software Development :: Libraries :: Python Modules'
+
+# External dependencies
+# More info https://pythonhosted.org/setuptools/setuptools.html#declaring-dependencies
+REQUIREMENTS_EXTERNAL = [
+    'six>=1.10.0',
+    'bumpversion>=0.5.3',
+    'click>=3.3',
+    'jupyter>=1.0.0',
+    'jupyterlab>=0.32.1',
+    'pep8>=1.7.0',
+    'virtualenv>=15.0.1',
+    'jsonschema>=2.5.1',
+    'python-slugify>=0.1.0',
+    'paramiko>=2.1.2',
+    'PyHive>=0.3.0',
+    'thrift>=0.10.0',
+    'thrift-sasl>=0.2.1',
+    'virtualenvwrapper>=4.7.1',
+    'requests>=2.19.1',
+    'python-dateutil>=2.7.3',
+    # 'python-slugify>=0.1.0' intentionally not repeated: already listed above
+    'path.py>=7.2',
+    'httpretty>=0.9.5',
+    'tornado>=4.5.3',
+    # 'jsonschema>=2.5.1' intentionally not repeated: already listed above
+    'gprof2dot',
+    'ujsonpath>=0.0.2',
+    'simplejson>=3.10.0',
+    'configobj>=5.0.6',
+    'findspark>=1.1.0',
+    'grpcio>=1.13.0',
+    'grpcio-tools>=1.13.0',
+    'joblib>=0.11',
+    'autopep8>=1.3.3',
+    'progressbar2>=3.34.3',
+    'urllib3>=1.21.1',
+    'idna>=2.5',
+    'bleach>=1.5.0',
+    'numpy==1.13.3'
+]
+
+# Test dependencies
+REQUIREMENTS_TESTS = [
+    'tox>=2.2.0',
+    'mock>=2.0.0',
+    'pytest>=2.9.2',
+    'pytest-cov>=1.8.1',
+    'pytest-watch>=4.1.0',
+    'pytest-testmon>=0.8.2',
+    'Keras>=2.2.0',
+    'tensorflow>=1.8.0',
+]
+# This is normally an empty list
+DEPENDENCY_LINKS_EXTERNAL = []
+
+# script to be used
+SCRIPTS = ['bin/marvin', 'marvin.ini']
+
+
+def _get_version():
+    """Return the project version from VERSION file."""
+    with open(os.path.join(os.path.dirname(__file__), PACKAGE_NAME, 'VERSION'), 'rb') as f:
+        version = f.read().decode('ascii').strip()
+    return version
+
+
+class Tox(TestCommand):
+    """Run the test cases using TOX command."""
+    user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
+
+    def initialize_options(self):
+        TestCommand.initialize_options(self)
+        self.tox_args = None
+
+    def finalize_options(self):
+        TestCommand.finalize_options(self)
+        self.test_args = []
+        self.test_suite = True
+
+    def run_tests(self):
+        # Import here, cause outside the eggs aren't loaded
+        import tox
+        import shlex
+        args = self.tox_args
+        if args:
+            args = shlex.split(self.tox_args)
+        else:
+            # Run all tests by default
+            args = ['-c', os.path.join(os.path.dirname(__file__), 'tox.ini'), 'tests']
+        errno = tox.cmdline(args=args)
+        sys.exit(errno)
+
+
+DEVELOPMENT_STATUS = {
+    'planning': '1 - Planning',
+    'pre-alpha': '2 - Pre-Alpha',
+    'alpha': '3 - Alpha',
+    'beta': '4 - Beta',
+    'stable': '5 - Production/Stable',
+    'mature': '6 - Mature',
+    'inactive': '7 - Inactive',
+}
+
+CLASSIFIERS = ['Development Status :: {}'.format(DEVELOPMENT_STATUS[STATUS])]
+if PYTHON_2:
+    CLASSIFIERS += [
+        'Programming Language :: Python :: 2',
+        'Programming Language :: Python :: 2.7',
+    ]
+if PYTHON_3:
+    CLASSIFIERS += [
+        'Programming Language :: Python :: 3',
+        'Programming Language :: Python :: 3.6',
+    ]
+
+setup(
+    name=PACKAGE_NAME,
+    version=_get_version(),
+    url=URL,
+    description=PACKAGE_DESCRIPTION,
+    long_description=open(os.path.join(os.path.dirname(__file__), 'README.md')).read(),
+    author=AUTHOR_NAME,
+    maintainer=AUTHOR_NAME,
+    maintainer_email=AUTHOR_EMAIL,
+    packages=find_packages(exclude=('tests', 'tests.*')),
+    include_package_data=True,
+    zip_safe=False,
+    classifiers=CLASSIFIERS,
+    install_requires=REQUIREMENTS_EXTERNAL,
+    tests_require=REQUIREMENTS_TESTS,
+    extras_require={
+        'testing': REQUIREMENTS_TESTS,
+    },
+    dependency_links=DEPENDENCY_LINKS_EXTERNAL,
+    scripts=SCRIPTS,
+    cmdclass={'test': Tox},
+)
diff --git a/tests/common/test_config.py b/tests/common/test_config.py
new file mode 100644
index 0000000..6cdcda5
--- /dev/null
+++ b/tests/common/test_config.py
@@ -0,0 +1,117 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+import os
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.common.config import Config, load_conf_from_file
+from marvin_python_toolbox.common.exceptions import InvalidConfigException
+
+
+class TestConfig:
+    def teardown_method(self, test_method):
+        Config.reset()
+
+    @mock.patch('marvin_python_toolbox.common.config.ConfigObj')
+    def test_load_conf_from_file(self, ConfigParserMocked):
+        filepath = '/path/to/config/file.ini'
+
+        load_conf_from_file(filepath)
+
+        ConfigParserMocked.assert_called_once_with(filepath)
+
+    @mock.patch('marvin_python_toolbox.common.config.ConfigObj')
+    @mock.patch('marvin_python_toolbox.common.config.os.getenv')
+    def test_load_conf_from_env(self, getenv_mocked, ConfigParserMocked):
+        filepath = '/path/to/config/file.ini'
+
+        getenv_mocked.return_value = filepath
+        load_conf_from_file()
+
+        ConfigParserMocked.assert_called_once_with(filepath)
+
+    @mock.patch('marvin_python_toolbox.common.config.ConfigObj')
+    def test_load_conf_from_default_path(self, ConfigParserMocked):
+        load_conf_from_file()
+
+        ConfigParserMocked.assert_called_once_with(os.environ['DEFAULT_CONFIG_PATH'])
+
+    @mock.patch('marvin_python_toolbox.common.config.logger')
+    @mock.patch('marvin_python_toolbox.common.config.ConfigObj.__getitem__')
+    def test_load_conf_from_default_path_with_invalid_section(self, ConfigParserGetItemMocked, logger_mocked):
+        from configparser import NoSectionError
+
+        filepath = '/path/to/config/file.ini'
+
+        ConfigParserGetItemMocked.side_effect = NoSectionError('')
+        assert len(load_conf_from_file(filepath, section='invalidsection')) == 0
+        logger_mocked.warn.assert_called_once_with(
+            "Couldn't find \"invalidsection\" section in \"/path/to/config/file.ini\""
+        )
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_get(self, load_conf_from_file_mocked, config_fixture):
+        load_conf_from_file_mocked.return_value = config_fixture
+        assert Config.get('key') == config_fixture['key']
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_get_invalid_key(self, load_conf_from_file_mocked, config_fixture):
+        load_conf_from_file_mocked.return_value = config_fixture
+        assert 'invalidkey' not in config_fixture
+        with pytest.raises(InvalidConfigException):
+            Config.get('invalidkey')
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_get_invalid_key_with_default(self, load_conf_from_file_mocked, config_fixture):
+        load_conf_from_file_mocked.return_value = config_fixture
+        assert 'invalidkey' not in config_fixture
+        assert Config.get('invalidkey', default='default_value') == 'default_value'
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_get_with_invalid_section(self, load_conf_from_file_mocked, config_fixture):
+        load_conf_from_file_mocked.return_value = {}
+        with pytest.raises(InvalidConfigException):
+            Config.get('key', section='invalidsection')
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_keys_alread_loaded(self, load_conf_from_file_mocked, config_fixture):
+        load_conf_from_file_mocked.return_value = config_fixture
+        Config._load()
+        assert Config.keys() == config_fixture.keys()
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_keys(self, load_conf_from_file_mocked, config_fixture):
+        load_conf_from_file_mocked.return_value = config_fixture
+        assert Config.keys() == config_fixture.keys()
+
+    @mock.patch('marvin_python_toolbox.common.config.load_conf_from_file')
+    def test_keys_with_invalid_section(self, load_conf_from_file_mocked):
+        load_conf_from_file_mocked.return_value = {}
+        assert not Config.keys(section='invalidsection')
+
+    @mock.patch('os.getenv')
+    def test_read_with_real_file(self, env_read):
+        env_read.return_value = 'tests/fixtures/config.sample'
+        assert Config.get('models.default_context_name') == 'pdl'
+        assert Config.get('models.default_context_name', section='section') == 'pdl2'
+        assert Config.get('models.default_type_name') == 'pdl'
+        assert Config.get('models.default_type_name') == Config.get('models.default_type_name', section='section')
diff --git a/tests/common/test_data.py b/tests/common/test_data.py
new file mode 100644
index 0000000..c244e81
--- /dev/null
+++ b/tests/common/test_data.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import pytest
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.common.data import MarvinData
+from marvin_python_toolbox.common.exceptions import InvalidConfigException
+from io import IOBase
+
+
+@pytest.fixture
+def data_path():
+    return os.path.expanduser("/tmp/data")
+
+
+@pytest.fixture
+def data_path_key():
+    return MarvinData._key
+
+
+def test_read_from_env(data_path_key, data_path):
+    os.environ[data_path_key] = data_path
+    assert MarvinData.data_path == os.environ[data_path_key]
+
+
+def test_path_not_set(data_path_key):
+    # Removing the env var must make data_path raise. pytest.raises also
+    # fails the test if no exception is thrown, unlike the old try/except
+    # formulation which passed silently when data_path succeeded.
+    del os.environ[data_path_key]
+    with pytest.raises(InvalidConfigException):
+        MarvinData.data_path
+
+
+@mock.patch('marvin_python_toolbox.common.data.check_path')
+def test_unable_to_create_path(check_path, data_path_key, data_path):
+    os.environ[data_path_key] = data_path
+    check_path.return_value = False
+
+    # data_path must raise when the directory cannot be created.
+    # pytest.raises also fails the test if no exception is thrown, unlike
+    # the old try/except formulation which passed silently on success.
+    with pytest.raises(InvalidConfigException):
+        MarvinData.data_path
+
+
+def test_load_data_from_filesystem(data_path_key, data_path):
+    data = 'return value'
+
+    # If the data was not found try to load from filesystem
+    with mock.patch('marvin_python_toolbox.common.data.open', create=True) as mock_open:
+        mock_open.return_value = mock.MagicMock(spec=IOBase)
+        mocked_fp = mock_open.return_value.__enter__.return_value
+        mocked_fp.read.return_value = data
+        content = MarvinData.load_data(os.path.join('named_features', 'brands.json'))
+
+    mocked_fp.read.assert_called_once()
+    assert content == data
+
+
+def test_load_data_from_filesystem_exception(data_path_key, data_path):
+    with mock.patch('marvin_python_toolbox.common.data.open') as mock_open:
+        mock_open.side_effect = IOError
+
+        # load_data should propagate IOError
+        with pytest.raises(IOError):
+            MarvinData.load_data(os.path.join('named_features', 'brands.json'))
+
+
+def test_data_key_using_abspath(data_path_key, data_path):
+    assert MarvinData._convert_path_to_key(os.path.join(data_path, 'brands.json')) == 'brands.json'
+
+
+@mock.patch('marvin_python_toolbox.common.data.progressbar')
+@mock.patch('marvin_python_toolbox.common.data.requests')
+def test_download_file(mocked_requests, mocked_progressbar):
+    file_url = 'google.com/file.json'
+    file_path = MarvinData.download_file(file_url)
+    assert file_path == '/tmp/data/file.json'
+
+    file_path = MarvinData.download_file(file_url, local_file_name='myfile')
+    assert file_path == '/tmp/data/myfile'
+
+@mock.patch('marvin_python_toolbox.common.data.progressbar')
+@mock.patch('marvin_python_toolbox.common.data.requests')
+def test_download_file_delete_file_if_exception(mocked_requests, mocked_progressbar):
+    mocked_requests.get.side_effect = Exception()
+    with open('/tmp/data/error.json', 'w') as f:
+        f.write('test')
+    
+    file_url = 'google.com/error.json'
+    with pytest.raises(Exception) as excinfo:
+        file_path = MarvinData.download_file(file_url, force=True)
+
+    assert os.path.exists('/tmp/data/error.json') is False
+
+@mock.patch('marvin_python_toolbox.common.data.progressbar.ProgressBar')
+@mock.patch('marvin_python_toolbox.common.data.requests')
+def test_download_file_write_file_if_content(mocked_requests, mocked_progressbar):
+    from requests import Response
+    file_url = 'google.com/file.json'
+
+    response = mock.Mock(spec=Response)
+    response.iter_content.return_value = 'x'
+    mocked_requests.get.return_value = response
+        
+    mocked_open = mock.mock_open()
+    with mock.patch('marvin_python_toolbox.common.data.open', mocked_open, create=True):
+        MarvinData.download_file(file_url, force=True)
+
+    mocked_open.assert_called_once_with('/tmp/data/file.json', 'wb')
+    handle = mocked_open()
+    handle.write.assert_called_once_with('x')
+
+@mock.patch('marvin_python_toolbox.common.data.progressbar.ProgressBar')
+@mock.patch('marvin_python_toolbox.common.data.requests')
+def test_download_file_dont_write_file_if_no_content(mocked_requests, mocked_progressbar):
+    from requests import Response
+    file_url = 'google.com/file.json'
+
+    response = mock.Mock(spec=Response)
+    response.iter_content.return_value = ''
+    mocked_requests.get.return_value = response
+        
+    mocked_open = mock.mock_open()
+    with mock.patch('marvin_python_toolbox.common.data.open', mocked_open, create=True):
+        MarvinData.download_file(file_url, force=True)
+
+    mocked_open.assert_called_once_with('/tmp/data/file.json', 'wb')
+    handle = mocked_open()
+    assert handle.write.call_count == 0
\ No newline at end of file
diff --git a/tests/common/test_data_source_provider.py b/tests/common/test_data_source_provider.py
new file mode 100644
index 0000000..ff5bce7
--- /dev/null
+++ b/tests/common/test_data_source_provider.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import findspark
+findspark.init()
+
+# is important to import these classes after findspark.init call
+from pyspark.tests import ReusedPySparkTestCase
+from marvin_python_toolbox.common.data_source_provider import get_spark_session
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+
+class TestDataSourceProvider:
+    @mock.patch("pyspark.sql.SparkSession")
+    def test_get_spark_session(self, mocked_session):
+        spark = get_spark_session()
+        assert spark
+        mocked_session.assert_has_calls([
+            mock.call.builder.appName('marvin-engine'),
+            mock.call.builder.appName().getOrCreate()]
+        )
+
+        spark = get_spark_session(app_name='TestEngine')
+        assert spark
+        mocked_session.assert_has_calls([
+            mock.call.builder.appName('TestEngine'),
+            mock.call.builder.appName().getOrCreate()]
+        )
+
+        spark = get_spark_session(configs=[("spark.xxx", "true")])
+        assert spark
+        mocked_session.assert_has_calls([
+            mock.call.builder.appName('TestEngine'),
+            mock.call.builder.appName().getOrCreate()]
+        )
+
+    @mock.patch("pyspark.sql.SparkSession")
+    def test_get_spark_session_with_hive(self, mocked_session):
+        spark = get_spark_session(enable_hive=True)
+        assert spark
+
+        mocked_session.assert_has_calls([
+            mock.call.builder.appName('marvin-engine'),
+            mock.call.builder.appName().enableHiveSupport(),
+            mock.call.builder.appName().enableHiveSupport().getOrCreate()]
+        )
+
+
+class TestSparkDataSource(ReusedPySparkTestCase):
+    def test_spark_initialization(self):
+        rdd = self.sc.parallelize(['Hi there', 'Hi'])
+        counted = rdd.flatMap(lambda word: word.split(' ')).map(lambda word: (word, 1)).reduceByKey(lambda acc, n: acc + n)
+        assert counted.collectAsMap() == {'Hi': 2, 'there': 1}
diff --git a/tests/common/test_http_client.py b/tests/common/test_http_client.py
new file mode 100644
index 0000000..2b92844
--- /dev/null
+++ b/tests/common/test_http_client.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# coding=utf-8
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import json
+import pytest
+import httpretty
+from httpretty import httpretty as httpretty_object
+
+from marvin_python_toolbox.common.http_client import ApiClient, ListResultSet
+from marvin_python_toolbox.common.exceptions import HTTPException
+
+
+class TestHttpClient:
+
+    @httpretty.activate
+    def test_list_result_set(self):
+        data = [{'id': str(n)} for n in range(100)]
+        per_page = 2
+        total_pages = len(data) // per_page
+
+        def fake_items(start=0):
+            httpretty.register_uri(
+                httpretty.GET, "http://localhost:8000/service1/",
+                body=json.dumps({
+                    'objects': data[start:start + per_page],
+                    'total': len(data),
+                }),
+                content_type="application/json",
+                status=200,
+            )
+
+        fake_items(0)
+        result_set = ListResultSet(path='/service1/', limit=per_page)
+
+        assert len(result_set) == len(data)
+
+        # force iter all
+        all_items = list(result_set)
+        assert len(all_items) == len(data)
+
+        assert len(httpretty_object.latest_requests) == total_pages
+
+    @httpretty.activate
+    def test_get_ok(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='[{"id": "1"}]',
+                               content_type="application/json",
+                               status=200)
+
+        response = ApiClient().get('/service1/')
+        assert response.ok
+        assert response.data is not None
+
+    @httpretty.activate
+    def test_get_not_ok(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='[{"error": "deu merda"}]',
+                               content_type="application/json",
+                               status=500)
+
+        response = ApiClient().get('/service1/')
+        assert not response.ok
+
+    @httpretty.activate
+    def test_get_not_ok_not_json(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='error: "deu merda"',
+                               content_type="text/html",
+                               status=500)
+
+        response = ApiClient().get('/service1/')
+        assert not response.ok
+
+    @httpretty.activate
+    def test_get_all_ok(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='{"objects": [{"id": "3"}], "total": 3}',
+                               content_type="application/json",
+                               status=200)
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='{"objects": [{"id": "1"}, {"id": "2"}], "total": 3}',
+                               content_type="application/json",
+                               status=200)
+
+        response = ApiClient().get_all('/service1/', limit=2)
+        response_list = list(response)
+        assert len(response) == 3
+        assert len(response_list) == 3
+        assert response_list[0]['id'] == '1'
+        assert response_list[1]['id'] == '2'
+        assert response_list[2]['id'] == '3'
+
+    @httpretty.activate
+    def test_get_all_not_ok(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='{"error": "deu merda"}',
+                               content_type="application/json",
+                               status=500)
+
+        with pytest.raises(HTTPException):
+            response = ApiClient().get_all('/service1/', limit=2)
+
+    @httpretty.activate
+    def test_get_all_not_ok_second_page(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='{"error": "deu merda"}',
+                               content_type="application/json",
+                               status=500)
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:8000/service1/",
+                               body='{"objects": [{"id": "1"}, {"id": "2"}], "total": 3}',
+                               content_type="application/json",
+                               status=200)
+
+        response = ApiClient().get_all('/service1/', limit=2)
+        assert len(response) == 3
+
+        with pytest.raises(HTTPException):
+            response_list = list(response)
+
+    @httpretty.activate
+    def test_post_not_ok(self):
+
+        httpretty.register_uri(httpretty.POST, "http://localhost:8000/service1/",
+                               body='[{"error": "name required"}]',
+                               content_type='text/json',
+                               status=500)
+
+        response = ApiClient().post('/service1/', {"name": "americanas", "url": "www.americanas.com.br"})
+        assert not response.ok
+
+    @httpretty.activate
+    def test_post_ok(self):
+
+        httpretty.register_uri(httpretty.POST, "http://localhost:8000/service1/",
+                               body='{"success": true}',
+                               content_type='text/json',
+                               status=201)
+
+        response = ApiClient().post('/service1/', {"name": "americanas", "url": "www.americanas.com.br"})
+        assert response.ok
+
+    @httpretty.activate
+    def test_put_not_ok(self):
+
+        httpretty.register_uri(httpretty.PUT, "http://localhost:8000/service1/",
+                               body='[{"error": "name required"}]',
+                               content_type="application/json",
+                               status=500)
+
+        response = ApiClient().put('/service1/', {"id": "1", "url": "www.americanas.com.br"})
+        assert not response.ok
+
+    @httpretty.activate
+    def test_put_ok(self):
+
+        httpretty.register_uri(httpretty.PUT, "http://localhost:8000/service1/",
+                               body='{"success": true}',
+                               content_type='text/json',
+                               status=200)
+
+        response = ApiClient().put('/service1/', {"id": "1", "name": "americanas", "url": "www.americanas.com.br"})
+        assert response.ok
+
+    @httpretty.activate
+    def test_delete_not_ok(self):
+
+        httpretty.register_uri(httpretty.DELETE, "http://localhost:8000/service1/",
+                               body='[{"error": "name required"}]',
+                               content_type="application/json",
+                               status=500)
+
+        response = ApiClient().delete('/service1/')
+        assert not response.ok
+
+    @httpretty.activate
+    def test_delete_ok(self):
+
+        httpretty.register_uri(httpretty.DELETE, "http://localhost:8000/service1/",
+                               body='{"success": true}',
+                               content_type='text/json',
+                               status=200)
+
+        response = ApiClient().delete('/service1/')
+        assert response.ok
+
+    @httpretty.activate
+    def test_full_url_path(self):
+
+        httpretty.register_uri(httpretty.GET, "http://localhost:9999/service_full/",
+                               body='[{"id": "1"}]',
+                               content_type="application/json",
+                               status=200)
+
+        response = ApiClient().get('http://localhost:9999/service_full/')
+        assert response.ok
+        assert response.data is not None
diff --git a/tests/common/test_profiling.py b/tests/common/test_profiling.py
new file mode 100644
index 0000000..62088ab
--- /dev/null
+++ b/tests/common/test_profiling.py
@@ -0,0 +1,223 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+import shutil
+import tempfile
+import uuid
+import pytest
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.common.profiling import profiling
+
+
+class TestProfiling:
+
+    def test_decorator_file_creation(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(output_path=output_path, uid=uid, info={'test': 42})
+        def foo():
+            return
+
+        foo()
+
+        assert os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.png'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.json'))
+
+        shutil.rmtree(output_path)
+
+    def test_decorator_disabled(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(enable=False, output_path=output_path, uid=uid, info={'test': 42})
+        def foo():
+            return
+
+        foo()
+
+        assert not os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.png'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.json'))
+
+        shutil.rmtree(output_path)
+
+    def test_context_manager_file_creation(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        with profiling(output_path=output_path, uid=uid, info={'test': 42}) as prof:
+            def foo():
+                return
+
+            foo()
+
+        assert os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.png'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.json'))
+
+        shutil.rmtree(output_path)
+
+    def test_context_manager_disabled(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        with profiling(enable=False, output_path=output_path, uid=uid, info={'test': 42}) as prof:
+            def foo():
+                return
+
+            foo()
+
+        assert not os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.png'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.json'))
+
+        shutil.rmtree(output_path)
+
+    def test_callable_param(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(enable=lambda *args, **kwargs: True, output_path=lambda *args, **kwargs: output_path, uid=lambda *args, **kwargs: uid, info=lambda *args, **kwargs: {'test': 42})
+        def foo():
+            return
+
+        foo()
+
+        assert os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.png'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.json'))
+
+        shutil.rmtree(output_path)
+
+    def test_exception_recover(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(enable=True, output_path=output_path, uid=lambda *args, **kwargs: uid)
+        def foo():
+            raise RuntimeError()
+
+        with pytest.raises(RuntimeError):
+            foo()
+
+        assert os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.png'))
+
+        shutil.rmtree(output_path)
+
+    def test_disabled_exception_recover(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(enable=False, output_path=output_path, uid=lambda *args, **kwargs: uid)
+        def foo():
+            raise RuntimeError()
+
+        with pytest.raises(RuntimeError):
+            foo()
+
+        assert not os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.png'))
+
+        shutil.rmtree(output_path)
+
+    def test_invalid_info(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(enable=True, output_path=output_path,
+                   uid=lambda *args, **kwargs: uid, info=uuid.uuid4())
+        def foo():
+            return
+
+        foo()
+
+        assert os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert os.path.isfile(os.path.join(output_path, uid + '.png'))
+
+        shutil.rmtree(output_path)
+
+    @mock.patch('marvin_python_toolbox.common.profiling.subprocess')
+    def test_subprocess_exception(self, subprocess_mock):
+        subprocess_mock.call.side_effect = Exception()
+
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        @profiling(enable=True, output_path=output_path,
+                   uid=lambda *args, **kwargs: uid, info=uuid.uuid4())
+        def foo():
+            return
+
+        foo()
+
+        assert os.path.isfile(os.path.join(output_path, uid + '.pstats'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.dot'))
+        assert not os.path.isfile(os.path.join(output_path, uid + '.png'))
+
+        shutil.rmtree(output_path)
+
+    def test_jupyter_repr_html(self):
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        with profiling(output_path=output_path, uid=lambda *args, **kwargs: uid, info={'test': 42}) as prof:
+            def foo():
+                return
+
+            foo()
+        prof_repr_html = prof._repr_html_()
+        assert '<pre>' in prof_repr_html
+        assert '<img' in prof_repr_html
+        assert os.path.join(output_path, uid + '.png') in prof_repr_html
+
+        shutil.rmtree(output_path)
+
+    @mock.patch('marvin_python_toolbox.common.profiling.subprocess')
+    def test_subprocess_exception_jupyter_repr_html(self, subprocess_mock):
+        subprocess_mock.call.side_effect = Exception()
+
+        output_path = tempfile.mkdtemp()
+        uid = str(uuid.uuid4())
+
+        with profiling(output_path=output_path, uid=uid, info={'test': 42}) as prof:
+            def foo():
+                return
+
+            foo()
+        prof_repr_html = prof._repr_html_()
+        assert '<pre>' in prof_repr_html
+        assert '<img' not in prof_repr_html
+        assert os.path.join(output_path, uid + '.png') not in prof_repr_html
+
+        shutil.rmtree(output_path)
\ No newline at end of file
diff --git a/tests/common/test_utils.py b/tests/common/test_utils.py
new file mode 100644
index 0000000..5845811
--- /dev/null
+++ b/tests/common/test_utils.py
@@ -0,0 +1,253 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import uuid
+import datetime
+import json
+
+import pytest
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.common.utils import (class_property, memoized_class_property, get_datetime, deprecated,
+                                        to_json, from_json, is_valid_json, validate_json, generate_key, to_slug,
+                                        url_encode, getattr_qualified, chunks, check_path)
+from marvin_python_toolbox.common.exceptions import InvalidJsonException
+
+instance_count = 0
+
+
+# ==============================================================================
+# Test memoized_class_property
+
+class Dummy:
+    @memoized_class_property
+    def my_cls_attribute(cls):
+        global instance_count
+        instance_count += 1
+        return 42
+
+    @class_property
+    def my_cls_property(cls):
+        return 'oi'
+
+    def any_method(self):
+        return self.my_cls_attribute
+
+
+def test_initial_instance_count():
+    global instance_count
+    assert 0 == instance_count
+
+
+def test_property():
+    assert 42 == Dummy.my_cls_attribute
+
+
+def test_access_from_instance_method():
+    assert 42 == Dummy().any_method()
+
+
+def test_create_only_one_instance():
+    global instance_count
+
+    Dummy.my_cls_attribute
+    assert 1 == instance_count
+
+    Dummy.my_cls_attribute
+    assert 1 == instance_count
+
+
+def test_class_property():
+    assert Dummy.my_cls_property == 'oi'
+
+
+# ==============================================================================
+# test to_json & from_json
+
+def test_to_json():
+    d = {'i': 42, 's': 'string', 'dt': datetime.datetime.now(), 'id': uuid.uuid4()}
+    assert isinstance(to_json(d), str)
+
+
+def test_to_json_with_obj_with_id():
+    class Obj(object):
+        id = '42'
+
+    d = {'i': Obj()}
+    assert isinstance(to_json(d), str)
+
+
+def test_to_json_with_obj_without_id():
+    class Obj(object):
+        pass
+
+    d = {'i': Obj()}
+    with pytest.raises(TypeError):
+        to_json(d)
+
+
+def test_to_json_with_numpy():
+    class FakeNumpyFloat(object):
+        def item(self):
+            return 0.666
+
+    d = {'float': FakeNumpyFloat()}
+    assert to_json(d) == '{"float": 0.666}'
+
+
+def test_from_json():
+    d = {'i': 42, 's': 'string', 'dt': datetime.datetime.now(), 'id': uuid.uuid4()}
+    assert isinstance(from_json(to_json(d)), dict)
+
+
+def test_validate_json():
+    valid = {
+        'prop': ['a', 'b', 'c']
+    }
+    invalid = {
+        'prop': 'a'
+    }
+    schema = {
+        'type': 'object',
+        'properties': {
+            'prop': {
+                'type': 'array',
+                'items': { 'type': 'string' }
+            }
+        }
+    }
+    validate_json(valid, schema=schema)
+
+    with pytest.raises(InvalidJsonException):
+        validate_json(invalid, schema=schema)
+
+
+def test_is_valid_json():
+    valid = {
+        'prop': ['a', 'b', 'c']
+    }
+    invalid = {
+        'prop': 'a'
+    }
+    schema = {
+        'type': 'object',
+        'properties': {
+            'prop': {
+                'type': 'array',
+                'items': { 'type': 'string' }
+            }
+        }
+    }
+    assert is_valid_json(valid, schema=schema) is True
+    assert is_valid_json(invalid, schema=schema) is False
+
+    assert is_valid_json(json.dumps(valid), schema=json.dumps(schema)) is True
+    assert is_valid_json(json.dumps(invalid), schema=json.dumps(schema)) is False
+
+
+def test_generate_key():
+    value = 'www.image.com.br'
+    valid_sha256 = '58643ebba1abbbd92c586957a98c9f3e8a104b681eb37b99df50b2e1044bbf20'
+    assert generate_key(value) == valid_sha256
+
+
+def test_generate_key_with_unicode():
+    value = u'http://static.wmobjects.com.br/imgres/arquivos/ids/2509201-344-344/torradeira-cuisinart-tan-4-\u2013-branca.jpg'
+    assert generate_key(value)
+
+
+def test_to_slug():
+    assert to_slug('any text') == "any-text"
+
+
+def test_chunk():
+    assert len(list(chunks([1, 2, 3, 4], 2))) == 2
+
+
+def test_getattr_qualified():
+    class B(object):
+        c = {'d': 'e'}
+
+    class A(object):
+        b = B()
+
+    a = A()
+
+    assert getattr_qualified(a, 'b') == a.b
+    assert getattr_qualified(a, 'b.c') == a.b.c
+    assert getattr_qualified(a, 'b.c["d"]') == a.b.c['d']
+    assert getattr_qualified(a, "b.c['d']") == a.b.c['d']
+    assert getattr_qualified(a, "b.c[d]") == a.b.c['d']
+    assert getattr_qualified(a, "b.c[f]", 'default') == 'default'
+    assert getattr_qualified(a, "x.c[f]", 'default') == 'default'
+
+    with pytest.raises(AttributeError):
+        getattr_qualified(a, 'x.c[f]')
+
+    with pytest.raises(KeyError):
+        getattr_qualified(a, 'b.c["z"]')
+
+    with pytest.raises(TypeError):
+        getattr_qualified(a, 'b', 'default', 'bla')
+
+
+@mock.patch('marvin_python_toolbox.common.utils.os.path.exists')
+def test_path_not_exists(path_exists_mock):
+    path_exists_mock.return_value = False
+    assert not check_path('temp')
+
+
+@mock.patch('marvin_python_toolbox.common.utils.os.makedirs')
+@mock.patch('marvin_python_toolbox.common.utils.os.path.exists')
+def test_path_creation(path_exists_mock, makedirs_mock):
+    path_exists_mock.side_effect = [False, True]
+    makedirs_mock.return_value = None
+    assert check_path('temp', create=True)
+
+
+def test_get_datetime():
+    date = get_datetime()
+    date = date.split()
+
+    assert len(date[0]) == 10
+    assert len(date[1]) == 8
+    assert date[2] == 'UTC'
+
+
+def test_deprecated():
+    @deprecated
+    def deprecated_func():
+        pass
+
+    with pytest.warns(DeprecationWarning):
+        deprecated_func()
+
+
+def test_url_encode():
+    original = u'http://host.com/path_with_special_char_áéíóú?and=query&string=true'
+    transformed = 'http://host.com/path_with_special_char_%C3%A1%C3%A9%C3%AD%C3%B3%C3%BA?and=query&string=true'
+    assert url_encode(original) == transformed
+
+
+def test_url_encode_string():
+    original = b'http://host.com/path_with_special_char_\xc3\xa1\xc3\xa9\xc3\xad\xc3\xb3\xc3\xba?and=query&string=true'
+    transformed = 'http://host.com/path_with_special_char_%C3%A1%C3%A9%C3%AD%C3%B3%C3%BA?and=query&string=true'
+    assert url_encode(original) == transformed
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..30a578a
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+
+@pytest.fixture
+def config_fixture():
+    return {
+        'key': 'value',
+        'section': {
+            'section_key': 'section_value'
+        }
+    }
diff --git a/tests/engine_base/serializers/test_keras_serializer.py b/tests/engine_base/serializers/test_keras_serializer.py
new file mode 100644
index 0000000..7cd42cc
--- /dev/null
+++ b/tests/engine_base/serializers/test_keras_serializer.py
@@ -0,0 +1,62 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import mock
+import pytest
+
+from marvin_python_toolbox.engine_base import EngineBaseTraining
+from marvin_python_toolbox.engine_base import KerasSerializer
+
+
+@pytest.fixture
+def engine():
+    class MyEngineAction(KerasSerializer, EngineBaseTraining):
+        def execute(self, **kwargs):
+            pass
+    return MyEngineAction(default_root_path="/tmp/.marvin")
+
+
+class TestKerasSerializer(object):
+    @mock.patch('keras.models.load_model')
+    def test__serializer_load_keras(self, mocked_load, engine):
+        mocked_load.return_value = {"me": "here"}
+        mocked_path = "/tmp/engine/model"
+        obj = engine._serializer_load(object_file_path=mocked_path)
+        mocked_load.assert_called_once_with(mocked_path)
+        assert obj == {"me": "here"}
+
+    @mock.patch('joblib.load')
+    def test__serializer_load_not_keras(self, mocked_load, engine):
+        mocked_path = "/tmp/engine/dataset"
+        mocked_load.return_value = {"me": "here"}
+        obj = engine._serializer_load(object_file_path=mocked_path)
+        mocked_load.assert_called_once_with(mocked_path)
+        assert obj == {"me": "here"}
+
+    def test__serializer_dump_keras(self, engine):
+        mocked_obj = mock.MagicMock()
+        mocked_path = "/tmp/engine/model"
+        engine._serializer_dump(mocked_obj, object_file_path=mocked_path)
+        mocked_obj.save.assert_called_once_with(mocked_path)
+
+    @mock.patch('marvin_python_toolbox.engine_base.EngineBaseTraining._serializer_dump')
+    def test__serializer_dump_not_keras(self, mocked_dump, engine):
+        mocked_obj = mock.MagicMock()
+        mocked_path = "/tmp/engine/dataset"
+        engine._serializer_dump(mocked_obj, object_file_path=mocked_path)
+        mocked_dump.assert_called_once_with(mocked_obj, mocked_path)
+
diff --git a/tests/engine_base/test_engine_base_action.py b/tests/engine_base/test_engine_base_action.py
new file mode 100644
index 0000000..3ac5647
--- /dev/null
+++ b/tests/engine_base/test_engine_base_action.py
@@ -0,0 +1,352 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from builtins import str
+import joblib as serializer
+import pytest
+import os
+import shutil
+import copy
+from mock import ANY
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.engine_base import EngineBaseBatchAction
+from marvin_python_toolbox.engine_base import EngineBaseAction, EngineBaseOnlineAction
+from marvin_python_toolbox.engine_base.stubs.actions_pb2 import HealthCheckResponse, HealthCheckRequest
+from marvin_python_toolbox.engine_base.stubs.actions_pb2 import OnlineActionRequest, ReloadRequest, BatchActionRequest
+
+
+@pytest.fixture
+def engine_action():
+    class EngineAction(EngineBaseAction):
+        def execute(self, params, **kwargs):
+            return 1
+
+    return EngineAction(default_root_path="/tmp/.marvin")
+
+
+@pytest.fixture
+def batch_engine_action():
+    class BatchEngineAction(EngineBaseBatchAction):
+        def execute(self, params, **kwargs):
+            return 1
+
+    return BatchEngineAction(default_root_path="/tmp/.marvin")
+
+
+class TestEngineBaseAction:
+    def setup(self):
+        shutil.rmtree("/tmp/.marvin", ignore_errors=True)
+
+    def test_retrieve_obj(self):
+        path = '/tmp/test-obj'
+        obj = [1, 2]
+        serializer.dump(obj, open(path, 'wb'))
+        assert obj == EngineBaseAction.retrieve_obj(path)
+
+    def test_constructor(self):
+        class EngineAction(EngineBaseAction):
+            def execute(self, params, **kwargs):
+                return 1
+
+        engine = EngineAction(params={"x", 1}, persistence_mode='x')
+
+        assert engine._params == {"x", 1}
+        assert engine._persistence_mode == 'x'
+
+    def test_get_object_file_path(self, engine_action):
+        assert engine_action._get_object_file_path(object_reference="xpath") == "/tmp/.marvin/test_base_action/xpath"
+
+    def test_save_obj_memory_persistence(self, engine_action):
+        obj = [6, 5, 4]
+        object_reference = '_params'
+        engine_action._save_obj(object_reference, obj)
+
+        assert obj == engine_action._params
+        assert not os.path.exists("/tmp/.marvin/test_base_action/params")
+
+    def test_save_obj_local_persistence(self, engine_action):
+        obj = [6, 5, 4]
+        object_reference = '_params'
+        engine_action._persistence_mode = 'local'
+        engine_action._save_obj(object_reference, obj)
+
+        assert obj == engine_action._params
+        assert os.path.exists("/tmp/.marvin/test_base_action/params")
+        assert list(engine_action._local_saved_objects.keys()) == [object_reference]
+
+    def test_release_saved_objects(self, engine_action):
+        obj = [6, 5, 4]
+        object_reference = '_params'
+        engine_action._persistence_mode = 'local'
+        engine_action._save_obj(object_reference, obj)
+
+        assert list(engine_action._local_saved_objects.keys()) == [object_reference]
+        engine_action._release_local_saved_objects()
+        assert engine_action._params is None
+
+    def test_save_two_times(self, engine_action):
+        obj = [6, 5, 4]
+        object_reference = '_params'
+        engine_action._persistence_mode = 'local'
+
+        engine_action._save_obj(object_reference, obj)
+        try:
+            engine_action._save_obj(object_reference, obj)
+            assert False
+
+        except Exception as e:
+            assert str(e.args[0]) == 'MultipleAssignException'
+            assert str(e.args[1]) == '_params'
+
+    def test_load_obj_local_persistence(self, engine_action):
+        engine_action2 = copy.copy(engine_action)
+
+        obj = [6, 5, 4]
+        object_reference = '_params'
+        engine_action._persistence_mode = 'local'
+        engine_action._save_obj(object_reference, obj)
+
+        engine_action2._persistence_mode = 'local'
+        engine_action2._load_obj(object_reference)
+
+        assert obj == engine_action2._params
+
+        engine_action2._persistence_mode = 'memory'
+        engine_action2._params = [1]
+        engine_action2._load_obj(object_reference)
+
+        assert [1] == engine_action2._params
+
+    def test_health_check_ok(self, engine_action):
+        obj1_key = "obj1"
+        engine_action._save_obj(obj1_key, "check")
+        request = HealthCheckRequest(artifacts=obj1_key)
+        expected_response = HealthCheckResponse(status=HealthCheckResponse.OK)
+        response = engine_action._health_check(request=request, context=None)
+
+        assert expected_response.status == response.status
+
+    def test_health_check_ok_multiple(self, engine_action):
+        obj1_key = "obj1"
+        engine_action._save_obj(obj1_key, "check")
+        obj2_key = "obj2"
+        engine_action._save_obj(obj2_key, "check")
+
+        request = HealthCheckRequest(artifacts=obj1_key + "," + obj2_key)
+        expected_response = HealthCheckResponse(status=HealthCheckResponse.OK)
+        response = engine_action._health_check(request=request, context=None)
+
+        assert expected_response.status == response.status
+
+    def test_health_check_nok(self, engine_action):
+        obj1_key = "obj1"
+        request = HealthCheckRequest(artifacts=obj1_key)
+        expected_response = HealthCheckResponse(status=HealthCheckResponse.NOK)
+        response = engine_action._health_check(request=request, context=None)
+
+        assert expected_response.status == response.status
+
+        engine_action._save_obj(obj1_key, "check")
+        request = HealthCheckRequest(artifacts=obj1_key + ", obj2")
+        response = engine_action._health_check(request=request, context=None)
+
+        assert expected_response.status == response.status
+
+    def test_health_check_exception(self):
+        class BadEngineAction(EngineBaseAction):
+            def execute(self, params, **kwargs):
+                return 1
+
+            def __getattribute__(self, name):
+                if name == 'obj1':
+                    raise Exception('I am Bad!')
+                else:
+                    return EngineBaseAction.__getattribute__(self, name)
+
+        engine_action = BadEngineAction()
+
+        obj1_key = "obj1"
+        request = HealthCheckRequest(artifacts=obj1_key)
+        expected_response = HealthCheckResponse(status=HealthCheckResponse.NOK)
+        response = engine_action._health_check(request=request, context=None)
+
+        assert expected_response.status == response.status
+
+    def test_remote_execute_with_string_response(self):
+        class StringReturnedAction(EngineBaseOnlineAction):
+            def execute(self, input_message, params, **kwargs):
+                return "message 1"
+
+        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        engine_action = StringReturnedAction()
+        response = engine_action._remote_execute(request=request, context=None)
+
+        assert response.message == "message 1"
+
+    def test_remote_execute_with_int_response(self):
+        class StringReturnedAction(EngineBaseOnlineAction):
+            def execute(self, input_message, params, **kwargs):
+                return 1
+
+        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        engine_action = StringReturnedAction()
+        response = engine_action._remote_execute(request=request, context=None)
+
+        assert response.message == "1"
+
+    def test_remote_execute_with_object_response(self):
+        class StringReturnedAction(EngineBaseOnlineAction):
+            def execute(self, input_message, params, **kwargs):
+                return {"r": 1}
+
+        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        engine_action = StringReturnedAction()
+        response = engine_action._remote_execute(request=request, context=None)
+
+        assert response.message == "{\"r\": 1}"
+
+    def test_remote_execute_with_list_response(self):
+        class StringReturnedAction(EngineBaseOnlineAction):
+            def execute(self, input_message, params, **kwargs):
+                return [1, 2]
+
+        request = OnlineActionRequest(message="{\"k\": 1}", params="{\"k\": 1}")
+        engine_action = StringReturnedAction()
+        response = engine_action._remote_execute(request=request, context=None)
+
+        assert response.message == "[1, 2]"
+
+    @mock.patch('marvin_python_toolbox.engine_base.engine_base_action.EngineBaseAction._load_obj')
+    def test_remote_reload_with_artifacts(self, load_obj_mocked, engine_action):
+        objs_key = "obj1"
+        engine_action._save_obj(objs_key, "check")
+        request = ReloadRequest(artifacts=objs_key, protocol='xyz')
+
+        response = engine_action._remote_reload(request, None)
+        load_obj_mocked.assert_called_once_with(force=True, object_reference=u'obj1')
+        assert response.message == "Reloaded"
+
+    @mock.patch('marvin_python_toolbox.engine_base.engine_base_action.EngineBaseAction._load_obj')
+    def test_remote_reload_without_artifacts(self, load_obj_mocked, engine_action):
+        request = ReloadRequest(artifacts=None, protocol='xyz')
+
+        response = engine_action._remote_reload(request, None)
+        load_obj_mocked.assert_not_called()
+        assert response.message == "Nothing to reload"
+
+    def test_load_obj_dont_reload_without_force(self, engine_action):
+        obj = [6, 5, 4]
+        object_reference = '_params'
+        engine_action._persistence_mode = 'local'
+        engine_action._save_obj(object_reference, obj)
+
+        assert obj == engine_action._params
+
+        new_obj = [1, 2, 3]
+        path = engine_action._get_object_file_path(object_reference)
+        engine_action._serializer_dump(new_obj, path)
+        engine_action._load_obj(object_reference)
+
+        assert obj == engine_action._params
+
+        engine_action._load_obj(object_reference, force=True)
+
+        assert new_obj == engine_action._params
+
+
+class TestEngineBaseBatchAction:
+    def setup(self):
+        shutil.rmtree("/tmp/.marvin", ignore_errors=True)
+
+    def test_pipeline_execute_without_previous_steps(self, batch_engine_action):
+        batch_engine_action.execute = mock.MagicMock()
+        batch_engine_action._pipeline_execute(params=123)
+
+        batch_engine_action.execute.assert_called_once_with(123)
+
+    def test_pipeline_execute_with_previous_steps(self, batch_engine_action):
+        previous = copy.copy(batch_engine_action)
+        previous._pipeline_execute = mock.MagicMock()
+        batch_engine_action._previous_step = previous
+        batch_engine_action.execute = mock.MagicMock()
+
+        batch_engine_action._pipeline_execute(params=123)
+
+        previous._pipeline_execute.assert_called_once_with(123)
+        batch_engine_action.execute.assert_called_once_with(123)
+
+    def test_remote_execute_without_request_params(self, batch_engine_action):
+        batch_engine_action._params = 123
+        batch_engine_action._pipeline_execute = mock.MagicMock()
+
+        request = BatchActionRequest()
+        batch_engine_action._remote_execute(request, None)
+
+        batch_engine_action._pipeline_execute.assert_called_once_with(params=123)
+
+    def test_remote_execute_with_request_params(self, batch_engine_action):
+        batch_engine_action._params = 123
+        batch_engine_action._pipeline_execute = mock.MagicMock()
+
+        request = BatchActionRequest(params='{"test": 123}')
+        batch_engine_action._remote_execute(request, None)
+
+        batch_engine_action._pipeline_execute.assert_called_once_with(params={u"test": 123})
+
+    @mock.patch("json.load")
+    def test__serializer_load_metrics(self, mocked_load):
+        obj = {"key", 1}
+        mocked_load.return_value = obj
+        object_reference = "_metrics"
+
+        class _EAction(EngineBaseAction):
+            _metrics = None
+
+            def execute(self, params, **kwargs):
+                pass
+
+        mocked_open = mock.mock_open()
+        with mock.patch('marvin_python_toolbox.engine_base.engine_base_action.open', mocked_open, create=False):
+            _metrics = _EAction(default_root_path="/tmp/.marvin", persistence_mode="local")._load_obj(object_reference)
+
+        mocked_load.assert_called_once_with(ANY)
+        mocked_open.assert_called_once()
+        assert obj == _metrics
+
+    @mock.patch("json.dump")
+    def test__serializer_dump_metrics(self, mocked_dump):
+        obj = {"key", 1}
+        object_reference = "_metrics"
+
+        class _EAction(EngineBaseAction):
+            _metrics = None
+
+            def execute(self, params, **kwargs):
+                pass
+
+        mocked_open = mock.mock_open()
+        with mock.patch('marvin_python_toolbox.engine_base.engine_base_action.open', mocked_open, create=False):
+            _EAction(default_root_path="/tmp/.marvin", persistence_mode="local")._save_obj(object_reference, obj)
+
+        mocked_dump.assert_called_once_with(obj, ANY, indent=4, separators=(u',', u': '), sort_keys=True)
+        mocked_open.assert_called_once()
+
+
diff --git a/tests/engine_base/test_engine_base_data_handler.py b/tests/engine_base/test_engine_base_data_handler.py
new file mode 100644
index 0000000..e89e8cb
--- /dev/null
+++ b/tests/engine_base/test_engine_base_data_handler.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from marvin_python_toolbox.engine_base import EngineBaseDataHandler
+
+
+@pytest.fixture
+def engine_action():
+    class EngineAction(EngineBaseDataHandler):
+        def execute(self, **kwargs):
+            return 1
+
+    return EngineAction(default_root_path="/tmp/.marvin")
+
+
+class TestEngineBaseDataHandler:
+
+    def test_initial_dataset(self, engine_action):
+        engine_action.marvin_initial_dataset = [1]
+        assert engine_action.marvin_initial_dataset == engine_action._initial_dataset == [1]
+
+    def test_dataset(self, engine_action):
+        engine_action.marvin_dataset = [1]
+        assert engine_action.marvin_dataset == engine_action._dataset == [1]
diff --git a/tests/engine_base/test_engine_base_prediction.py b/tests/engine_base/test_engine_base_prediction.py
new file mode 100644
index 0000000..bba43a3
--- /dev/null
+++ b/tests/engine_base/test_engine_base_prediction.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from marvin_python_toolbox.engine_base import EngineBasePrediction
+
+
+@pytest.fixture
+def engine_action():
+    class EngineAction(EngineBasePrediction):
+        def execute(self, **kwargs):
+            return 1
+
+    return EngineAction(default_root_path="/tmp/.marvin")
+
+
+class TestEngineBasePrediction:
+
+    def test_model(self, engine_action):
+        engine_action.marvin_model = [2]
+        assert engine_action.marvin_model == engine_action._model == [2]
+
+    def test_metrics(self, engine_action):
+        engine_action.marvin_metrics = [3]
+        assert engine_action.marvin_metrics == engine_action._metrics == [3]
diff --git a/tests/engine_base/test_engine_base_training.py b/tests/engine_base/test_engine_base_training.py
new file mode 100644
index 0000000..f809b57
--- /dev/null
+++ b/tests/engine_base/test_engine_base_training.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pytest
+
+from marvin_python_toolbox.engine_base import EngineBaseTraining
+
+
+@pytest.fixture
+def engine_action():
+    class EngineAction(EngineBaseTraining):
+        def execute(self, **kwargs):
+            return 1
+
+    return EngineAction(default_root_path="/tmp/.marvin")
+
+
+class TestEngineBaseTraining:
+
+    def test_dataset(self, engine_action):
+        engine_action.marvin_dataset = [1]
+        assert engine_action.marvin_dataset == engine_action._dataset == [1]
+
+    def test_model(self, engine_action):
+        engine_action.marvin_model = [2]
+        assert engine_action.marvin_model == engine_action._model == [2]
+
+    def test_metrics(self, engine_action):
+        engine_action.marvin_metrics = [3]
+        assert engine_action.marvin_metrics == engine_action._metrics == [3]
diff --git a/tests/fixtures/config.sample b/tests/fixtures/config.sample
new file mode 100644
index 0000000..c49951e
--- /dev/null
+++ b/tests/fixtures/config.sample
@@ -0,0 +1,8 @@
+[marvin]
+
+models.default_context_name = pdl
+models.default_type_name = pdl
+
+[section]
+
+models.default_context_name = pdl2
diff --git a/tests/management/test_engine.py b/tests/management/test_engine.py
new file mode 100644
index 0000000..91cb8ed
--- /dev/null
+++ b/tests/management/test_engine.py
@@ -0,0 +1,217 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from mock import call
+from mock import ANY
+from marvin_python_toolbox.management.engine import MarvinDryRun
+from marvin_python_toolbox.management.engine import dryrun
+from marvin_python_toolbox.management.engine import engine_httpserver
+from marvin_python_toolbox.management.engine import _create_virtual_env
+from marvin_python_toolbox.management.engine import _make_data_link
+import os
+
+
+class mocked_ctx(object):
+    obj = {'package_name': 'test_package', 'config': {'inidir': 'test_dir'}}
+
+
+def mocked_sleep(value):
+    if value == 100:
+        raise KeyboardInterrupt()
+
+
+class mocked_acquisitor():
+    def __init__(self, persistence_mode, is_remote_calling, default_root_path):
+        self.persistence_mode = persistence_mode
+        self.is_remote_calling = is_remote_calling
+        self.default_root_path = default_root_path
+
+    def execute(self, **kwargs):
+        print('test')
+
+
+@mock.patch('marvin_python_toolbox.management.engine.time.time')
+@mock.patch('marvin_python_toolbox.management.engine.MarvinDryRun')
+@mock.patch('marvin_python_toolbox.management.engine.sys.exit')
+@mock.patch('marvin_python_toolbox.management.engine.os.system')
+def test_dryrun(system_mocked, exit_mocked, MarvinDryRun_mocked, time_mocked):
+    params = '/tmp/params'
+    messages_file = '/tmp/messages'
+    feedback_file = '/tmp/feedback'
+    action = 'all'
+    spark_conf = '/opt/spark/conf'
+    time_mocked.return_value = 555
+
+    dryrun(ctx=mocked_ctx, action=action, params_file=params, messages_file=messages_file, feedback_file=feedback_file, initial_dataset=None,
+           dataset=None, model=None, metrics=None, response=False, spark_conf=spark_conf, profiling=None)
+
+    time_mocked.assert_called()
+    exit_mocked.assert_called_with("Stoping process!")
+    MarvinDryRun_mocked.assert_called_with(ctx=mocked_ctx, messages=[{}, {}], print_response=False)
+
+    MarvinDryRun_mocked.return_value.execute.assert_called_with(clazz='Feedback', dataset=None, initial_dataset=None, metrics=None, model=None,
+                                                                params={}, profiling_enabled=None)
+
+    action = 'acquisitor'
+
+    dryrun(ctx=mocked_ctx, action=action, params_file=params, messages_file=messages_file, feedback_file=feedback_file, initial_dataset=None,
+           dataset=None, model=None, metrics=None, response=False, spark_conf=spark_conf, profiling=None)
+
+    time_mocked.assert_called()
+    MarvinDryRun_mocked.assert_called_with(ctx=mocked_ctx, messages=[{}, {}], print_response=False)
+
+
+@mock.patch('marvin_python_toolbox.management.engine.json.dumps')
+@mock.patch('marvin_python_toolbox.management.engine.dynamic_import')
+def test_marvindryrun(import_mocked, dumps_mocked):
+    messages = ['/tmp/messages', '/tmp/feedback']
+    response = 'response'
+    clazz = 'PredictionPreparator'
+    import_mocked.return_value = mocked_acquisitor
+
+    test_dryrun = MarvinDryRun(ctx=mocked_ctx, messages=messages, print_response=response)
+    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=True)
+
+    import_mocked.assert_called_with("{}.{}".format('test_package', 'PredictionPreparator'))
+    dumps_mocked.assert_called_with(None, indent=4, sort_keys=True)
+
+    clazz = 'Feedback'
+    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
+
+    import_mocked.assert_called_with("{}.{}".format('test_package', 'Feedback'))
+
+    clazz = 'Predictor'
+    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
+
+    import_mocked.assert_called_with("{}.{}".format('test_package', 'PredictionPreparator'))
+
+    clazz = 'test'
+    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=True)
+    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
+
+    import_mocked.assert_called_with("{}.{}".format('test_package', 'test'))
+
+    response = False
+    clazz = 'PredictionPreparator'
+
+    MarvinDryRun(ctx=mocked_ctx, messages=messages, print_response=response)
+    test_dryrun = MarvinDryRun(ctx=mocked_ctx, messages=messages, print_response=response)
+    test_dryrun.execute(clazz=clazz, params=None, initial_dataset=None, dataset=None, model=None, metrics=None, profiling_enabled=False)
+
+    dumps_mocked.assert_called_with(None, indent=4, sort_keys=True)
+
+
+@mock.patch('marvin_python_toolbox.management.engine.sys.exit')
+@mock.patch('marvin_python_toolbox.management.engine.time.sleep')
+@mock.patch('marvin_python_toolbox.management.engine.MarvinData')
+@mock.patch('marvin_python_toolbox.management.engine.Config')
+@mock.patch('marvin_python_toolbox.management.engine.subprocess.Popen')
+def test_engine_httpserver(Popen_mocked, Config_mocked, MarvinData_mocked, sleep_mocked, exit_mocked):
+
+    sleep_mocked.side_effect = mocked_sleep
+
+    engine_httpserver(ctx=mocked_ctx, action='all', params_file='test_params', initial_dataset='test_id', dataset='test_d', model='test_m', metrics='test_me',
+                      protocol='test_protocol', spark_conf='test_conf', http_host='test_host', http_port=9999, executor_path='test_executor',
+                      max_workers=9, max_rpc_workers=99, extra_executor_parameters="-DXX=123")
+
+    expected_calls = []
+
+    expected_calls.append(call([
+        'marvin', 'engine-grpcserver',
+        '-a', 'all',
+        '-w', '9',
+        '-rw', '99',
+        ANY, ANY,
+        ANY, ANY,
+        ANY, ANY,
+        ANY, ANY,
+        ANY, ANY,
+        ANY, ANY]
+    ))
+
+    expected_calls.append(call([
+        'java',
+        '-DmarvinConfig.engineHome=test_dir',
+        '-DmarvinConfig.ipAddress=test_host',
+        '-DmarvinConfig.port=9999',
+        '-DmarvinConfig.protocol=test_protocol',
+        '-DXX=123',
+        '-jar',
+        MarvinData_mocked.download_file('test_executor')]
+    ))
+
+    Popen_mocked.assert_has_calls(expected_calls)
+    exit_mocked.assert_called_with(0)
+
+
+@mock.patch('marvin_python_toolbox.management.engine.subprocess.Popen')
+def test_create_virtual_env(Popen_mocked):
+    name = "my_project"
+    dest = "/tmp/xxx"
+    python = "python"
+
+    mockx = mock.MagicMock()
+    mockx.wait.return_value = 0
+    Popen_mocked.return_value = mockx
+
+    env_name = _create_virtual_env(name, dest, python)
+
+    commands = [
+        'bash',
+        '-c',
+        '. virtualenvwrapper.sh; mkvirtualenv -p {0} -a {1} {2};'.format(python, dest, env_name)
+    ]
+
+    Popen_mocked.assert_called_with(commands, env=os.environ)
+    assert env_name == 'my-project-env'
+
+
+@mock.patch('marvin_python_toolbox.management.engine.sys')
+@mock.patch('marvin_python_toolbox.management.engine.subprocess.Popen')
+def test_create_virtual_env_error(Popen_mocked, sys_mocked):
+    name = "my_project"
+    dest = "/tmp/xxx"
+    python = "python"
+
+    mockx = mock.MagicMock()
+    mockx.wait.return_value = 3
+    Popen_mocked.return_value = mockx
+
+    env_name = _create_virtual_env(name, dest, python)
+
+    commands = [
+        'bash',
+        '-c',
+        '. virtualenvwrapper.sh; mkvirtualenv -p {0} -a {1} {2};'.format(python, dest, env_name)
+    ]
+
+    Popen_mocked.assert_called_with(commands, env=os.environ)
+    mockx.wait.assert_called_once()
+    # sys_mocked.exit.assert_called_once_with(1)
+
+
+@mock.patch('marvin_python_toolbox.management.engine.os.symlink')
+def test_make_data_link_call_symlink(mock_symlink):
+    os.environ['MARVIN_DATA_PATH'] = '/tmp/'
+    dest = '/tmp/'
+    _make_data_link(dest)
+    mock_symlink.assert_called_once_with('/tmp/', '/tmp/notebooks/data')
diff --git a/tests/management/test_hive.py b/tests/management/test_hive.py
new file mode 100644
index 0000000..a24e417
--- /dev/null
+++ b/tests/management/test_hive.py
@@ -0,0 +1,809 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.management import hive
+
+
+@mock.patch('marvin_python_toolbox.management.hive.json')
+def test_hive_generateconf_write_file_with_json(mocked_json):
+    default_conf = [{
+        "origin_host": "xxx_host_name",
+        "origin_db": "xxx_db_name",
+        "origin_queue": "marvin",
+        "target_table_name": "xxx_table_name",
+        "sample_sql": "SELECT * FROM XXX",
+        "sql_id": "1"
+    }]
+
+    mocked_open = mock.mock_open()
+    with mock.patch('marvin_python_toolbox.management.hive.open', mocked_open, create=True):
+        hive.hive_generateconf(None)
+
+    mocked_open.assert_called_once_with('hive_dataimport.conf', 'w')
+    mocked_json.dump.assert_called_once_with(default_conf, mocked_open(), indent=2)
+
+
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.reset_remote_tables')
+def test_hive_resetremote_call_HiveDataImporter_reset_remote_tables(reset_mocked):
+    hive.hive_resetremote(ctx=None, host="test", engine="test", queue="test")
+    reset_mocked.assert_called_once_with()
+
+
+@mock.patch('marvin_python_toolbox.management.hive.read_config')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.__init__')
+def test_hive_dataimport_without_config(init_mocked, read_config_mocked):
+    read_config_mocked.return_value = None
+
+    ctx = conf = sql_id = engine = \
+        skip_remote_preparation = force_copy_files = validate = force =\
+        force_remote = max_query_size = destination_host = destination_port =\
+        destination_host_username = destination_host_password = destination_hdfs_root_path = None
+
+    hive.hive_dataimport(
+        ctx, conf, sql_id, engine, 
+        skip_remote_preparation, force_copy_files, validate, force,
+        force_remote, max_query_size, destination_host, destination_port,
+        destination_host_username, destination_host_password, destination_hdfs_root_path
+    )
+
+    init_mocked.assert_not_called()
+
+
+@mock.patch('marvin_python_toolbox.management.hive.read_config')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.__init__')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.table_exists')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.import_sample')
+def test_hive_dataimport_with_config(import_sample_mocked, table_exists_mocked, init_mocked, read_config_mocked):
+    read_config_mocked.return_value = [{'origin_db': 'test', 'target_table_name': 'test'}]
+    init_mocked.return_value = None
+
+    ctx = sql_id = engine = \
+        skip_remote_preparation = force_copy_files = validate =\
+        force_remote = max_query_size = destination_port =\
+        destination_host_username = destination_host_password = destination_hdfs_root_path = None
+
+    force = True
+    conf = '/path/to/conf'
+    destination_host = 'test'
+
+    hive.hive_dataimport(
+        ctx, conf, sql_id, engine, 
+        skip_remote_preparation, force_copy_files, validate, force,
+        force_remote, max_query_size, destination_host, destination_port,
+        destination_host_username, destination_host_password, destination_hdfs_root_path
+    )
+
+    init_mocked.assert_called_once_with(
+        max_query_size=max_query_size,
+        destination_host=destination_host,
+        destination_port=destination_port,
+        destination_host_username=destination_host_username,
+        destination_host_password=destination_host_password,
+        destination_hdfs_root_path=destination_hdfs_root_path,
+        origin_db='test',
+        target_table_name='test',
+        engine=engine,
+    )
+    import_sample_mocked.assert_called_once_with(
+        create_temp_table=True,
+        copy_files=None,
+        validate_query=None,
+        force_create_remote_table=None
+    )
+
+
+@mock.patch('marvin_python_toolbox.management.hive.read_config')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.__init__')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.table_exists')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.import_sample')
+def test_hive_dataimport_with_config_sql_id(import_sample_mocked, table_exists_mocked, init_mocked, read_config_mocked):
+    read_config_mocked.return_value = [
+        {'origin_db': 'test', 'target_table_name': 'test', 'sql_id': 'test'},
+        {'origin_db': 'bla', 'target_table_name': 'bla', 'sql_id': 'bla'},
+    ]
+    init_mocked.return_value = None
+
+    ctx = sql_id = engine = \
+        skip_remote_preparation = force_copy_files = validate =\
+        force_remote = max_query_size = destination_port =\
+        destination_host_username = destination_host_password = destination_hdfs_root_path = None
+
+    sql_id = 'test'
+    force = True
+    conf = '/path/to/conf'
+    destination_host = 'test'
+
+    hive.hive_dataimport(
+        ctx, conf, sql_id, engine, 
+        skip_remote_preparation, force_copy_files, validate, force,
+        force_remote, max_query_size, destination_host, destination_port,
+        destination_host_username, destination_host_password, destination_hdfs_root_path
+    )
+
+    init_mocked.assert_called_once_with(
+        max_query_size=max_query_size,
+        destination_host=destination_host,
+        destination_port=destination_port,
+        destination_host_username=destination_host_username,
+        destination_host_password=destination_host_password,
+        destination_hdfs_root_path=destination_hdfs_root_path,
+        origin_db='test',
+        target_table_name='test',
+        sql_id='test',
+        engine=engine,
+    )
+    import_sample_mocked.assert_called_once_with(
+        create_temp_table=True,
+        copy_files=None,
+        validate_query=None,
+        force_create_remote_table=None
+    )
+
+
+@mock.patch('marvin_python_toolbox.management.hive.read_config')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.table_exists')
+@mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.import_sample')
+def test_hive_dataimport_with_config_force_false(import_sample_mocked, table_exists_mocked, read_config_mocked):
+    table_exists_mocked.return_value = False
+    read_config_mocked.return_value = [{
+        'origin_db': 'test',
+        'target_table_name': 'test',
+        'origin_queue': 'test',
+        'origin_host': 'test',
+        'sample_sql': 'test',
+        'sql_id': 'test'
+    }]
+
+    ctx = sql_id = engine = \
+        skip_remote_preparation = force_copy_files = validate =\
+        force_remote = max_query_size = destination_port =\
+        destination_host_username = destination_host_password = destination_hdfs_root_path = None
+
+    force = False
+    conf = '/path/to/conf'
+    destination_host = 'test'
+
+    hdi = hive.HiveDataImporter(
+        max_query_size=max_query_size,
+        destination_host=destination_host,
+        destination_port=destination_port,
+        destination_host_username=destination_host_username,
+        destination_host_password=destination_host_password,
+        destination_hdfs_root_path=destination_hdfs_root_path,
+        origin_db='test',
+        target_table_name='test',
+        engine=engine,
+        sql_id='test',
+        origin_host='test',
+        origin_queue='test',
+        sample_sql='test',
+    )
+
+    with mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter', return_value=hdi):
+        hive.hive_dataimport(
+            ctx, conf, sql_id, engine, 
+            skip_remote_preparation, force_copy_files, validate, force,
+            force_remote, max_query_size, destination_host, destination_port,
+            destination_host_username, destination_host_password, destination_hdfs_root_path
+        )
+
+        table_exists_mocked.assert_called_once_with(
+            host=hdi.destination_host, db=hdi.origin_db, table=hdi.target_table_name
+        )
+
+        import_sample_mocked.assert_called_once_with(
+            create_temp_table=True,
+            copy_files=None,
+            validate_query=None,
+            force_create_remote_table=None
+        )
+
+
+@mock.patch('marvin_python_toolbox.management.hive.json')
+@mock.patch('marvin_python_toolbox.management.hive.os.path')
+def test_read_config_with_existing_path(path_mocked, json_mocked):
+    path_mocked.exists.return_value = True
+    path_mocked.join.return_value = 'test.conf'
+
+    mocked_open = mock.mock_open()
+    with mock.patch('marvin_python_toolbox.management.hive.open', mocked_open, create=True):
+        hive.read_config("test.conf")
+
+    mocked_open.assert_called_once_with('test.conf', 'r')
+    json_mocked.load.assert_called_once_with(mocked_open())
+
+
+@mock.patch('marvin_python_toolbox.management.hive.json')
+@mock.patch('marvin_python_toolbox.management.hive.os.path')
+def test_read_config_with_not_existing_path(path_mocked, json_mocked):
+    path_mocked.exists.return_value = False
+    path_mocked.join.return_value = 'test.conf'
+
+    mocked_open = mock.mock_open()
+    with mock.patch('marvin_python_toolbox.management.hive.open', mocked_open, create=True):
+        hive.read_config("test.conf")
+
+    mocked_open.assert_not_called()
+    json_mocked.load.assert_not_called()
+
+
+class TestHiveDataImporter:
+
+    def setup(self):
+        self.hdi = hive.HiveDataImporter(
+            max_query_size=13,
+            destination_host='test',
+            destination_port=None,
+            destination_host_username=None,
+            destination_host_password=None,
+            destination_hdfs_root_path='/tmp',
+            origin_db='test',
+            target_table_name='test',
+            engine='test',
+            sql_id='test',
+            origin_host='test',
+            origin_queue='test',
+            sample_sql='test',
+        )
+
+        self.mock_methods = {
+            'get_createtable_ddl': mock.DEFAULT,
+            'get_partitions': mock.DEFAULT,
+            'has_partitions': mock.DEFAULT,
+            'create_database': mock.DEFAULT,
+            'table_exists': mock.DEFAULT,
+            'drop_table': mock.DEFAULT,
+            'create_table': mock.DEFAULT,
+            'populate_table': mock.DEFAULT,
+            'get_table_location': mock.DEFAULT,
+            'generate_table_location': mock.DEFAULT,
+            'hdfs_dist_copy': mock.DEFAULT,
+            'create_external_table': mock.DEFAULT,
+            'refresh_partitions': mock.DEFAULT,
+            'drop_view': mock.DEFAULT,
+            'create_view': mock.DEFAULT,
+            'validade_query': mock.DEFAULT,
+            'get_connection': mock.DEFAULT,
+            'print_finish_step': mock.DEFAULT,
+            'print_start_step': mock.DEFAULT
+        }
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.count_rows')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.retrieve_data_sample')
+    def test_validade_query(self, retrieve_mocked, connection_mocked, count_rows_mocked):
+        count_rows_mocked.return_value = 1
+        connection_mocked.return_value = 'connection_mocked'
+        retrieve_mocked.return_value = {'estimate_query_mean_per_line': 42}
+
+        self.hdi.validade_query()
+
+        connection_mocked.assert_called_once_with(
+            host=self.hdi.origin_host, 
+            db=self.hdi.origin_db, 
+            queue=self.hdi.origin_queue
+        )
+        count_rows_mocked.assert_called_once_with(conn='connection_mocked', sql=self.hdi.sample_sql)
+        retrieve_mocked.assert_called_once_with(conn='connection_mocked', full_table_name=self.hdi.full_table_name)
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
+    def test_table_exists_table_not_exists(self, connection_mocked, show_log_mocked):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        cursor.fetchall.return_value = []
+        connection_mocked.return_value = conn
+
+        table_exists = self.hdi.table_exists(host='host', db='db', table='table')
+
+        show_log_mocked.assert_called_once_with(cursor)
+        assert table_exists is False
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
+    def test_table_exists_table_exists(self, connection_mocked, show_log_mocked):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        cursor.fetchall.return_value = ['test']
+        connection_mocked.return_value = conn
+
+        table_exists = self.hdi.table_exists(host='host', db='db', table='table')
+
+        show_log_mocked.assert_has_calls([mock.call(cursor)] * 2)
+        assert table_exists is True
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.drop_table')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.generate_table_location')
+    def test_reset_remote_tables_without_valids_tables(self, tb_loc_mock, ssh_cli_mock, conn_mock, 
+        delete_mock, drop_mock, log_mock):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        cursor.fetchall.return_value = []
+        conn_mock.return_value = conn
+
+        self.hdi.reset_remote_tables()
+
+        conn_mock.assert_called_once_with(
+            host=self.hdi.origin_host, 
+            db=self.hdi.temp_db_name, 
+            queue=self.hdi.origin_queue
+        )
+        log_mock.assert_called_once_with(cursor)
+
+        drop_mock.assert_not_called()
+        tb_loc_mock.assert_not_called()
+        delete_mock.assert_not_called()
+        ssh_cli_mock.assert_not_called()
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.drop_table')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.generate_table_location')
+    def test_reset_remote_tables_with_valids_tables(self, tb_loc_mock, ssh_cli_mock, 
+        conn_mock, delete_mock, drop_mock, log_mock):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        cursor.fetchall.return_value = [['test']]
+        conn_mock.return_value = conn
+
+        tb_loc_mock.return_value = 'test'
+        ssh_cli_mock.return_value = 'test'
+
+        self.hdi.reset_remote_tables()
+
+        conn_mock.assert_called_once_with(
+            host=self.hdi.origin_host, 
+            db=self.hdi.temp_db_name, 
+            queue=self.hdi.origin_queue
+        )
+        log_mock.assert_called_once_with(cursor)
+
+        drop_mock.assert_called_once_with(conn=conn, table_name="marvin.test")
+        tb_loc_mock.assert_called_once_with(
+            self.hdi.destination_hdfs_root_path,
+            self.hdi.origin_host,
+            self.hdi.temp_db_name + '.db',
+            "test"
+        )
+        delete_mock.assert_called_once_with('test', 'test')
+        ssh_cli_mock.assert_called_once_with(
+            self.hdi.origin_host,
+            self.hdi.destination_host_username,
+            self.hdi.destination_host_password
+        )
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.validade_query')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_connection')
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.print_finish_step')
+    def test_import_sample_with_invalid_query_and_flag_true_stop(self, finish_step_mock, conn_mock, val_query_mock):
+        val_query_mock.return_value = False
+
+        self.hdi.import_sample(validate_query=True)
+
+        val_query_mock.assert_called_once_with()
+        finish_step_mock.assert_called_once_with()
+        conn_mock.assert_not_called()
+
+    @mock.patch('marvin_python_toolbox.management.hive.print')
+    def test_import_sample_with_invalid_query_and_flag_false_dont_stop(self, print_mocked):
+        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
+            **self.mock_methods
+        ) as mocks:
+
+            self.hdi.import_sample(validate_query=False)
+
+            assert mocks['print_finish_step'].call_count == 6
+            assert mocks['get_connection'].call_count == 5
+            
+            mocks['validade_query'].assert_not_called()
+
    @mock.patch('marvin_python_toolbox.management.hive.print')
    def test_import_sample_with_partitions_stop(self, print_mocked):
        # When the source table has partitions, import_sample fetches the create
        # DDL, extracts partitions from it, and does not reach create_database
        # (only 2 connections are opened before the flow stops).
        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
            **self.mock_methods
        ) as mocks:

            conn = mock.MagicMock()
            mocks['has_partitions'].return_value = True
            mocks['get_connection'].return_value = conn

            self.hdi.import_sample(validate_query=True)

            assert mocks['get_connection'].call_count == 2
            mocks['get_createtable_ddl'].assert_called_once_with(
                conn=conn,
                origin_table_name=self.hdi.target_table_name,
                dest_table_name=self.hdi.temp_table_name
            )
            mocks['get_partitions'].assert_called_once_with(
                mocks['get_createtable_ddl'].return_value
            )
            mocks['create_database'].assert_not_called()
+
    @mock.patch('marvin_python_toolbox.management.hive.print')
    def test_import_sample_with_create_temp_table_false_dont_call_create_table(self, print_mocked):
        # create_temp_table=False must skip the whole temp-table branch:
        # no existence check, drop, create or populate.
        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
            **self.mock_methods
        ) as mocks:

            self.hdi.import_sample(create_temp_table=False)

            mocks['table_exists'].assert_not_called()
            mocks['drop_table'].assert_not_called()
            mocks['create_table'].assert_not_called()
            mocks['populate_table'].assert_not_called()
+
    @mock.patch('marvin_python_toolbox.management.hive.print')
    def test_import_sample_with_create_temp_table_true_call_create_table(self, print_mocked):
        # With create_temp_table=True and force_create_remote_table=True the
        # temp table is (re)created: two drops, one create, one populate.
        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
            **self.mock_methods
        ) as mocks:

            mocks['has_partitions'].return_value = False
            self.hdi.import_sample(create_temp_table=True, force_create_remote_table=True)

            assert mocks['drop_table'].call_count == 2
            assert mocks['create_table'].call_count == 1
            assert mocks['populate_table'].call_count == 1
+
    @mock.patch('marvin_python_toolbox.management.hive.print')
    def test_import_sample(self, print_mocked):
        # Full happy path: valid query, no partitions — all six steps complete
        # and five connections are opened.
        with mock.patch.multiple('marvin_python_toolbox.management.hive.HiveDataImporter',
            **self.mock_methods
        ) as mocks:

            mocks['validade_query'].return_value = True
            mocks['has_partitions'].return_value = False
            self.hdi.import_sample()

            assert mocks['print_finish_step'].call_count == 6
            assert mocks['get_connection'].call_count == 5
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.clean_ddl')
+    def test_get_createtable_ddl(self, clean_ddl_mocked):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        cursor.fetchall.return_value = [['l1'], ['l2']]
+        dll = mock.MagicMock()
+        clean_ddl_mocked.return_value = dll
+
+        self.hdi.get_createtable_ddl(conn, 'marvin', 'test')
+
+        cursor.execute.assert_called_once_with("SHOW CREATE TABLE marvin")
+        clean_ddl_mocked.assert_called_once_with('l1l2', remove_formats=False, remove_general=True)
+        dll.replace.assert_called_once_with('marvin', 'test')
+        cursor.close.assert_called_once_with()
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    def test_execute_db_command(self, show_log_mocked):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        command = "bla bla bla"
+
+        self.hdi._execute_db_command(conn, command)
+
+        cursor.execute.assert_called_once_with(command)
+        show_log_mocked.assert_called_once_with(cursor)
+        cursor.close.assert_called_once_with()
+
    @mock.patch('marvin_python_toolbox.management.hive.hive')
    def test_get_connection(self, pyhive_mocked):
        # get_connection must forward host/db/queue into pyhive's connect call.
        # NOTE(review): the ' hive.exec.dynamic.partition.mode' key carries a
        # leading space — presumably mirroring hive.py; confirm it is intended.
        host = 'test'
        self.hdi.get_connection(host, db='DEFAULT', queue='default')

        pyhive_mocked.connect.assert_called_once_with(
            host=host, database='DEFAULT',
            configuration={'mapred.job.queue.name': 'default',
                ' hive.exec.dynamic.partition.mode': 'nonstrict'}
        )
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    def test_retrieve_data_sample(self, show_log_mocked):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        conn.cursor.return_value = cursor
+        cursor.description = [('table.col', 'type')]
+        cursor.fetchall.return_value = ['test']
+
+        full_table_name = 'test'
+        sample_limit = 10
+
+        data = self.hdi.retrieve_data_sample(conn, full_table_name, sample_limit)
+        
+        sql = "SELECT * FROM {} TABLESAMPLE ({} ROWS)".format(full_table_name, sample_limit)
+
+        cursor.execute.assert_called_once_with(sql)
+        assert data['data_header'][0]['col'] == 'col'
+        assert data['data_header'][0]['table'] == 'table'
+        assert data['data_header'][0]['type'] == 'type'
+        assert data['total_lines'] == 1
+        assert data['data'] == ['test']
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
+    def test_count_rows(self, show_log_mocked):
+        cursor = mock.MagicMock()
+        conn = mock.MagicMock()
+        cursor.fetchone.return_value = [42]
+        conn.cursor.return_value = cursor
+
+        sql = "SELECT COL1, COL2 FROM TABLE"
+        count = self.hdi.count_rows(conn, sql)
+
+        assert count == 42
+        cursor.execute.assert_called_once_with("SELECT COUNT(1) FROM TABLE")
+        show_log_mocked.assert_called_once_with(cursor)
+        cursor.close.assert_called_once_with()
+
+    @mock.patch('marvin_python_toolbox.management.hive.logger')
+    def test_show_log(self, logger_mocked):
+        cursor = mock.MagicMock()
+        cursor.fetch_logs.return_value = ['log log log']
+
+        self.hdi.show_log(cursor)
+
+        logger_mocked.debug.assert_called_once_with('log log log')
+
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.show_log')
    def test_save_data(self, show_log_mocked):
        # save_data builds an INSERT ... VALUES (%s, ...) statement from the
        # header columns and bulk-inserts the data rows via executemany.
        cursor = mock.MagicMock()
        conn = mock.MagicMock()
        conn.cursor.return_value = cursor

        table = 'test'
        data = {
            'total_lines': 2,
            'data_header': [
                {'col': 'test_col_1'},
                {'col': 'test_col_2'},
            ],
            # The first 'data' entry is not inserted (see the executemany
            # assertion) — presumably treated as a header row; confirm in hive.py.
            'data': [
                'header',
                'test_val_1',
                'test_val_2',
            ]
        }
        self.hdi.save_data(conn, table, data)

        dml = "INSERT INTO test (test_col_1, test_col_2) VALUES (%s, %s)"
        cursor.executemany.assert_called_once_with(dml, [('test_val_1',), ('test_val_2',)])
        show_log_mocked.assert_called_once_with(cursor)
        cursor.close.assert_called_once_with()
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
+    def test_populate_table_with_partitions(self, exec_comm_mock):
+        conn = None
+        table_name = 'test'
+        sql = 'bla bla bla'
+        partitions = [{'col': 'test1'}, {'col': 'test2'}]
+
+        self.hdi.populate_table(conn, table_name, partitions, sql)
+
+        dml = "INSERT OVERWRITE TABLE test PARTITION (test1, test2) bla bla bla"
+        exec_comm_mock.assert_called_once_with(conn, dml)
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
+    def test_populate_table_without_partitions(self, exec_comm_mock):
+        conn = None
+        table_name = 'test'
+        sql = 'bla bla bla'
+        partitions = []
+
+        self.hdi.populate_table(conn, table_name, partitions, sql)
+
+        dml = "INSERT OVERWRITE TABLE test  bla bla bla"
+        exec_comm_mock.assert_called_once_with(conn, dml)
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
+    def test_create_view(self, exec_comm_mock):
+        conn = None
+        view_name = 'view_test'
+        table_name = 'table_test'
+
+        self.hdi.create_view(conn, view_name, table_name)
+
+        dml = "CREATE VIEW {0} AS SELECT * FROM {1}".format(view_name, table_name)
+        exec_comm_mock.assert_called_once_with(conn, dml)
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._execute_db_command')
+    def test_refresh_partitions(self, exec_comm_mock):
+        conn = None
+        table_name = 'table_test'
+
+        self.hdi.refresh_partitions(conn, table_name)
+
+        sttmt = "MSCK REPAIR TABLE {0}".format(table_name)
+        exec_comm_mock.assert_called_once_with(conn, sttmt)
+
    def test_get_table_location(self):
        # Parses the 'location:' row of the DESCRIBE FORMATTED output.
        # NOTE(review): the expected value swaps the scheme from 'hdfs' to
        # 'hftp' — presumably get_table_location rewrites it; confirm in hive.py.
        cursor = mock.MagicMock()
        conn = mock.MagicMock()
        cursor.fetchall.return_value = [[' location: ', ' hdfs://test ']]
        conn.cursor.return_value = cursor
        table_name = 'test'

        loc = self.hdi.get_table_location(conn, table_name)

        cursor.execute.assert_called_once_with("DESCRIBE FORMATTED test")
        assert loc == 'hftp://test'
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
+    def test_delete_files(self, cmd_mocked):
+        ssh = 'ssh'
+        url = 'test.com'
+        self.hdi.delete_files(ssh, url)
+
+        cmd = "hdfs dfs -rm -R 'test.com'"
+        cmd_mocked.assert_called_once_with(ssh, cmd)
+
+    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
+    def test_copy_files(self, cmd_mocked):
+        ssh = 'ssh'
+        origin = "/home/"
+        dest = "/tmp/"
+        self.hdi.copy_files(ssh, origin, dest)
+
+        cmd = "hadoop distcp --update '/home/' '/tmp/'"
+        cmd_mocked.assert_called_once_with(ssh, cmd)
+
+    @mock.patch('marvin_python_toolbox.management.hive.logger')
+    def test_hdfs_commands(self, logger_mocked):
+        i = mock.MagicMock()
+        o = mock.MagicMock()
+        e = mock.MagicMock()
+        ssh = mock.MagicMock()
+        o.readlines.return_value = 'output'
+        e.readlines.return_value = 'error'
+        ssh.exec_command.return_value = (i, o, e)
+        cmd = "command"
+
+        out, err = self.hdi._hdfs_commands(ssh, cmd)
+
+        assert (out, err) == ('output', 'error')
+        logger_mocked.debug.assert_any_call("Executing remote command: command")
+        logger_mocked.debug.assert_any_call("output")
+        logger_mocked.debug.assert_any_call("error")
+
+    @mock.patch('marvin_python_toolbox.management.hive.AutoAddPolicy', spec=True)
+    @mock.patch('marvin_python_toolbox.management.hive.SSHClient.connect')
+    @mock.patch('marvin_python_toolbox.management.hive.SSHClient.set_missing_host_key_policy')
+    def test_get_ssh_client(self, set_missing_mocked, connect_mocked, AutoAddPolicyMocked):
+        hdfs_host = 'hdfs://test.com'
+        hdfs_port = '1234'
+        username = 'user'
+        password = 'pass'
+        self.hdi._get_ssh_client(hdfs_host, hdfs_port, username, password)
+
+        set_missing_mocked.assert_called_once_with(AutoAddPolicyMocked.return_value)
+        connect_mocked.assert_called_once_with(
+            hostname=hdfs_host, port=hdfs_port, username=username, password=password
+        )
+
    @mock.patch('marvin_python_toolbox.management.hive.sys')
    @mock.patch('marvin_python_toolbox.management.hive.logger')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.copy_files')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
    def test_hdfs_dist_copy(self, ssh_cli_mock, hdfs_comm_mock, del_files_mock, copy_mock, logger_mock, sys_mock):
        # Happy path: origin and destination file counts match (both 42) and
        # copy_files reports no errors, so nothing is deleted, nothing is
        # logged as an error, and the process is not aborted.
        hdfs_comm_mock.return_value = (42, None)
        copy_mock.return_value = (None, None)
        ssh = mock.MagicMock()
        ssh_cli_mock.return_value = ssh

        force = False
        hdfs_host = 'hdfs://test.com'
        hdfs_port = 1234
        origin = '/home/'
        dest = '/tmp/'

        self.hdi.hdfs_dist_copy(force, hdfs_host, hdfs_port, origin, dest, username=None, password=None)

        ssh_cli_mock.assert_called_once_with(hdfs_host, hdfs_port, None, None)
        del_files_mock.assert_not_called()
        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/home/' | grep -E '^-' | wc -l")
        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/tmp/' | grep -E '^-' | wc -l")
        logger_mock.debug.assert_not_called()
        sys_mock.exit.assert_not_called()
+
    @mock.patch('marvin_python_toolbox.management.hive.sys')
    @mock.patch('marvin_python_toolbox.management.hive.logger')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.copy_files')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
    def test_hdfs_dist_copy_with_force(self, ssh_cli_mock, hdfs_comm_mock, del_files_mock, copy_mock, logger_mock, sys_mock):
        # force=True must delete the destination before copying; everything
        # else behaves like the happy path (matching counts, no errors).
        hdfs_comm_mock.return_value = (42, None)
        copy_mock.return_value = (None, None)
        ssh = mock.MagicMock()
        ssh_cli_mock.return_value = ssh

        force = True
        hdfs_host = 'hdfs://test.com'
        hdfs_port = 1234
        origin = '/home/'
        dest = '/tmp/'

        self.hdi.hdfs_dist_copy(force, hdfs_host, hdfs_port, origin, dest, username=None, password=None)

        ssh_cli_mock.assert_called_once_with(hdfs_host, hdfs_port, None, None)
        del_files_mock.assert_called_once_with(ssh, dest)
        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/home/' | grep -E '^-' | wc -l")
        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/tmp/' | grep -E '^-' | wc -l")
        logger_mock.debug.assert_not_called()
        sys_mock.exit.assert_not_called()
+
    @mock.patch('marvin_python_toolbox.management.hive.sys')
    @mock.patch('marvin_python_toolbox.management.hive.logger')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.copy_files')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.delete_files')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._hdfs_commands')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter._get_ssh_client')
    def test_hdfs_dist_copy_error_copy(self, ssh_cli_mock, hdfs_comm_mock, del_files_mock, copy_mock, logger_mock, sys_mock):
        # copy_files returns stderr lines and the origin/destination counts
        # diverge (42 vs 13): each error line must be logged and the process
        # stopped via sys.exit.
        hdfs_comm_mock.side_effect = [(42, None), (13, None)]
        copy_mock.return_value = (None, ['error'])
        ssh = mock.MagicMock()
        ssh_cli_mock.return_value = ssh

        force = False
        hdfs_host = 'hdfs://test.com'
        hdfs_port = 1234
        origin = '/home/'
        dest = '/tmp/'

        self.hdi.hdfs_dist_copy(force, hdfs_host, hdfs_port, origin, dest, username=None, password=None)

        ssh_cli_mock.assert_called_once_with(hdfs_host, hdfs_port, None, None)
        del_files_mock.assert_not_called()
        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/home/' | grep -E '^-' | wc -l")
        hdfs_comm_mock.assert_any_call(ssh, "hdfs dfs -ls -R '/tmp/' | grep -E '^-' | wc -l")
        logger_mock.debug.assert_called_once_with('error')
        sys_mock.exit.assert_called_once_with("Stoping process!")
+
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.clean_ddl')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.create_table')
    @mock.patch('marvin_python_toolbox.management.hive.HiveDataImporter.get_table_format')
    def test_create_external_table(self, table_formar_mock, create_table_mock, clean_ddl_mock):
        # create_external_table rewrites the cleaned DDL into a CREATE EXTERNAL
        # TABLE ... STORED AS <format> LOCATION '<path>' statement.
        # NOTE(review): 'table_formar_mock' is a typo for 'table_format_mock'.
        table_formar_mock.return_value = 'test'
        conn = None
        temp_table_name = 'temp'
        ddl = "CREATE TABLE bla bla bla"
        parquet_file_location = "/tmp/"
        clean_ddl_mock.return_value = ddl

        self.hdi.create_external_table(conn, temp_table_name, ddl, parquet_file_location)

        table_formar_mock.assert_called_once_with(ddl)
        clean_ddl_mock.assert_called_once_with(ddl, remove_formats=True, remove_general=False)
        ddl = "CREATE EXTERNAL TABLE bla bla bla STORED AS test LOCATION '/tmp/'"
        create_table_mock.assert_called_once_with(conn=conn, table_name=temp_table_name, ddl=ddl)
\ No newline at end of file
diff --git a/tests/management/test_notebook.py b/tests/management/test_notebook.py
new file mode 100644
index 0000000..2a31e0d
--- /dev/null
+++ b/tests/management/test_notebook.py
@@ -0,0 +1,88 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# from click.testing import CliRunner
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+import os
+from marvin_python_toolbox.management.notebook import notebook, lab
+
+
class mocked_ctx(object):
    # Minimal stand-in for a click context: the commands under test only read
    # ctx.obj['base_path'].
    obj = {'base_path': '/tmp'}
+
+
@mock.patch('marvin_python_toolbox.management.notebook.sys')
@mock.patch('marvin_python_toolbox.management.notebook.os.system')
def test_notebook(system_mocked, sys_mocked):
    """Without security the notebook command appends an empty --NotebookApp.token=."""
    system_mocked.return_value = 1

    notebook(mocked_ctx(), 8888, False, '/opt/spark/conf', False)

    config_path = os.environ["MARVIN_ENGINE_PATH"] + '/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py'
    system_mocked.assert_called_once_with(
        'SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf '
        'jupyter notebook --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 '
        '--no-browser --config ' + config_path + ' --NotebookApp.token='
    )
+
+
@mock.patch('marvin_python_toolbox.management.notebook.sys')
@mock.patch('marvin_python_toolbox.management.notebook.os.system')
def test_notebook_with_security(system_mocked, sys_mocked):
    """With security enabled the token flag is omitted so Jupyter keeps its token."""
    system_mocked.return_value = 1

    notebook(mocked_ctx(), 8888, True, '/opt/spark/conf', False)

    config_path = os.environ["MARVIN_ENGINE_PATH"] + '/marvin_python_toolbox/extras/notebook_extensions/jupyter_notebook_config.py'
    system_mocked.assert_called_once_with(
        'SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf '
        'jupyter notebook --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 '
        '--no-browser --config ' + config_path
    )
+
+
@mock.patch('marvin_python_toolbox.management.notebook.sys')
@mock.patch('marvin_python_toolbox.management.notebook.os.system')
def test_jupyter_lab(system_mocked, sys_mocked):
    """Without security, lab launches jupyter-lab with an empty token."""
    system_mocked.return_value = 1

    lab(mocked_ctx(), 8888, False, '/opt/spark/conf')

    system_mocked.assert_called_once_with(
        'SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf '
        'jupyter-lab --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 '
        '--no-browser --NotebookApp.token='
    )
+
+
@mock.patch('marvin_python_toolbox.management.notebook.sys')
@mock.patch('marvin_python_toolbox.management.notebook.os.system')
def test_jupyter_lab_with_security(system_mocked, sys_mocked):
    """With security enabled, lab keeps Jupyter's default token handling."""
    system_mocked.return_value = 1

    lab(mocked_ctx(), 8888, True, '/opt/spark/conf')

    system_mocked.assert_called_once_with(
        'SPARK_CONF_DIR=/opt/spark/conf YARN_CONF_DIR=/opt/spark/conf '
        'jupyter-lab --notebook-dir /tmp/notebooks --ip 0.0.0.0 --port 8888 '
        '--no-browser'
    )
diff --git a/tests/management/test_pkg.py b/tests/management/test_pkg.py
new file mode 100644
index 0000000..de81226
--- /dev/null
+++ b/tests/management/test_pkg.py
@@ -0,0 +1,205 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the 'License');
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an 'AS IS' BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# from click.testing import CliRunner
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.management.pkg import _clone
+from marvin_python_toolbox.management.pkg import copy
+from marvin_python_toolbox.management.pkg import get_git_branch
+from marvin_python_toolbox.management.pkg import is_git_clean
+from marvin_python_toolbox.management.pkg import get_git_tags
+from marvin_python_toolbox.management.pkg import get_git_repository_url
+from marvin_python_toolbox.management.pkg import get_git_tag
+from marvin_python_toolbox.management.pkg import get_git_commit
+from marvin_python_toolbox.management.pkg import get_tag_from_repo_url
+from marvin_python_toolbox.management.pkg import get_repos_from_requirements
+
+
@mock.patch('marvin_python_toolbox.management.pkg.open')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.join')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
def test_get_repos_from_requirements(curdir_mocked, join_mocked, open_mocked):
    """path=None falls back to the current directory's requirements.txt."""
    join_mocked.return_value = '/tmp'

    get_repos_from_requirements(path=None)
    join_mocked.assert_called_with(curdir_mocked, 'requirements.txt')
    open_mocked.assert_called_with('/tmp', 'r')

    # An explicit path is joined instead of the curdir default.
    get_repos_from_requirements(path='/path')
    join_mocked.assert_called_with('/path', 'requirements.txt')
    open_mocked.assert_called_with('/tmp', 'r')
+
+
def test_get_tag_from_repo_url():
    """A '@' in the repo URL marks the tag suffix; otherwise the tag is None."""
    tagged = 'http://www.xxx.org:80/tag@/repo.html'
    assert get_tag_from_repo_url([tagged]) == {tagged: '/repo.html'}

    untagged = 'http://www.xxx.org:80/tag/repo.html'
    assert get_tag_from_repo_url([untagged]) == {untagged: None}
+
+
@mock.patch('marvin_python_toolbox.management.pkg.git_clone')
def test_clone(git_mocked):
    """_clone performs a shallow (depth=1), no-checkout clone and returns (repo, result)."""
    git_mocked.return_value = 1

    assert _clone('http://xxx.git') == ('http://xxx.git', 1)
    git_mocked.assert_called_once_with('http://xxx.git', checkout=False, depth=1)
+
+
@mock.patch('marvin_python_toolbox.management.pkg.shutil.ignore_patterns')
@mock.patch('marvin_python_toolbox.management.pkg.shutil.copytree')
def test_copy(copytree_mocked, ignore_mocked):
    """copy forwards src/dest to copytree with the unpacked ignore patterns."""
    ignore_mocked.return_value = 1
    # NOTE(review): ('.git') is a plain string, not a 1-tuple, so '*patterns'
    # unpacks its characters; call and assertion unpack identically so the
    # test still passes — confirm whether ('.git',) was intended.
    patterns = ('.git')

    copy('/xpto', '/xpto_dest', patterns)

    copytree_mocked.assert_called_once_with('/xpto', '/xpto_dest', ignore=1)
    ignore_mocked.assert_called_once_with(*patterns)
+
+
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
def test_get_git_branch(popen_mocked, curdir_mocked, pipe_mocked):
    """The branch is read from 'git rev-parse --abbrev-ref HEAD' and stripped."""
    proc = mock.MagicMock()
    proc.stdout.read.return_value = b'branch '
    popen_mocked.return_value = proc

    assert get_git_branch() == 'branch'
    popen_mocked.assert_called_once_with(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stdout=pipe_mocked, cwd=curdir_mocked)

    # An explicit path becomes the subprocess working directory.
    get_git_branch(path='/tmp')
    popen_mocked.assert_called_with(
        ['git', 'rev-parse', '--abbrev-ref', 'HEAD'], stdout=pipe_mocked, cwd='/tmp')
+
+
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
def test_get_git_tag(popen_mocked, curdir_mocked, pipe_mocked):
    """get_git_tag shells out to 'git describe --tags tag' and strips the output."""
    proc = mock.MagicMock()
    proc.stdout.read.return_value = b'tag '
    popen_mocked.return_value = proc

    assert get_git_tag() == 'tag'
    popen_mocked.assert_called_with(
        ['git', 'describe', '--tags', 'tag'], stdout=pipe_mocked, cwd=curdir_mocked)

    get_git_tag(path='/tmp')
    popen_mocked.assert_called_with(
        ['git', 'describe', '--tags', 'tag'], stdout=pipe_mocked, cwd='/tmp')
+
+
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
def test_get_git_commit(popen_mocked, curdir_mocked, pipe_mocked):
    """HEAD is resolved via rev-parse; a tag is resolved via rev-list -n 1."""
    proc = mock.MagicMock()
    proc.stdout.read.return_value = b'commit '
    popen_mocked.return_value = proc

    assert get_git_commit() == 'commit'
    popen_mocked.assert_called_once_with(
        ['git', 'rev-parse', 'HEAD'], stdout=pipe_mocked, cwd=curdir_mocked)

    get_git_commit(path='/tmp')
    popen_mocked.assert_called_with(['git', 'rev-parse', 'HEAD'], stdout=pipe_mocked, cwd='/tmp')

    get_git_commit(tag='tag')
    popen_mocked.assert_called_with(['git', 'rev-list', '-n', '1', 'tag'], stdout=pipe_mocked, cwd=curdir_mocked)
+
+
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
def test_get_git_repository_url(popen_mocked, curdir_mocked, pipe_mocked):
    """The origin URL comes from 'git config --get remote.origin.url', stripped."""
    proc = mock.MagicMock()
    proc.stdout.read.return_value = b'url '
    popen_mocked.return_value = proc

    assert get_git_repository_url() == 'url'
    popen_mocked.assert_called_once_with(
        ['git', 'config', '--get', 'remote.origin.url'], stdout=pipe_mocked, cwd=curdir_mocked)

    get_git_repository_url(path='www.xxx.com')
    popen_mocked.assert_called_with(
        ['git', 'config', '--get', 'remote.origin.url'], stdout=pipe_mocked, cwd='www.xxx.com')
+
+
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
def test_get_git_tags(popen_mocked, curdir_mocked, pipe_mocked):
    """Tags are parsed from the newline-separated 'git tag' output."""
    proc = mock.MagicMock()
    proc.stdout.read.return_value = 'git\ntags '
    popen_mocked.return_value = proc

    assert get_git_tags() == ['tags', 'git']
    popen_mocked.assert_called_once_with(['git', 'tag'], stdout=pipe_mocked, cwd=curdir_mocked)

    get_git_tags(path='/tmp')
    popen_mocked.assert_called_with(['git', 'tag'], stdout=pipe_mocked, cwd='/tmp')
+
+
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.PIPE')
@mock.patch('marvin_python_toolbox.management.pkg.subprocess.Popen')
@mock.patch('marvin_python_toolbox.management.pkg.os.path.curdir')
def test_is_git_clean(curdir_mocked, popen_mocked, pipe_mocked):
    """is_git_clean returns the raw stdout of 'git diff --quiet HEAD'."""
    proc = mock.MagicMock()
    proc.stdout.read.return_value = 'done'
    popen_mocked.return_value = proc

    assert is_git_clean() == 'done'
    popen_mocked.assert_called_once_with(
        ['git', 'diff', '--quiet', 'HEAD'], stdout=pipe_mocked, cwd=curdir_mocked)

    assert is_git_clean('/tmp') == 'done'
    popen_mocked.assert_called_with(
        ['git', 'diff', '--quiet', 'HEAD'], stdout=pipe_mocked, cwd='/tmp')
diff --git a/tests/test_loader.py b/tests/test_loader.py
new file mode 100644
index 0000000..1cdc39e
--- /dev/null
+++ b/tests/test_loader.py
@@ -0,0 +1,38 @@
+#!/usr/bin/env python
+# coding=utf-8
+
+# Copyright [2017] [B2W Digital]
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+try:
+    import mock
+except ImportError:
+    import unittest.mock as mock
+
+from marvin_python_toolbox.loader import load_commands_from_file
+
+
@mock.patch("marvin_python_toolbox.loader.isinstance")
@mock.patch("marvin_python_toolbox.loader.getmembers")
@mock.patch("marvin_python_toolbox.loader.imp.load_source")
def test_load_commands_from_file(load_source_mocked, getmembers_mocked, isinstance_mocked):
    """The file is loaded as module 'custom_commands' and its members inspected."""
    load_source_mocked.return_value = 'source'

    commands = load_commands_from_file('/tmp')

    load_source_mocked.assert_called_once_with('custom_commands', '/tmp')
    getmembers_mocked.assert_called_once_with('source')
    assert commands == []
diff --git a/tox.ini b/tox.ini
new file mode 100644
index 0000000..12c79da
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist = py27
+
+[testenv]
+deps=pytest
+     pytest-cov
+     mock
+commands=py.test --cov={envsitepackagesdir}/marvin_python_toolbox --cov-report html --cov-report xml {posargs}