Merge pull request #706 from apache/DLAB-1680

[DLAB-1680] Updated documentation(3)[RC]
diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
index cf4e3a3..2b43591 100644
--- a/RELEASE_NOTES.md
+++ b/RELEASE_NOTES.md
@@ -1,60 +1,69 @@
+#
 # DLab is Self-service, Fail-safe Exploratory Environment for Collaborative Data Science Workflow
 
-## New features in v2.2
+## New features in v2.3
+
 **All Cloud platforms:**
-- added concept of Projects into DLab. Now users can unite under Projects and collaborate
-- for ease of use we've added web terminal for all DLab Notebooks
-- updated versions of installed software:
-	* angular 8.2.7
+- Added support for multi-Cloud orchestration for AWS, Azure and GCP. Now, a single DLab instance can connect to the above Clouds, by means of respective set of API's, deployed on cloud endpoints;
+- Added JupyterLab v.0.35.6 template
+- Updated versions of installed software:
+  - Jupyter notebook v.6.0.2;
+  - Apache Zeppelin v.0.8.2;
+  - RStudio v.1.2.5033;
+  - Apache Spark v.2.4.4 for standalone cluster;
+
+**AWS:**
+- Added support of new version of Data Engine Service (EMR) v.5.28.0;
 
 **GCP:**
-- added billing report to monitor Cloud resources usage into DLab, including ability to manage billing quotas
-- updated versions of installed software:
-	* Dataproc 1.3
+- Added support of new version of Data Engine Service (Dataproc) v.1.4;
+- Added new template Superset v.0.35.1;
 
-## Improvements in v2.2
+## Improvements in v2.3
 **All Cloud platforms:**
-- implemented login via KeyCloak to support integration with multiple SAML and OAUTH2 identity providers
-- added DLab version into WebUI
-- augmented ‘Environment management’ page
-- added possibility to tag Notebook from UI
-- added possibility to terminate computational resources via scheduler
+- Grouped project management actions in single Edit project menu for ease of use;
+- Introduced new "project admin" role;
+- SSO now also works for Notebooks;
+- Implemented ability to filter installed libraries;
+- Added possibility to sort by project/user/charges in 'Billing report' page;
+- Added test option for remote endpoint;
 
-**GCP:**
-- added possibility to create Notebook/Data Engine from an AMI image
-
-**AWS and GCP:**
-- UnGit tool now allows working with remote repositories over ssh
-- implemented possibility to view Data Engine Service version on UI after creation
-
-## Bug fixes in v2.2
+## Bug fixes in v2.3
 **All Cloud platforms:**
-- fixed  sparklyr library (r package) installation on RStudio, RStudio with TensorFlow notebooks
+- Fixed a bug when Notebook name should be unique per project for different users, since it was impossible to operate Notebook with the same name after the first instance creation;
+- Fixed a bug when administrator could not stop/terminate Notebook/computational resources created by another user;
+- Fixed a bug when shell interpreter was not showing up for Apache Zeppelin;
+- Fixed a bug when scheduler by start time was not triggered for Data Engine;
+- Fixed a bug when it was possible to start Notebook if project quota was exceeded;
+- Fixed a bug when scheduler for stopping was not triggered after total quota depletion;
 
-**GCP:**
-- fixed a bug when Data Engine creation fails for DeepLearning template
-- fixed a bug when Jupyter does not start successfully after Data Engine Service creation (create Jupyter -> create Data Engine -> stop Jupyter -> Jupyter fails)
-- fixed a bug when DeepLearning creation was failing
-
-## Known issues in v2.2
-**All Cloud platforms:**
-- Notebook name should be unique per project for different users in another case it is impossible to operate Notebook with the same name after the first instance creation
+**AWS:**
+- Fixed a bug when Notebook image/snapshot were still available after SSN termination;
 
 **Microsoft Azure:**
-- DLab deployment  is unavailable if Data Lake is enabled
-- custom image creation from Notebook fails and deletes existed Notebook
+- Fixed a bug when custom image creation from Notebook failed and deleted the existing Notebook of another user;
+- Fixed a bug when detailed billing was not available;
+- Fixed a bug when spark reconfiguration failed on Data Engine;
+- Fixed a bug when billing data was not available after calendar filter usage;
 
-**Refer to the following link in order to view the other major/minor issues in v2.2:**
+## Known issues in v2.3
+**GCP:**
+- SSO is not available for Superset;
 
-[Apache DLab: known issues](https://issues.apache.org/jira/issues/?filter=12347602 "Apache DLab: known issues")
-
-## Known issues caused by cloud provider limitations in v2.2
 **Microsoft Azure:**
-- resource name length should not exceed 80 chars
-- TensorFlow templates are not supported for Red Hat Enterprise Linux
-- low priority Virtual Machines are not supported yet
+- Notebook creation fails on RedHat;
+- Web terminal is not working for Notebooks on remote endpoint only;
+
+Refer to the following link in order to view the other major/minor issues in v2.3:
+
+[Apache DLab: known issues](https://issues.apache.org/jira/issues/?filter=12348876 "Apache DLab: known issues")
+
+## Known issues caused by cloud provider limitations in v2.3
+**Microsoft Azure:**
+- Resource name length should not exceed 80 chars;
+- TensorFlow templates are not supported for RedHat Enterprise Linux;
+- Low priority Virtual Machines are not supported yet;
 
 **GCP:**
-- resource name length should not exceed 64 chars
-- billing data is not available
-- **NOTE:** DLab has not been tested on GCP for Red Hat Enterprise Linux
+- Resource name length should not exceed 64 chars;
+- NOTE: DLab has not been tested on GCP for RedHat Enterprise Linux;
diff --git a/USER_GUIDE.md b/USER_GUIDE.md
index d821a93..78876fc 100644
--- a/USER_GUIDE.md
+++ b/USER_GUIDE.md
@@ -10,7 +10,7 @@
 
 [Login](#login)
 
-[Setup a Gateway/Edge node](#setup_edge_node)
+[Create project](#setup_edge_node)
 
 [Setting up analytical environment and managing computational power](#setup_environmen)
 
@@ -26,35 +26,34 @@
 
         [Deploy Computational resource](#computational_deploy)
 
-        [Stop Apache Spark cluster](#spark_stop)
+        [Stop Standalone Apache Spark cluster](#spark_stop)
 
         [Terminate Computational resource](#computational_terminate)
 
+        [Scheduler](#scheduler)
+
         [Collaboration space](#collaboration_space)
 
                 [Manage Git credentials](#git_creds)
 
                 [Git UI tool (ungit)](#git_ui)
 
-[DLab Health Status Page](#health_page)
+[Administration](#administration)
 
-                [Backup](#backup)
+          [Manage roles](#manage_roles)
 
-                [Manage environment](#manage_environment)
+          [Project management](#project_management)
 
-                [Manage roles](#manage_roles)
+          [Environment management](#environment_management)
 
-                [SSN monitor](#ssn_monitor)
+                [Multiple Cloud endpoints](#multiple_cloud_endpoints)
+
+                [Manage DLab quotas](#manage_dlab_quotas)
 
 [DLab billing report](#billing_page)
 
-[DLab Environment Management Page](#environment_management)
-
 [Web UI filters](#filter)
 
-[Scheduler](#scheduler)
-
-[Key reupload](#key_reupload)
 
 ---------
 # Login <a name="login"></a>
@@ -65,6 +64,9 @@
 
 -   OpenLdap;
 -   Cloud Identity and Access Management service user validation;
+-   KeyCloak integration for seamless SSO experience *;
+
+    * NOTE: in case KeyCloak has been installed and configured to use SSO, please click on "Login with SSO" and use your corporate credentials
 
 | Login error messages               | Reason                                                                           |
 |------------------------------------|----------------------------------------------------------------------------------|
@@ -76,7 +78,7 @@
 
 To stop working with DLab - click on Log Out link at the top right corner of DLab.
 
-After login user will see warning in case of exceeding quota or close to this limit.
+After login user sees warning in case of exceeding quota or close to this limit.
 
 <p align="center" class="facebox-popup"> 
     <img src="doc/exceeded quota.png" alt="Exceeded quota" width="400">
@@ -87,38 +89,35 @@
 </p>
 
 ----------------------------------
-# Setup a Gateway/Edge node <a name="setup_edge_node"></a>
+# Create project <a name="setup_edge_node"></a>
 
-When you log into DLab Web Application, the first thing you will have to setup is a Gateway Node, or an “Edge” Node.
+When you log into DLab Web interface, the first thing you need to do is to create a new project.
 
-To do this click on “Upload” button on “Create initial infrastructure”, select your personal public key and hit “Create” button or click on "Generate" button on “Create initial infrastructure” and save your private key.
+To do this click on “Upload” button on “Projects” page, select your personal public key (or click on "Generate" button), endpoint, group, enable or disable 'Use shared image' and hit “Create” button. Do not forget to save your private key.
 
 <p align="center" class="facebox-popup"> 
-    <img src="doc/upload_or_generate_user_key.png" alt="Upload or generate user key" width="400">
+    <img src="doc/upload_or_generate_user_key.png" alt="Upload or generate user key" width="100%">
 </p>
 
-Please note that you need to have a key pair combination (public and private key) to work with DLab. To figure out how to create public and private key, please click on “Where can I get public key?” on “Create initial infrastructure” dialog. DLab build-in wiki page will guide Windows, MasOS and Linux on how to generate SSH key pairs quickly.
+Please note, that you need to have a key pair combination (public and private key) to work with DLab. To figure out how to create public and private key, please click on “Where can I get public key?” on “Projects” page. DLab built-in wiki page guides Windows, MacOS and Linux users on how to generate SSH key pairs quickly.
 
-After you hit "Create" or "Generate" button, creation of Edge node will start. This process is a one-time operation for each Data Scientist and it might take up-to 10 minutes for DLab to setup initial infrastructure for you. During this process, you will see following popup in your browser:
+Creation of Project starts after hitting "Create" button. This process is a one-time operation for each Data Scientist and it might take up to 10 minutes for DLab to setup initial infrastructure for you. During this process project is in status "Creating".
 
-<p align="center"> 
-    <img src="doc/loading_key.png" alt="Loading user key" width="350">
-</p>
+'Use shared image' enabled means, that an image of particular notebook type is created while first notebook of same type is created in DLab. This image will be available for all DLab users. This image is used for provisioning of further notebooks of same type within DLab. 'Use shared image' disabled means, that image of particular notebook type is created while first notebook of same type is created in DLab. This AMI is available for all users within same project.
 
-As soon as an Edge node is created, Data Scientist will see a blank “List of Resources” page. The message “To start working, please create new environment” will be displayed:
+As soon as Project is created, Data Scientist can create notebook server on “List of Resources” page. The message “To start working, please create new environment” appears on “List of Resources” page:
 
 ![Main page](doc/main_page.png)
 
 ---------------------------------------------------------------------------------------
 # Setting up analytical environment and managing computational power <a name="setup_environmen"></a>
 
-----------------------
+
 ## Create notebook server <a name="notebook_create"></a>
 
 To create new analytical environment from “List of Resources” page click on "Create new" button.
 
-“Create analytical tool” popup will show-up. Data Scientist can choose a preferable analytical tool to be setup. Adding new analytical tools is supported by architecture, so you can expect new templates to show up in upcoming releases.
-
+The "Create analytical tool" popup shows up. Data Scientist can choose the preferred project, endpoint and analytical tool. Adding new analytical toolset is supported by architecture, so you can expect new templates to show up in upcoming releases.
 Currently by means of DLab, Data Scientists can select between any of the following templates:
 
 -   Jupyter
@@ -127,6 +126,8 @@
 -   RStudio with TensorFlow
 -   Jupyter with TensorFlow
 -   Deep Learning (Jupyter + MXNet, Caffe, Caffe2, TensorFlow, CNTK, Theano, Torch and Keras)
+-   JupyterLab
+-   Superset (implemented on GCP)
 
 <p align="center"> 
     <img src="doc/notebook_create.png" alt="Create notebook" width="574">
@@ -134,9 +135,9 @@
 
 After specifying desired template, you should fill in the “Name” and “Instance shape”.
 
-Name field – is just for visual differentiation between analytical tools on “List of resources” dashboard.
+Keep in mind that "Name" field – is just for visual differentiation between analytical tools on “List of resources” dashboard.
 
-Instance shape dropdown, contains configurable list of shapes, which should be chosen depending on the type of analytical work to be performed. Following groups of instance shapes will be showing up with default setup configuration:
+Instance shape dropdown, contains configurable list of shapes, which should be chosen depending on the type of analytical work to be performed. Following groups of instance shapes are showing up with default setup configuration:
 
 <p align="center"> 
     <img src="doc/select_shape.png" alt="Select shape" width="250">
@@ -144,25 +145,29 @@
 
 These groups have T-Shirt based shapes (configurable), that can help Data Scientist to either save money\* and leverage not very powerful shapes (for working with relatively small datasets), or that could boost the performance of analytics by selecting more powerful instance shape.
 
-\* Please refer to official documentation from Amazon that will help you understand what [instance shapes](https://aws.amazon.com/ec2/instance-types/) would be most preferable in your particular DLAB setup. Also, you can use [AWS calculator](https://calculator.s3.amazonaws.com/index.html) to roughly estimate the cost of your environment.
+\* Please refer to official documentation from Amazon that helps you to understand what [instance shapes](https://aws.amazon.com/ec2/instance-types/) are the most preferable in your particular DLAB setup. Also, you can use [AWS calculator](https://calculator.s3.amazonaws.com/index.html) to roughly estimate the cost of your environment.
 
-You can override the default configurations for local spark. The configuration object is referenced as a JSON file. To tune spark configuration check off "Spark configurations" check box and insert JSON format in text box.
+\* Please refer to official documentation from GCP that helps you to understand what [instance shapes](https://cloud.google.com/compute/docs/machine-types) are the most preferable in your particular DLAB setup. Also, you can use [GCP calculator](https://cloud.google.com/products/calculator) to roughly estimate the cost of your environment.
 
-After you Select the template, fill in the Name and choose needed instance shape - you need to click on "Create" button for your instance to start creating. Corresponding record will show up in your dashboard:
+\* Please refer to official documentation from Microsoft Azure that helps you to understand what [virtual machine shapes](https://azure.microsoft.com/en-us/pricing/details/virtual-machines/series/) are the most preferable in your particular DLAB setup. Also, you can use [Microsoft Azure calculator](https://azure.microsoft.com/en-us/pricing/calculator/?&ef_id=EAIaIQobChMItPmK5uj-6AIVj-iaCh0BFgVYEAAYASAAEgJ4KfD_BwE:G:s&OCID=AID2000606_SEM_UOMYUjFz&MarinID=UOMYUjFz_364338000380_microsoft%20azure%20calculator_e_c__76882726955_kwd-300666827690&lnkd=Google_Azure_Brand&dclid=CLC65Ojo_ugCFUWEsgodm18GNA) to roughly estimate the cost of your environment.
+
+You can override the default configurations of local spark. The configuration object is referenced as a JSON file. To tune spark configuration check off "Spark configurations" check box and insert JSON format in the text box.
+
+After you Select the template, fill in the Name and specify desired instance shape - you need to click on "Create" button for your analytical toolset to be created. Corresponding record shows up in your dashboard:
 
 ![Dashboard](doc/main_page2.png)
 
-As soon as notebook server is created, its status will change to Running:
+As soon as notebook server is created, status changes to Running:
 
 ![Running notebook](doc/main_page3.png)
 
-When you click on the name of your Analytical tool in the dashboard – analytical tool popup will show up:
+When you click on the name of your Analytical tool in the dashboard – analytical tool popup shows up:
 
 <p align="center"> 
     <img src="doc/notebook_info.png" alt="Notebook info" width="574">
 </p>
 
-In the header you will see version of analytical tool, its status and shape.
+In the header you see version of analytical tool, its status and shape.
 
 In the body of the dialog:
 
@@ -170,60 +175,54 @@
 -   Analytical tool URL
 -   Git UI tool (ungit)
 -   Shared bucket for all users
--   Bucket that has been provisioned for your needs
+-   Project bucket for project members
 
-To access analytical tool Web UI you proceed with one of the options:
-
--   use direct URL's to access notebooks (your access will be established via reverse proxy, so you don't need to have Edge node tunnel up and running)
--   SOCKS proxy based URL's to access notebooks (via tunnel to Edge node)
-
-If you use direct urls you don't need to open tunnel for Edge node and set SOCKS proxy.
-If you use indirect urls you need to configure SOCKS proxy and open tunnel for Edge node. Please follow the steps described on “Read instruction how to create the tunnel” page to configure SOCKS proxy for Windows/MAC/Linux machines. “Read instruction how to create the tunnel” is available on DLab notebook popup.
+To access analytical tool Web UI you use direct URL's (your access is established via reverse proxy, so you don't need to have Edge node tunnel up and running).
 
 ### Manage libraries <a name="manage_libraries"></a>
 
-On every analytical tool instance you can install additional libraries by clicking on gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the Actions column for a needed Notebook and hit Manage libraries:
+On every analytical tool instance you can install additional libraries by clicking on gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the "Actions" column for a needed Notebook and hit "Manage libraries":
 
 <p align="center"> 
     <img src="doc/notebook_menu_manage_libraries.png" alt="Notebook manage_libraries" width="150">
 </p>
 
-After clicking you will see the window with 3 fields:
--   Field for selecting an active resource to install libraries on
+After clicking you see the window with 3 fields:
+-   Field for selecting an active resource to install libraries
 -   Field for selecting group of packages (apt/yum, Python 2, Python 3, R, Java, Others)
 -   Field for search available packages with autocomplete function except for Java. java library you should enter using the next format: "groupID:artifactID:versionID"
 
 ![Install libraries dialog](doc/install_libs_form.png)
 
-You need to wait for a while after resource choosing till list of all available libraries will be received.
+You need to wait for a while after resource choosing till list of all available libraries is received.
 
 ![Libraries list loading](doc/notebook_list_libs.png)
 
-**Note:** apt or yum packages depends on your DLab OS family.
+**Note:** Apt or yum packages depends on your DLab OS family.
 
 **Note:** In group Others you can find other Python (2/3) packages, which haven't classifiers of version.
 
 ![Resource select_lib](doc/notebook_select_lib.png)
 
-After selecting library, you can see it on the right and could delete in from this list before installing.
+After selecting library, you can see it in the middle of the window and can delete it from this list before installation.
 
 ![Resource selected_lib](doc/notebook_selected_libs.png)
 
-After clicking on "Install" button you will see process of installation with appropriate status.
+After clicking on "Install" button you see process of installation with appropriate status.
 
 ![Resources libs_status](doc/notebook_libs_status.png)
 
-**Note:** If package can't be installed you will see "Failed" in status column and button to retry installation.
+**Note:** If package can't be installed you see "Failed" in status column and button to retry installation.
 
 ### Create image <a name="create_image"></a>
 
-Out of each analytical tool instance you can create an AMI image (notebook should be in Running status), including all libraries, which have been installed on it. You can use that AMI to speed-up provisioining of further analytical tool, if you would like to re-use existing configuration. To create an AMI click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the Actions menu for a needed Notebook and hit "Create AMI":
+Out of each analytical tool instance you can create an AMI image (notebook should be in Running status), including all libraries, which have been installed on it. You can use that AMI to speed-up provisioning of further analytical tool, if you want to re-use existing configuration. To create an AMI click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the "Actions" menu for a needed Notebook and hit "Create AMI":
 
 <p align="center"> 
     <img src="doc/notebook_menu_create_ami.png" alt="Notebook create_ami" width="150">
 </p>
 
-On Create AMI popup you will be asked to fill in:
+On "Create AMI" popup you should fill:
 -   text box for an AMI name (mandatory)
 -   text box for an AMI description (optional)
 
@@ -231,11 +230,11 @@
     <img src="doc/create_ami.png" alt="Create AMI" width="480">
 </p>
 
-After clicking on "Assign" button the Notebook status will change to Creating AMI. Once an image is created the Notebook status changes back to Running.
+After clicking on "Create" button the Notebook status changes to "Creating image". Once an image is created the Notebook status changes back to "Running".
 
-To create new analytical environment from custom image click "Create new" button on “List of Resources” page. 
+To create new analytical environment from custom image click on "Create new" button on “List of Resources” page. 
 
-“Create analytical tool” popup will show-up. Choose a template of a Notebook for which the custom image is created:
+“Create analytical tool” popup shows up. Choose project, endpoint, template of a Notebook for which the custom image has been created:
 
 <p align="center"> 
     <img src="doc/create_notebook_from_ami.png" alt="Create notebook from AMI" width="560">
@@ -243,56 +242,59 @@
 
 Before clicking "Create" button you should choose the image from "Select AMI" and fill in the "Name" and "Instance shape".
 
+**NOTE:** This functionality is implemented for AWS and Azure.
+
 --------------------------
 ## Stop Notebook server <a name="notebook_stop"></a>
 
-Once you have stopped working with an analytical tool and you would like to release cloud resources for the sake of the costs, you might want to Stop the notebook. You will be able to Start the notebook again after a while and proceed with your analytics.
+Once you have stopped working with an analytical tool and you need to release Cloud resources for the sake of the costs, you might want to stop the notebook. You are able to start the notebook later and proceed with your analytical work.
 
-To Stop the Notebook click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the Actions column for a needed Notebook and hit Stop:
+To stop the Notebook click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the "Actions" column for a needed Notebook and hit "Stop":
 
 <p align="center"> 
     <img src="doc/notebook_menu_stop.png" alt="Notebook stopping" width="150">
 </p>
 
-Hit OK in confirmation popup.
+Hit "OK" in confirmation popup.
 
-**NOTE:** if any Computational resources except for Spark cluster have been connected to your notebook server – they will be automatically terminated if you stop the notebook and Spark cluster will be automatically stopped.
+**NOTE:** Connected Data Engine Service becomes Terminated while connected (if any) Data Engine (Standalone Apache Spark cluster) becomes Stopped.
 
 <p align="center"> 
     <img src="doc/notebook_stop_confirm.png" alt="Notebook stop confirm" width="400">
 </p>
 
-After you confirm you intent to Stop the notebook - the status will be changed to Stopping and will become Stopped in a while. Spark cluster status will be changed to Stopped and other Computational resource status  will be changed to Terminated.
+After you confirm your intent to stop the notebook - the status changes to "Stopping" and later becomes "Stopped". 
 
 --------------------------------
 ## Terminate Notebook server <a name="notebook_terminate"></a>
 
-Once you have finished working with an analytical tool and you would like to release cloud resources for the sake of the costs, you might want to Terminate the notebook. You will not be able to Start the notebook which has been Terminated. Instead, you will have to create new Notebook server if you will need to proceed your analytical activities.
+Once you have finished working with an analytical tool and you don't need cloud resources anymore, for the sake of the costs, we recommend to terminate the notebook. You are not able to start the notebook which has been terminated. Instead, you have to create new Notebook if you need to proceed with your analytical activities.
 
-To Terminate the Notebook click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the Actions column for a needed Notebook and hit Terminate:
+**NOTE:** Make sure you back-up your data (if exists on Notebook) and playbooks before termination.
 
-**NOTE:** if any Computational resources have been linked to your notebook server – they will be automatically terminated if you stop the notebook.
+To terminate the Notebook click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the "Actions" column for a needed Notebook and hit "Terminate":
 
-Confirm termination of the notebook and afterward notebook status will be changed to **Terminating**:
+**NOTE:** If any Computational resources have been linked to your notebook server – they are automatically terminated if you terminate the notebook.
+
+Confirm termination of the notebook and afterwards notebook status changes to "Terminating":
 
 ![Notebook terminating](doc/notebook_terminating.png)
 
-Once corresponding instances are terminated on cloud, status will finally
-change to Terminated:
+Once corresponding instances become terminated in Cloud console, status finally changes to "Terminated":
 
 ![Notebook terminated](doc/notebook_terminated.png)
 
 ---------------
 ## Deploy Computational resource <a name="computational_deploy"></a>
 
-After deploying Notebook node, you can deploy Computational resource and it will be automatically linked with your Notebook server. Computational resource is a managed cluster platform, that simplifies running big data frameworks, such as Apache Hadoop and Apache Spark on cloud to process and analyze vast amounts of data. Adding Computational resource is not mandatory and is needed in case computational resources are required for job execution.
+After deploying Notebook node, you can deploy Computational resource and it is automatically linked with your Notebook server. Computational resource is a managed cluster platform, that simplifies running big data frameworks, such as Apache Hadoop and Apache Spark on cloud to process and analyze vast amounts of data. Adding Computational resource is not mandatory and is needed in case computational resources are required for job execution.
 
-On “Create Computational Resource” popup you will have to choose Computational resource version (configurable) and specify alias for it. To setup a cluster that meets your needs – you will have to define:
+On “Create Computational Resource” popup you have to choose Computational resource version (configurable) and specify alias for it. To setup a cluster that meets your needs – you have to define:
 
 -   Total number of instances (min 2 and max 14, configurable);
 -   Master and Slave instance shapes (list is configurable and supports all available cloud instance shapes, supported in your cloud region);
 
-Also, if you would like to save some costs for your Computational resource you can create it based on [spot instances](https://aws.amazon.com/ec2/spot/), which are often available at a discount price (this functionality is only available for AWS cloud):
+Also, if you want to save some costs for your Computational resource you can create it based on [spot instances](https://aws.amazon.com/ec2/spot/) (this functionality is for AWS cloud) or [preemptible instances](https://cloud.google.com/compute/docs/instances/preemptible) (this functionality is for GCP), which are often available at a discount price:
 
 -   Select Spot Instance checkbox;
 -   Specify preferable bid for your spot instance in % (between 20 and 90, configurable).
@@ -304,41 +306,48 @@
     <img src="doc/emr_create.png" alt="Create Computational resource on AWS" width="760">
 </p>
 
-You can override the default configurations for applications by supplying a configuration object for applications when you create a cluster (this functionality is only available for Amazon EMR cluster ). The configuration object is referenced as a JSON file.
+You can override the default configurations for applications by supplying a configuration object for applications when you create a cluster (this functionality is only available for Amazon EMR cluster). The configuration object is referenced as a JSON file.
 To tune computational resource configuration check off "Cluster configurations" check box and insert JSON format in text box:
 
 <p align="center"> 
     <img src="doc/emr_create_configuration.png" alt="Create Custom Computational resource on AWS" width="760">
 </p>
 
+This picture shows menu for creating Computational resource for GCP:
+<p align="center"> 
+    <img src="doc/dataproc_create.png" alt="Create Computational resource on GCP" width="760">
+</p>
+
+To create Data Engine Service (Dataproc) with preemptible instances check off 'preemptible node count'. You can add from 1 to 11 preemptible instances.
+
 This picture shows menu for creating Computational resource for Azure:
 <p align="center"> 
     <img src="doc/dataengine_creating_menu.png" alt="Create Computational resource on Azure" width="760">
 </p>
 
-If you click on "Create" button Computational resource creation will kick off. You will see corresponding record on DLab Web UI in status **Creating**:
+If you click on "Create" button Computational resource creation kicks off. You see corresponding record on DLab Web UI in status "Creating":
 
 ![Creating Computational resource](doc/emr_creating.png)
 
-Once Computational resources are provisioned, their status will be changed to **Running**.
+Once Computational resources are provisioned, their status changes to "Running".
 
-Clicking on Computational resource name in DLab dashboard will open Computational resource details popup:
+After clicking on Computational resource name in DLab dashboard you see Computational resource details popup:
 
 <p align="center"> 
     <img src="doc/emr_info.png" alt="Computational resource info" width="480">
 </p>
 
-Also you can go to computational resource master UI via link "Apache Spark Master' or "EMR Master" (this functionality is only available for AWS cloud).
+Also you can go to computational resource master UI via link "Spark job tracker URL", "EMR job tracker URL" or "Dataproc job tracker URL".
 
 Since Computational resource is up and running - you are now able to leverage cluster computational power to run your analytical jobs on.
 
 To do that open any of the analytical tools and select proper kernel/interpreter:
 
-**Jupyter** – goto Kernel and choose preferable interpreter between local and Computational resource ones. Currently we have added support of Python 2/3, Spark, Scala, R into Jupyter.
+**Jupyter** – go to Kernel and choose preferable interpreter between local and Computational resource ones. Currently we have added support of Python 2/3, Spark, Scala, R in Jupyter.
 
 ![Jupiter](doc/jupiter.png)
 
-**Zeppelin** – goto Interpreter Biding menu and switch between local and Computational resource there. Once needed interpreter is selected click on Save.
+**Zeppelin** – go to Interpreter Binding menu and switch between local and Computational resource there. Once needed interpreter is selected click on "Save".
 
 ![Zeppelin](doc/zeppelin.png)
 
@@ -354,11 +363,11 @@
 ![RStudio](doc/rstudio.png)
 
 ---------------
-## Stop  Apache Spark cluster <a name="spark_stop"></a>
+## Stop Standalone Apache Spark cluster <a name="spark_stop"></a>
 
-Once you have stopped working with a spark cluster and you would like to release cloud resources for the sake of the costs, you might want to Stop Apache Spark cluster. You will be able to Start apache Spark cluster again after a while and proceed with your analytics.
+Once you have stopped working with Standalone Apache Spark cluster (Data Engine) and you need to release cloud resources for the sake of the costs, you might want to stop Standalone Apache Spark cluster. You are able to start Standalone Apache Spark cluster again after a while and proceed with your analytics.
 
-To Stop Apache Spark cluster click on <img src="doc/stop_icon.png" alt="stop" width="20"> button close to spark cluster alias.
+To stop Standalone Apache Spark cluster click on <img src="doc/stop_icon.png" alt="stop" width="20"> button close to Standalone Apache Spark cluster alias.
 
 Hit "YES" in confirmation popup.
 
@@ -366,48 +375,103 @@
     <img src="doc/spark_stop_confirm.png" alt="Spark stop confirm" width="400">
 </p>
 
-After you confirm your intent to Apache Spark cluster - the status will be changed to Stopping and will become Stopped in a while.
+After you confirm your intent to stop Standalone Apache Spark cluster - the status changes to "Stopping" and soon becomes "Stopped".
 
 ------------------
 ## Terminate Computational resource <a name="computational_terminate"></a>
 
-To release cluster computational resources click on <img src="doc/cross_icon.png" alt="cross" width="16"> button close to Computational resource alias. Confirm decommissioning of Computational resource by hitting Yes:
+To release computational resources click on <img src="doc/cross_icon.png" alt="cross" width="16"> button close to Computational resource alias. Confirm decommissioning of Computational resource by hitting "Yes":
 
 <p align="center"> 
     <img src="doc/emr_terminate_confirm.png" alt="Computational resource terminate confirm" width="400">
 </p>
 
-In a while Computational resource cluster will get **Terminated**. Corresponding cloud instances will also removed on cloud.
+In a while Computational resource gets "Terminated". Corresponding cloud instance also is removed on cloud.
+
+------------------
+## Scheduler <a name="scheduler"></a>
+
+Scheduler component allows to automatically schedule Start and Stop triggers for a Notebook/Computational, while 
+for Data Engine or Data Engine Service it can only trigger Stop or Terminate action correspondingly. There are 2 types of scheduler:
+- Scheduler by time;
+- Scheduler by inactivity.
+
+Scheduler by time is for Notebook/Data Engine Start/Stop and for Data Engine/Data Engine Service termination.
+Scheduler by inactivity is for Notebook/Data Engine stopping.
+
+To create scheduler for a Notebook click on an <img src="doc/gear_icon.png" alt="gear" width="20"> icon in the "Actions" column for a needed Notebook and hit "Scheduler":
+
+<p align="center"> 
+    <img src="doc/notebook_menu_scheduler.png" alt="Notebook scheduler action" width="150">
+</p>
+
+Popup with following fields shows up:
+
+- start/finish dates - date range when scheduler is active;
+- start/end time - time when notebook should be running;
+- timezone - your time zone;
+- repeat on - days when scheduler should be active;
+- possibility to synchronize notebook scheduler with computational schedulers;
+- possibility not to stop notebook in case of running job on Standalone Apache Spark cluster.
+
+<p align="center"> 
+    <img src="doc/notebook_scheduler.png" alt="Notebook scheduler" width="400">
+</p>
+
+If you want to stop Notebook on exceeding idle time you should enable "Scheduler by inactivity", fill your inactivity period (in minutes) and click on "Save" button. Notebook is stopped upon exceeding idle time value.
+
+<p align="center"> 
+    <img src="doc/scheduler_by_inactivity.png" alt="Scheduler by Inactivity.png" width="400">
+</p>
+
+Also scheduler can be configured for a Standalone Apache Spark cluster. To configure scheduler for Standalone Apache Spark cluster click on this icon <img src="doc/icon_scheduler_computational.png" alt="scheduler_computational" width="16">:
+
+<p align="center"> 
+    <img src="doc/computational_scheduler_create.png" alt="Computational scheduler create" width="400">
+</p>
+
+There is a possibility to inherit scheduler start settings from notebook, if such scheduler is present:
+
+<p align="center"> 
+    <img src="doc/computational_scheduler.png" alt="Computational scheduler" width="400">
+</p>
+
+Notebook/Standalone Apache Spark cluster is started/stopped automatically after scheduler setting.
+Please also note that if notebook is configured to be stopped, all running data engines associated with it are stopped (for Standalone Apache Spark cluster) or terminated (for Data Engine Service) with notebook.
+
+After login user is notified that corresponding resources are about to be stopped/terminated in some time.
+
+<p align="center"> 
+    <img src="doc/scheduler reminder.png" alt="Scheduler reminder" width="400">
+</p>
 
 --------------------------------
 ## Collaboration space <a name="collaboration_space"></a>
 
 ### Manage Git credentials <a name="git_creds"></a>
 
-To work with Git (pull, push) via UI tool (ungit) you could add multiple credentials in DLab UI, which will be set on all running instances with analytical tools.
+To work with Git (pull, push) via UI tool (ungit) you could add multiple credentials in DLab UI, which are set on all running instances with analytical tools.
 
-When you click on the button "Git credentials" – following popup will show up:
+When you click on the button "Git credentials" – following popup shows up:
 
 <p align="center"> 
     <img src="doc/git_creds_window.png" alt="Git_creds_window" width="760">
 </p>
 
 In this window you need to add:
--   Your Git server hostname, without **http** or **https**, for example: gitlab.com, github.com, or your internal GitLab server, which can be deployed with DLab.
+-   Your Git server hostname, without **http** or **https**, for example: gitlab.com, github.com, bitbucket.com, or your internal Git server.
 -   Your Username and Email - used to display author of commit in git.
 -   Your Login and Password - for authorization into git server.
 
-**Note:** If you have GitLab server, which was deployed with DLab, you should use your LDAP credentials for access to GitLab.
+Once all fields are filled in and you click on "Assign" button, you see the list of all your Git credentials.
 
-Once all fields are filled in and you click on "Assign" button, you will see the list of all your Git credentials.
-
-Clicking on "Apply changes" button, your credentials will be sent to all running instances with analytical tools. It takes a few seconds for changes to be applied.
+Clicking on "Apply changes" button, your credentials are sent to all running instances with analytical tools. It takes a few seconds for changes to be applied.
 
 <p align="center"> 
     <img src="doc/git_creds_window2.png" alt="Git_creds_window1" width="760">
 </p>
 
-On this tab you can also edit your credentials (click on pen icon) or delete (click on bin icon).
+On this tab you can also edit your credentials (click on pen icon <img src="doc/pen_icon.png" alt="pen" width="15">) or delete (click on bin icon <img src="doc/bin_icon.png" alt="bin" width="15">).
 
 ### Git UI tool (ungit) <a name="git_ui"></a>
 
@@ -417,7 +481,7 @@
     <img src="doc/notebook_info.png" alt="Git_ui_link" width="520">
 </p>
 
-Before start working with git repositories, you need to change working directory on the top of window to:
+Before start working with Git repositories, you need to change working directory on the top of window to:
 
 **/home/dlab-user/** or **/opt/zeppelin/notebook** for Zeppelin analytical tool and press Enter.
 
@@ -431,131 +495,154 @@
 
 ![Git_ui_ungit_work](doc/ungit_work.png)
 
-On the top of window in the red field UI show us changed or new files to commit. You can uncheck or add some files to gitignore.
+On the top of window in the red field UI shows us changed or new files to commit. You can uncheck or add some files to gitignore.
 
 **Note:** Git always checks you credentials. If this is your first commit after adding/changing credentials and after clicking on "Commit" button nothing happened - just click on "Commit" button again.
 
 On the right pane of window you also can see buttons to fetch last changes of repository, add upstreams and switch between branches.
 
-To see all modified files - click on the "circle" button on the center:
+To see all modified files - click on the "Circle" button on the center:
 
 ![Git_ui_ungit_changes](doc/ungit_changes.png)
 
-After commit you will see your local version and remote repository. To push you changes - click on your current branch and press "Push" button.
+After commit you see your local version and remote repository. To push you changes - click on your current branch and press "Push" button.
 
 ![Git_ui_ungit_push](doc/ungit_push.png)
 
-Also clicking on "circle" button you can uncommit or revert changes.
+Also clicking on "Circle" button you can uncommit or revert changes.
 
 --------------------------------
-# DLab Health Status Page <a name="health_page"></a>
+# Administration <a name="administration"></a>
 
-Health Status page is an administration page allowing users to start/stop/recreate gateway node. This might be useful in cases when someone manually deleted corresponding Edge node instance from cloud. This would have made DLab as an application corrupted in general. If any actions are manually done to Edge node instance directly via Cloud Web Console – those changes will be synchronized with DLab automatically and shortly Edge Node status will be updated in DLab.
+## Manage roles <a name="manage_roles"></a>
 
-To access Health status page either navigate to it via main menu:
-
-<p align="center"> 
-    <img src="doc/main_menu.png" alt="Main menu" width="250">
-</p>
-
-or by clicking on an icon close to logged in user name in the top right
-corner of the DLab:
-
--   green ![OK](doc/status_icon_ok.png), if Edge node status is Running;
--   red ![Error](doc/status_icon_error.png),if Edge node is Stopped or Terminated;
-
-![Health_status](doc/health_status.png)
-
-To Stop Edge Node please click on actions icon on Health Status page and hit "Stop".
-
-<p align="center"> 
-    <img src="doc/edge_stop.png" alt="EDGE stop" width="150">
-</p>
-
-Confirm you want to stop Edge node by clicking "Yes":
-
-<p align="center"> 
-    <img src="doc/edge_stop_confirm.png" alt="EDGE stop confirm" width="400">
-</p>
-
-In case you Edge node is Stopped or Terminated – you will have to Start or Recreate it correspondingly to proceed working with DLab. This can done as well via context actions menu.
-
-### Backup <a name="backup"></a>
-
-Administrator can use backup functionality. In order to do it click Backup button. "Backup options" popup will show-up. You can choose a preferable option to be backed up.
-
-<p align="center"> 
-    <img src="doc/backup_options.png" alt="Backup options" width="400">
-</p>
-
-Confirm you want to do backup by clicking "Apply".
-
-### Manage environment <a name="manage_environment"></a>
-
-Administrator can manage users environment clicking on Manage environment button. "Manage environment" popup will show-up. All users environments will be shown which at least one instance has Running status:
-
-<p align="center"> 
-    <img src="doc/manage_environment.png" alt="Manage environment" width="520">
-</p>
-
-If Administrator hit "Stop" icon <img src="doc/stop_icon_env.png" alt="stop" width="22"> all running instances except for dataengine service will be stopped and dataengine service will be terminated. User will be able to Start instances again except for dataengine service after a while and proceed with his analytics.
-
-If Administrator hit "Terminate" icon <img src="doc/terminate_icon_env.png" alt="terminate" width="22"> all running and stopped instances will be terminated. User will not be able to Start the inctance which has been Terminated. Instead, user will have to Upload his personal public key or Generate ssh key pairs.
-
-Administrator should confirm user environment stopping or termination by clicking Yes:
-
-<p align="center"> 
-    <img src="doc/manage_env_confirm.png" alt="Manage environment confirm" width="550">
-</p>
-
-Administrator can manage total billing quota for DLab as well as billing quota per user(s).To do this enter appropriate number in text box(es) per user(s) or/and total budget. Hit "Apply" button.
-
-### Manage roles <a name="manage_roles"></a>
-
-Administrator can choose what instance shape(s) and notebook(s) can be allowed for certain group(s) or user(s).
-To do it click on "Manage roles" button. "Manage roles" popup will show-up:
+Administrator can choose what instance shape(s), notebook(s) and computational resource are supposed to be created for certain group(s) or user(s). Administrator can also assign administrator per project, who is able to manage roles within particular project.
+To do it click on "Add group" button. "Add group" popup shows up:
 
 <p align="center"> 
     <img src="doc/manage_role.png" alt="Manage roles" width="780">
 </p>
 
-To add group enter group name, choose certain action which should be allowed for group and also you can add discrete user(s) (not mandatory) and then click "Create" button.
-New group will be added and appears on "Manage roles" popup.
+Roles consist of:
+- Administration - allow to execute administrative operation for the whole DLab or administrative operation only per project;
+- Billing - allow to view billing only the own resources or all users;
+- Compute - list of Compute types which are supposed for creation;
+- Compute shapes - list of Compute shapes which are supposed for creation;
+- Notebook - list of Notebook templates which are supposed for creation;
+- Notebook shapes - list of Notebook shapes which are supposed for creation.
 
-Administrator can remove group or user. For that you should only click on "Delete group" button for certain group or click on delete icon <img src="doc/cross_icon.png" alt="delete" width="16"> for particular user. After that Hit "Yes" in confirmation popup.
+<p align="center"> 
+    <img src="doc/roles.png" alt="Roles" width="450">
+</p>
+
+To add group enter group name, choose certain action which should be allowed for group and also you can add discrete user(s) (not mandatory) and then click "Create" button.
+After adding the group it appears on "Manage roles" popup.
+
+Administrator can remove group or user. For that you should only click on bin icon <img src="doc/bin_icon.png" alt="bin" width="15"> for certain group or on icon <img src="doc/delete_btn.png" alt="delete" width="13"> for particular user. After that hit "Yes" in confirmation popup.
 
 <p align="center"> 
     <img src="doc/delete_group.png" alt="Delete group" width="780">
 </p>
 
-### SSN monitor <a name="ssn_monitor"></a>
+## Project management <a name="project_management"></a>
 
-Administrator can monitor SSN HDD, Memory and CPU. 
-Clicking on "SSN monitor button" will open "SSN monitor" popup. 
-There are three tabs on  'SSN monitor' popup: CPU, HDD, Memory:
+After project creation (this step is described in [create project](#setup_edge_node)) administrator is able to manage the project by clicking on gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the "Actions" column for the needed project.
 
 <p align="center"> 
-    <img src="doc/cpu.png" alt="SSN CPU" width="480">
+    <img src="doc/project_view.png" alt="Project view" width="780">
 </p>
 
-<p align="center"> 
-    <img src="doc/memory.png" alt="SSN memory" width="480">
-</p>
+The following menu shows up:
 
 <p align="center"> 
-    <img src="doc/hdd.png" alt="SSN HDD" width="480">
+    <img src="doc/project_menu.png" alt="Project menu" width="150">
 </p>
 
+Administrator can edit already existing project:
+- Add or remove group;
+- Add new endpoint;
+- Switch off/on 'Use shared image' option.
+
+To edit the project hit "Edit project" and choose option which you want to add, remove or change. For applying changes click on "Update" button.
+
+To stop Edge node hit "Stop edge node". After that confirm "OK" in confirmation popup. All related instances change their status from "Running" to "Stopping" and soon become "Stopped". You are able to start Edge node again after a while and proceed with your work. Do not forget to start notebook again if you want to continue with your analytics, because starting Edge node does not start related instances.
+
+To terminate Edge node hit "Terminate edge node". After that confirm "OK" in confirmation popup. All related instances change their status to "Terminating" and soon become "Terminated".
+
+## Environment management <a name="environment_management"></a>
+
+DLab Environment Management page is an administration page allowing administrator to see the list of all users environments and to stop/terminate all of them.
+
+To access Environment management page either navigate to it via main menu:
+
+<p align="center"> 
+    <img src="doc/environment_management.png" alt="Environment management">
+</p>
+
+To stop or terminate the Notebook click on a gear icon <img src="doc/gear_icon.png" alt="gear" width="20"> in the "Actions" column for a needed Notebook and hit "Stop" or "Terminate" action:
+<p align="center"> 
+    <img src="doc/manage_env_actions.png" alt="Manage environment actions" width="160">
+</p>
+
+**NOTE:** Connected Data Engine Service is terminated and related Data Engine is stopped during Notebook stopping. During Notebook termination related Computational resources are automatically terminated.
+
+To stop or release specific cluster click an appropriate button close to cluster alias.
+
+<p align="center"> 
+    <img src="doc/managemanage_resource_actions.png" alt="Manage resource action" width="300">
+</p>
+
+Confirm stopping/decommissioning of the Computational resource by hitting "Yes":
+
+<p align="center"> 
+    <img src="doc/manage_env_confirm.png" alt="Manage environment action confirm" width="400">
+</p>
+
+**NOTE:** Terminate action is available only for notebooks and computational resources, not for Edge Nodes.
+
+### Multiple Cloud Endpoints <a name="multiple_cloud_endpoints"></a>
+
+Administrator can connect to any of Cloud endpoints: AWS, GCP, Azure. For that administrator should click on "Endpoints" button. "Connect endpoint" popup shows up:
+
+<p align="center"> 
+    <img src="doc/connect_endpoint.png" alt="Connect endpoint" width="520">
+</p>
+
+Once all fields are filled in and you click on "Connect" button, you are able to see the list of all your added endpoints on "Endpoint list" tab:
+
+<p align="center"> 
+    <img src="doc/endpoint_list.png" alt="Endpoint list" width="520">
+</p>
+
+Administrator can deactivate whole analytical environment via bin icon <img src="doc/bin_icon.png" alt="bin" width="15">. All related instances change their statuses to "Terminating" and soon become "Terminated".
+
+### Manage DLab quotas <a name="manage_dlab_quotas"></a>
+
+Administrator can set quotas per project and for the whole DLab. To do it click on "Manage DLab quotas" button. "Manage DLab quotas" popup shows up. Administrator can see all active projects:
+
+<p align="center"> 
+    <img src="doc/manage_environment.png" alt="Manage environment" width="520">
+</p>
+
+After filling fields and clicking on "Apply" button, new quotas are used for project and DLab.
+If project and DLab quotas are exceeded the warning shows up during login.
+
+<p align="center" class="facebox-popup"> 
+    <img src="doc/exceeded quota.png" alt="Exceeded quota" width="400">
+</p>
+
+In such case user cannot create new instance and already "Running" instance changes its status to "Stopping", except for Data Engine Service (its status changes "Terminating") and soon becomes "Stopped" or "Terminated" appropriately.
+
 --------------------------------
+
 # DLab Billing report <a name="billing_page"></a>
 
 On this page you can see all billing information, including all costs assosiated with service base name of SSN.
 
 ![Billing page](doc/billing_page.png)
 
-In the header you can see 3 fields:
+In the header you can see 2 fields:
 -   Service base name of your environment
--   Resource tag ID
 -   Date period of available billing report
 
 On the center of header you can choose period of report in datepicker:
@@ -566,49 +653,10 @@
 
 You can save billing report in csv format hitting "Export" button.
 
-You can also filter data by each column:
+You can also filter data by environment name, user, project, resource type, instance size, product. 
+On top of that you can sort data by user, project, service charges.
 
-![Billing filter](doc/billing_filter.png)
-
-**Note:** Administrator can see billing report of all users, and only he can see/filter "User" column.
-
-In the footer of billing report, you can see Total cost for all environments.
-
---------------------------------
-# DLab Environment Management Page <a name="environment_management"></a>
-
-DLab Environment Management page is an administration page allowing admins to show the list of all users` environments and to stop/terminate all of them of separate specific resource.
-
-To access Environment management page either navigate to it via main menu:
-
-<p align="center"> 
-    <img src="doc/main_menu_env.png" alt="Main menu" width="250">
-</p>
-
-<p align="center"> 
-    <img src="doc/environment_management.png" alt="Environment management">
-</p>
-
-To Stop or Terminate the Notebook click on a gear icon gear in the Actions column for a needed Notebook and hit Stop or Terminate action:
-<p align="center"> 
-    <img src="doc/manage_env_actions.png" alt="Manage environment actions" width="160">
-</p>
-
-Any Computational resources except for Spark clusters will be automatically terminated and Spark clusters will be stopped in case of Stop action hitting, and all resources will be killed in case of Terminate action hitting.
-
-To stop or release specific cluster click an appropriate button close to cluster alias.
-
-<p align="center"> 
-    <img src="doc/managemanage_resource_actions.png" alt="Manage resource action" width="300">
-</p>
-
-Confirm stopping/decommissioning of the Computational resource by hitting Yes:
-
-<p align="center"> 
-    <img src="doc/manage_env_confirm.png" alt="Manage environment action confirm" width="400">
-</p>
-
-**NOTE:** terminate action is available only for notebooks and computational resources, not for Edge Nodes.
+In the footer of billing report, you can see "Total" cost for all environments.
 
 --------------------------------
 
@@ -628,61 +676,3 @@
 Once your list of filtered by any of the columns, icon <img src="doc/filter_icon.png" alt="filter" width="16"> changes to <img src="doc/sort_icon.png" alt="filter" width="16"> for a filtered columns only.
 
 There is also an option for quick and easy way to filter out all inactive instances (Failed and Terminated) by clicking on “Show active” button in the ribbon. To switch back to the list of all resources, click on “Show all”.
-
-# Scheduler <a name="scheduler"></a>
-
-Scheduler component allows to automatically schedule start/stop of notebook/cluster. There are 2 types of schedulers available:
-- notebook scheduler;
-- data engine scheduler (currently spark cluster only);
-
-To create scheduler for a notebook click on a <img src="doc/gear_icon.png" alt="gear" width="20"> icon in the Actions column for a needed Notebook and hit Scheduler:
-
-<p align="center"> 
-    <img src="doc/notebook_menu_scheduler.png" alt="Notebook scheduler action" width="150">
-</p>
-After clicking you will see popup with the following fields:
-
-- start/finish dates - date range when scheduler is active;
-- start/end time - time when notebook should be running;
-- offset - your zone offset;
-- repeat on - days when scheduler should be active
-- possibility to synchronize notebook scheduler with computational schedulers
-
-<p align="center"> 
-    <img src="doc/notebook_scheduler.png" alt="Notebook scheduler" width="400">
-</p>
-
-Also scheduler can be configured for a spark cluster. To configure scheduler for spark cluster <img src="doc/icon_scheduler_computational.png" alt="scheduler_computational" width="16"> should be clicked (near computational status):
-
-<p align="center"> 
-    <img src="doc/computational_scheduler_create.png" alt="Computational scheduler create" width="400">
-</p>
-
-There is a possibility to inherit scheduler start settings from notebook, if such scheduler is present:
-
-<p align="center"> 
-    <img src="doc/computational_scheduler.png" alt="Computational scheduler" width="400">
-</p>
-
-Once any scheduler is set up, notebook/spark cluster will be started/stopped automatically.
-Please also note that if notebook is configured to be stopped, all running data engines assosiated with it will be stopped (for spark cluster) or terminated (for data engine serice) with notebook.
-
-After login user will be notified  that corresponding resources are about to be stopped/terminated in some time.
-
-<p align="center"> 
-    <img src="doc/scheduler reminder.png" alt="Scheduler reminder" width="400">
-</p>
-
-# Key reupload <a name="key_reupload"></a>
-In case when user private key was corrupted, lost etc. DLAB provide a possibility to reupload user public key.
-It can be done on manage environment page using ACTIONS menu on edge instance:
-
-<p align="center"> 
-    <img src="doc/reupload_key_action.png" alt="Reupload key action" width="200">
-</p>
-
-After that similar to create initial environment dialog appeared where you can upload new key or generate new key-pair:
- 
- <p align="center"> 
-     <img src="doc/reupload_key_dialog.png" alt="Reupload key dialog" width="400">
- </p>
diff --git a/build.properties b/build.properties
index 9265e7e..d765398 100644
--- a/build.properties
+++ b/build.properties
@@ -16,4 +16,4 @@
 # specific language governing permissions and limitations
 # under the License.
 #
-dlab.version=2.2
\ No newline at end of file
+dlab.version=2.3
\ No newline at end of file
diff --git a/doc/billing_filter.png b/doc/billing_filter.png
index 09a0acd..e1dbd78 100644
--- a/doc/billing_filter.png
+++ b/doc/billing_filter.png
Binary files differ
diff --git a/doc/billing_page.png b/doc/billing_page.png
index cc08102..33bd674 100644
--- a/doc/billing_page.png
+++ b/doc/billing_page.png
Binary files differ
diff --git a/doc/bin_icon.png b/doc/bin_icon.png
new file mode 100644
index 0000000..d289b5f
--- /dev/null
+++ b/doc/bin_icon.png
Binary files differ
diff --git a/doc/computational_scheduler.png b/doc/computational_scheduler.png
index b00c626..d87a22f 100644
--- a/doc/computational_scheduler.png
+++ b/doc/computational_scheduler.png
Binary files differ
diff --git a/doc/computational_scheduler_create.png b/doc/computational_scheduler_create.png
index 463351d..5d1ef24 100644
--- a/doc/computational_scheduler_create.png
+++ b/doc/computational_scheduler_create.png
Binary files differ
diff --git a/doc/connect_endpoint.png b/doc/connect_endpoint.png
new file mode 100644
index 0000000..054b3e8
--- /dev/null
+++ b/doc/connect_endpoint.png
Binary files differ
diff --git a/doc/create_notebook_from_ami.png b/doc/create_notebook_from_ami.png
index 7e4453e..11cfde0 100644
--- a/doc/create_notebook_from_ami.png
+++ b/doc/create_notebook_from_ami.png
Binary files differ
diff --git a/doc/dataproc_create.png b/doc/dataproc_create.png
new file mode 100644
index 0000000..cbab3f4
--- /dev/null
+++ b/doc/dataproc_create.png
Binary files differ
diff --git a/doc/delete_btn.png b/doc/delete_btn.png
new file mode 100644
index 0000000..6229abf
--- /dev/null
+++ b/doc/delete_btn.png
Binary files differ
diff --git a/doc/delete_group.png b/doc/delete_group.png
index d5c38e3..9b7c878 100644
--- a/doc/delete_group.png
+++ b/doc/delete_group.png
Binary files differ
diff --git a/doc/emr_creating.png b/doc/emr_creating.png
index 7fb7fde..1e20418 100644
--- a/doc/emr_creating.png
+++ b/doc/emr_creating.png
Binary files differ
diff --git a/doc/emr_terminate_confirm.png b/doc/emr_terminate_confirm.png
index b1fa871..5eb515e 100644
--- a/doc/emr_terminate_confirm.png
+++ b/doc/emr_terminate_confirm.png
Binary files differ
diff --git a/doc/endpoint_list.png b/doc/endpoint_list.png
new file mode 100644
index 0000000..ea8586f
--- /dev/null
+++ b/doc/endpoint_list.png
Binary files differ
diff --git a/doc/environment_management.png b/doc/environment_management.png
index e4c2cda..ba0399c 100644
--- a/doc/environment_management.png
+++ b/doc/environment_management.png
Binary files differ
diff --git a/doc/git_creds_window.png b/doc/git_creds_window.png
index fdf7a41..ed41936 100644
--- a/doc/git_creds_window.png
+++ b/doc/git_creds_window.png
Binary files differ
diff --git a/doc/git_creds_window2.png b/doc/git_creds_window2.png
index 1481df0..f13444f 100644
--- a/doc/git_creds_window2.png
+++ b/doc/git_creds_window2.png
Binary files differ
diff --git a/doc/main_page.png b/doc/main_page.png
index 4338603..b6f1e17 100644
--- a/doc/main_page.png
+++ b/doc/main_page.png
Binary files differ
diff --git a/doc/main_page2.png b/doc/main_page2.png
index 5305a05..3d3af40 100644
--- a/doc/main_page2.png
+++ b/doc/main_page2.png
Binary files differ
diff --git a/doc/main_page3.png b/doc/main_page3.png
index 255de05..1812925 100644
--- a/doc/main_page3.png
+++ b/doc/main_page3.png
Binary files differ
diff --git a/doc/main_page_filter.png b/doc/main_page_filter.png
index 5818548..cd764ec 100644
--- a/doc/main_page_filter.png
+++ b/doc/main_page_filter.png
Binary files differ
diff --git a/doc/manage_env_confirm.png b/doc/manage_env_confirm.png
index 91f3d30..ae4b543 100644
--- a/doc/manage_env_confirm.png
+++ b/doc/manage_env_confirm.png
Binary files differ
diff --git a/doc/manage_environment.png b/doc/manage_environment.png
index ead01e1..73060ff 100644
--- a/doc/manage_environment.png
+++ b/doc/manage_environment.png
Binary files differ
diff --git a/doc/manage_role.png b/doc/manage_role.png
index 152cf7c..9db76c2 100644
--- a/doc/manage_role.png
+++ b/doc/manage_role.png
Binary files differ
diff --git a/doc/managemanage_resource_actions.png b/doc/managemanage_resource_actions.png
index 23c58d4..bd1394c 100644
--- a/doc/managemanage_resource_actions.png
+++ b/doc/managemanage_resource_actions.png
Binary files differ
diff --git a/doc/notebook_create.png b/doc/notebook_create.png
index 18a674b..9ca407e 100644
--- a/doc/notebook_create.png
+++ b/doc/notebook_create.png
Binary files differ
diff --git a/doc/notebook_info.png b/doc/notebook_info.png
index 4cc01a2..83e8e22 100644
--- a/doc/notebook_info.png
+++ b/doc/notebook_info.png
Binary files differ
diff --git a/doc/notebook_libs_status.png b/doc/notebook_libs_status.png
index 5f49722..8aa861d 100644
--- a/doc/notebook_libs_status.png
+++ b/doc/notebook_libs_status.png
Binary files differ
diff --git a/doc/notebook_scheduler.png b/doc/notebook_scheduler.png
index 31bd9ac..81502c3 100644
--- a/doc/notebook_scheduler.png
+++ b/doc/notebook_scheduler.png
Binary files differ
diff --git a/doc/notebook_terminated.png b/doc/notebook_terminated.png
index fb6399b..408e5ee 100644
--- a/doc/notebook_terminated.png
+++ b/doc/notebook_terminated.png
Binary files differ
diff --git a/doc/notebook_terminating.png b/doc/notebook_terminating.png
index d20b967..b62a492 100644
--- a/doc/notebook_terminating.png
+++ b/doc/notebook_terminating.png
Binary files differ
diff --git a/doc/pen_icon.png b/doc/pen_icon.png
new file mode 100644
index 0000000..c6a3a7f
--- /dev/null
+++ b/doc/pen_icon.png
Binary files differ
diff --git a/doc/project_menu.png b/doc/project_menu.png
new file mode 100644
index 0000000..c6d4976
--- /dev/null
+++ b/doc/project_menu.png
Binary files differ
diff --git a/doc/project_view.png b/doc/project_view.png
new file mode 100644
index 0000000..2415ac5
--- /dev/null
+++ b/doc/project_view.png
Binary files differ
diff --git a/doc/roles.png b/doc/roles.png
new file mode 100644
index 0000000..f7468a6
--- /dev/null
+++ b/doc/roles.png
Binary files differ
diff --git a/doc/scheduler_by_inactivity.png b/doc/scheduler_by_inactivity.png
new file mode 100644
index 0000000..decebac
--- /dev/null
+++ b/doc/scheduler_by_inactivity.png
Binary files differ
diff --git a/doc/spark_stop_confirm.png b/doc/spark_stop_confirm.png
index 59b6bf9..7b6bc34 100644
--- a/doc/spark_stop_confirm.png
+++ b/doc/spark_stop_confirm.png
Binary files differ
diff --git a/doc/upload_or_generate_user_key.png b/doc/upload_or_generate_user_key.png
index 2766334..6d6e6e1 100644
--- a/doc/upload_or_generate_user_key.png
+++ b/doc/upload_or_generate_user_key.png
Binary files differ
diff --git a/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json b/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
index d3e48c3..a2db5ae 100644
--- a/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
@@ -8,10 +8,10 @@
   "exploratory_environment_versions" :
   [
     {
-      "template_name": "Deep Learning  2.2",
+      "template_name": "Deep Learning  2.3",
       "description": "Base image with Deep Learning and Jupyter",
       "environment_type": "exploratory",
-      "version": "deeplearning-2.2",
+      "version": "deeplearning-2.3",
       "vendor": "AWS"
     }
   ]
diff --git a/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json b/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json
index 5ad1114..55116fc 100644
--- a/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json
+++ b/infrastructure-provisioning/src/general/files/azure/deeplearning_description.json
@@ -8,10 +8,10 @@
   "exploratory_environment_versions" :
   [
     {
-      "template_name": "Deep Learning  2.2",
+      "template_name": "Deep Learning  2.3",
       "description": "Base image with Deep Learning and Jupyter",
       "environment_type": "exploratory",
-      "version": "deeplearning-2.2",
+      "version": "deeplearning-2.3",
       "vendor": "Azure"
     }
   ]
diff --git a/infrastructure-provisioning/src/general/files/azure/tensor_description.json b/infrastructure-provisioning/src/general/files/azure/tensor_description.json
index 06f67c7..4a71198 100644
--- a/infrastructure-provisioning/src/general/files/azure/tensor_description.json
+++ b/infrastructure-provisioning/src/general/files/azure/tensor_description.json
@@ -8,10 +8,10 @@
   "exploratory_environment_versions" :
   [
     {
-      "template_name": "TensorFlow 1.8.0",
+      "template_name": "Jupyter with TensorFlow 1.8.0",
       "description": "Base image with TensorFlow and Jupyter node creation routines",
       "environment_type": "exploratory",
-      "version": "tensorflow_gpu-1.4.0",
+      "version": "tensorflow_gpu-1.8.0",
       "vendor": "Azure"
     }
   ]
diff --git a/infrastructure-provisioning/src/general/files/gcp/deeplearning_description.json b/infrastructure-provisioning/src/general/files/gcp/deeplearning_description.json
index 0ea1159..080be57 100644
--- a/infrastructure-provisioning/src/general/files/gcp/deeplearning_description.json
+++ b/infrastructure-provisioning/src/general/files/gcp/deeplearning_description.json
@@ -10,10 +10,10 @@
   "exploratory_environment_versions" :
   [
     {
-      "template_name": "Deep Learning  2.2",
+      "template_name": "Deep Learning  2.3",
       "description": "Base image with Deep Learning and Jupyter",
       "environment_type": "exploratory",
-      "version": "deeplearning-2.2",
+      "version": "deeplearning-2.3",
       "vendor": "GCP"
     }
   ]
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
index 2ca635d..c70e9a9 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/common_lib.py
@@ -30,30 +30,44 @@
 
 def manage_pkg(command, environment, requisites):
     try:
-        allow = False
-        counter = 0
-        while not allow:
-            if counter > 60:
+        attempt = 0
+        installed = False
+        while not installed:
+            print('Pkg installation attempt: {}'.format(attempt))
+            if attempt > 60:
                 print("Notebook is broken please recreate it.")
                 sys.exit(1)
             else:
-                print('Package manager is:')
-                if environment == 'remote':
-                    if sudo('pgrep "^apt" -a && echo "busy" || echo "ready"') == 'busy':
-                        counter += 1
-                        time.sleep(10)
-                    else:
-                        allow = True
-                        sudo('apt-get {0} {1}'.format(command, requisites))
-                elif environment == 'local':
-                    if local('sudo pgrep "^apt" -a && echo "busy" || echo "ready"', capture=True) == 'busy':
-                        counter += 1
-                        time.sleep(10)
-                    else:
-                        allow = True
-                        local('sudo apt-get {0} {1}'.format(command, requisites), capture=True)
-                else:
-                    print('Wrong environment')
+                try:
+                    allow = False
+                    counter = 0
+                    while not allow:
+                        if counter > 60:
+                            print("Notebook is broken please recreate it.")
+                            sys.exit(1)
+                        else:
+                            print('Package manager is:')
+                            if environment == 'remote':
+                                if sudo('pgrep "^apt" -a && echo "busy" || echo "ready"') == 'busy':
+                                    counter += 1
+                                    time.sleep(10)
+                                else:
+                                    allow = True
+                                    sudo('apt-get {0} {1}'.format(command, requisites))
+                            elif environment == 'local':
+                                if local('sudo pgrep "^apt" -a && echo "busy" || echo "ready"', capture=True) == 'busy':
+                                    counter += 1
+                                    time.sleep(10)
+                                else:
+                                    allow = True
+                                    local('sudo apt-get {0} {1}'.format(command, requisites), capture=True)
+                            else:
+                                print('Wrong environment')
+                    installed = True
+                except:
+                    print("Will try to install with nex attempt.")
+                    sudo('dpkg --configure -a')
+                    attempt += 1
     except:
         sys.exit(1)
 
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
index bb073c3..f4cda59 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/ssn_lib.py
@@ -180,7 +180,8 @@
              locale, region_info, ldap_login, tenant_id,
              application_id, hostname, data_lake_name, subscription_id,
              validate_permission_scope, dlab_id, usage_date, product,
-             usage_type, usage, cost, resource_id, tags, billing_dataset_name, report_path=''):
+             usage_type, usage, cost, resource_id, tags, billing_dataset_name, keycloak_client_id,
+             keycloak_client_secret, keycloak_auth_server_url, report_path=''):
     try:
         if not exists(os.environ['ssn_dlab_path'] + 'tmp/ss_started'):
             java_path = sudo("update-alternatives --query java | grep 'Value: ' | grep -o '/.*/jre'")
@@ -195,16 +196,16 @@
             sudo('mv /tmp/ssn.yml ' + os.environ['ssn_dlab_path'] + 'conf/')
             put('/root/templates/proxy_location_webapp_template.conf', '/tmp/proxy_location_webapp_template.conf')
             sudo('mv /tmp/proxy_location_webapp_template.conf ' + os.environ['ssn_dlab_path'] + 'tmp/')
-            if cloud_provider == 'gcp':
-                conf_parameter_name = '--spring.config.location='
+            if cloud_provider == 'aws':
+                conf_parameter_name = '--spring.config.location={0}billing_app.yml --conf '.format(dlab_conf_dir)
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
                     text = f.read()
                 text = text.replace('WEB_CONF', dlab_conf_dir).replace('OS_USR', os_user)\
                     .replace('CONF_PARAMETER_NAME', conf_parameter_name)
                 with open('/root/templates/supervisor_svc.conf', 'w') as f:
                     f.write(text)
-            elif cloud_provider == 'aws' or 'azure':
-                conf_parameter_name = '--conf '
+            elif cloud_provider == 'gcp' or cloud_provider == 'azure':
+                conf_parameter_name = '--spring.config.location='
                 with open('/root/templates/supervisor_svc.conf', 'r') as f:
                     text = f.read()
                 text = text.replace('WEB_CONF', dlab_conf_dir).replace('OS_USR', os_user)\
@@ -285,7 +286,15 @@
                          '--cost {} ' \
                          '--resource_id {} ' \
                          '--tags {} ' \
-                         '--billing_dataset_name "{}" '.\
+                         '--billing_dataset_name "{}" '\
+                         '--mongo_host localhost ' \
+                         '--mongo_port 27017 ' \
+                         '--service_base_name {} ' \
+                         '--os_user {} ' \
+                         '--keystore_password {} ' \
+                         '--keycloak_client_id {} ' \
+                         '--keycloak_client_secret {} ' \
+                         '--keycloak_auth_server_url {} '.\
                             format(cloud_provider,
                                    service_base_name,
                                    tag_resource_id,
@@ -309,7 +318,13 @@
                                    cost,
                                    resource_id,
                                    tags,
-                                   billing_dataset_name)
+                                   billing_dataset_name,
+                                   service_base_name,
+                                   os_user,
+                                   keystore_passwd,
+                                   keycloak_client_id,
+                                   keycloak_client_secret,
+                                   keycloak_auth_server_url)
                 sudo('python /tmp/configure_billing.py {}'.format(params))
             try:
                 if os.environ['conf_stepcerts_enabled'] == 'true':
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index e5fc30f..cd15d42 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -41,6 +41,7 @@
             sudo('echo PATH=$PATH:/usr/local/bin/:/opt/spark/bin/ >> /etc/profile')
             sudo('echo export PATH >> /etc/profile')
             sudo('pip install -UI pip=={} --no-cache-dir'.format(os.environ['conf_pip_version']))
+            sudo('pip install --upgrade setuptools')
             sudo('pip install -U {} --no-cache-dir'.format(requisites))
             sudo('touch /home/{}/.ensure_dir/pip_path_added'.format(os.environ['conf_os_user']))
     except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
index 87587d8..bb8c555 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/ssn_configure.py
@@ -509,7 +509,10 @@
                  "--cost {} " \
                  "--resource_id {} " \
                  "--default_endpoint_name {} " \
-                 "--tags {}". \
+                 "--tags {} " \
+                 "--keycloak_client_id {} " \
+                 "--keycloak_client_secret {} " \
+                 "--keycloak_auth_server_url {}". \
             format(ssn_conf['instance_hostname'],
                    "{}{}.pem".format(os.environ['conf_key_dir'], os.environ['conf_key_name']),
                    os.environ['ssn_dlab_path'],
@@ -535,7 +538,10 @@
                    os.environ['cost'],
                    os.environ['resource_id'],
                    os.environ['default_endpoint_name'],
-                   os.environ['tags'])
+                   os.environ['tags'],
+                   os.environ['keycloak_client_name'],
+                   os.environ['keycloak_client_secret'],
+                   os.environ['keycloak_auth_server_url'])
         try:
             local("~/scripts/{}.py {}".format('configure_ui', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
index 856cce4..dbfd10d 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/ssn_configure.py
@@ -419,20 +419,22 @@
             ssn_conf['datalake_application_id'] = os.environ['azure_application_id']
             for datalake in AzureMeta.list_datalakes(ssn_conf['resource_group_name']):
                 if ssn_conf['datalake_store_name'] == datalake.tags["Name"]:
-                    ssn_conf['datalake_store_name'] = datalake.name
-        params = "--hostname {} --keyfile {} --dlab_path {} --os_user {} --os_family {} --request_id {} " \
-                 "--resource {} --service_base_name {} --cloud_provider {} --billing_enabled {} " \
-                 "--authentication_file {} --offer_number {} --currency {} --locale {} --region_info {}  " \
-                 "--ldap_login {} --tenant_id {} --application_id {} --datalake_store_name {} --cloud_params '{}' " \
-                 "--subscription_id {} --validate_permission_scope {} --default_endpoint_name {}".format(
-                  ssn_conf['instance_host'], ssn_conf['ssh_key_path'], os.environ['ssn_dlab_path'],
-                  ssn_conf['dlab_ssh_user'], os.environ['conf_os_family'], os.environ['request_id'],
-                  os.environ['conf_resource'], ssn_conf['service_base_name'], os.environ['conf_cloud_provider'],
-                  ssn_conf['billing_enabled'], ssn_conf['azure_auth_path'], os.environ['azure_offer_number'],
-                  os.environ['azure_currency'], os.environ['azure_locale'], os.environ['azure_region_info'],
-                  ssn_conf['ldap_login'], ssn_conf['tenant_id'], ssn_conf['datalake_application_id'],
-                  ssn_conf['datalake_store_name'], json.dumps(cloud_params), ssn_conf['subscription_id'],
-                  os.environ['azure_validate_permission_scope'], ssn_conf['default_endpoint_name'])
+                    datalake_store_name = datalake.name
+        params = "--hostname {} --keyfile {} --dlab_path {} --os_user {} --os_family {} --request_id {} \
+                 --resource {} --service_base_name {} --cloud_provider {} --billing_enabled {} --authentication_file {} \
+                 --offer_number {} --currency {} --locale {} --region_info {}  --ldap_login {} --tenant_id {} \
+                 --application_id {} --datalake_store_name {} --cloud_params '{}' --subscription_id {}  \
+                 --validate_permission_scope {} --default_endpoint_name {} --keycloak_client_id {} \
+                 --keycloak_client_secret {} --keycloak_auth_server_url {}". \
+            format(ssn_conf['instnace_ip'], ssn_conf['ssh_key_path'], os.environ['ssn_dlab_path'],
+                   ssn_conf['dlab_ssh_user'], os.environ['conf_os_family'], os.environ['request_id'],
+                   os.environ['conf_resource'], ssn_conf['service_base_name'], os.environ['conf_cloud_provider'],
+                   ssn_conf['billing_enabled'], ssn_conf['azure_auth_path'], os.environ['azure_offer_number'],
+                   os.environ['azure_currency'], os.environ['azure_locale'], os.environ['azure_region_info'],
+                   ssn_conf['ldap_login'], ssn_conf['tenant_id'], ssn_conf['datalake_application_id'], ssn_conf['datalake_store_name'], json.dumps(cloud_params),
+                   ssn_conf['subscription_id'], os.environ['azure_validate_permission_scope'], ssn_conf['default_endpoint_name'],
+                   os.environ['keycloak_client_name'], os.environ['keycloak_client_secret'],
+                   os.environ['keycloak_auth_server_url'])
         local("~/scripts/{}.py {}".format('configure_ui', params))
     except Exception as err:
         traceback.print_exc()
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
index 47b6cde..f9822a0 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/project_prepare.py
@@ -398,7 +398,8 @@
             project_conf['tag_name']: project_conf['shared_bucket_name'],
             "endpoint_tag": project_conf['endpoint_tag'],
             os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value'],
-            "sbn": project_conf['service_base_name']}
+            "sbn": project_conf['service_base_name'],
+            "name": project_conf['shared_bucket_name']}
         params = "--bucket_name {} --tags '{}'".format(project_conf['shared_bucket_name'],
                                                        json.dumps(project_conf['shared_bucket_tags']))
         try:
@@ -412,7 +413,8 @@
             "endpoint_tag": project_conf['endpoint_tag'],
             os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value'],
             "sbn": project_conf['service_base_name'],
-            "project_tag": project_conf['project_tag']}
+            "project_tag": project_conf['project_tag'],
+            "name": project_conf['bucket_name']}
         params = "--bucket_name {} --tags '{}'".format(project_conf['bucket_name'],
                                                        json.dumps(project_conf['bucket_tags']))
 
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
index 4a10115..dd622d2 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/ssn_configure.py
@@ -425,12 +425,13 @@
         params = "--hostname {} --keyfile {} --dlab_path {} --os_user {} --os_family {} --billing_enabled {} " \
                  "--request_id {} --billing_dataset_name {} \
                  --resource {} --service_base_name {} --cloud_provider {} --default_endpoint_name {} " \
-                 "--cloud_params '{}'". \
-            format(ssn_conf['instance_hostname'], ssn_conf['ssh_key_path'], os.environ['ssn_dlab_path'],
-                   ssn_conf['dlab_ssh_user'], os.environ['conf_os_family'], ssn_conf['billing_enabled'],
-                   os.environ['request_id'], os.environ['billing_dataset_name'], os.environ['conf_resource'],
+                 "--cloud_params '{}' --keycloak_client_id {} --keycloak_client_secret {} --keycloak_auth_server_url {}". \
+            format(ssn_conf['instance_hostname'], ssn_conf['ssh_key_path'], os.environ['ssn_dlab_path'], ssn_conf['dlab_ssh_user'],
+                   os.environ['conf_os_family'], ssn_conf['billing_enabled'], os.environ['request_id'],
+                   os.environ['billing_dataset_name'], os.environ['conf_resource'],
                    ssn_conf['service_base_name'], os.environ['conf_cloud_provider'], ssn_conf['default_endpoint_name'],
-                   json.dumps(cloud_params))
+                   json.dumps(cloud_params), os.environ['keycloak_client_name'], os.environ['keycloak_client_secret'],
+                   os.environ['keycloak_auth_server_url'])
         try:
             local("~/scripts/{}.py {}".format('configure_ui', params))
         except:
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_billing.py b/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
index 9f40e72..1df44cc 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_billing.py
@@ -49,15 +49,24 @@
 parser.add_argument('--region_info', type=str, default='', help='Azure region info')
 parser.add_argument('--mongo_password', type=str, help='The password for Mongo DB')
 parser.add_argument('--dlab_dir', type=str, help='The path to dlab dir')
-parser.add_argument('--dlab_id', type=str, default='', help='Column name in report file that contains dlab id tag')
-parser.add_argument('--usage_date', type=str, default='', help='Column name in report file that contains usage date tag')
-parser.add_argument('--product', type=str, default='', help='Column name in report file that contains product name tag')
-parser.add_argument('--usage_type', type=str, default='', help='Column name in report file that contains usage type tag')
-parser.add_argument('--usage', type=str, default='', help='Column name in report file that contains usage tag')
-parser.add_argument('--cost', type=str, default='', help='Column name in report file that contains cost tag')
-parser.add_argument('--resource_id', type=str, default='', help='Column name in report file that contains dlab resource id tag')
-parser.add_argument('--tags', type=str, default='', help='Column name in report file that contains tags')
+parser.add_argument('--dlab_id', type=str, default='resource_tags_user_user_tag', help='Column name in report file that contains dlab id tag')
+parser.add_argument('--usage_date', type=str, default='line_item_usage_start_date', help='Column name in report file that contains usage date tag')
+parser.add_argument('--product', type=str, default='product_product_name', help='Column name in report file that contains product name tag')
+parser.add_argument('--usage_type', type=str, default='line_item_usage_type', help='Column name in report file that contains usage type tag')
+parser.add_argument('--usage', type=str, default='line_item_usage_amount', help='Column name in report file that contains usage tag')
+parser.add_argument('--cost', type=str, default='line_item_blended_cost', help='Column name in report file that contains cost tag')
+parser.add_argument('--resource_id', type=str, default='line_item_resource_id', help='Column name in report file that contains dlab resource id tag')
+parser.add_argument('--tags', type=str, default='line_item_operation,line_item_line_item_description', help='Column name in report file that contains tags')
 parser.add_argument('--billing_dataset_name', type=str, default='', help='Name of gcp billing dataset (in big query service')
+
+parser.add_argument('--mongo_host', type=str, default='localhost', help='Mongo DB host')
+parser.add_argument('--mongo_port', type=str, default='27017', help='Mongo DB port')
+parser.add_argument('--service_base_name', type=str, help='Service Base Name')
+parser.add_argument('--os_user', type=str, help='Dlab user')
+parser.add_argument('--keystore_password', type=str, help='Keystore password')
+parser.add_argument('--keycloak_client_id', type=str, help='Keycloak client id')
+parser.add_argument('--keycloak_client_secret', type=str, help='Keycloak client secret')
+parser.add_argument('--keycloak_auth_server_url', type=str, help='Keycloak auth server url')
 args = parser.parse_args()
 
 
@@ -70,38 +79,76 @@
         if args.cloud_provider == 'aws':
             if args.aws_job_enabled == 'true':
                 args.tag_resource_id =  'resourceTags' + ':' + args.tag_resource_id
-            config_orig = config_orig.replace('<BILLING_BUCKET_NAME>', args.billing_bucket)
-            config_orig = config_orig.replace('<AWS_JOB_ENABLED>', args.aws_job_enabled)
-            config_orig = config_orig.replace('<REPORT_PATH>', args.report_path)
-            config_orig = config_orig.replace('<ACCOUNT_ID>', args.account_id)
-            config_orig = config_orig.replace('<ACCESS_KEY_ID>', args.access_key_id)
-            config_orig = config_orig.replace('<SECRET_ACCESS_KEY>', args.secret_access_key)
-            config_orig = config_orig.replace('<CONF_BILLING_TAG>', args.billing_tag)
-            config_orig = config_orig.replace('<CONF_SERVICE_BASE_NAME>', args.infrastructure_tag)
-            config_orig = config_orig.replace('<MONGODB_PASSWORD>', args.mongo_password)
-            config_orig = config_orig.replace('<DLAB_ID>', args.dlab_id)
-            config_orig = config_orig.replace('<USAGE_DATE>', args.usage_date)
-            config_orig = config_orig.replace('<PRODUCT>', args.product)
-            config_orig = config_orig.replace('<USAGE_TYPE>', args.usage_type)
-            config_orig = config_orig.replace('<USAGE>', args.usage)
-            config_orig = config_orig.replace('<COST>', args.cost)
-            config_orig = config_orig.replace('<RESOURCE_ID>', args.resource_id)
-            config_orig = config_orig.replace('<TAGS>', args.tags)
+            config_orig = config_orig.replace('MONGO_HOST', args.mongo_host)
+            config_orig = config_orig.replace('MONGO_PASSWORD', args.mongo_password)
+            config_orig = config_orig.replace('MONGO_PORT', args.mongo_port)
+            config_orig = config_orig.replace('BILLING_BUCKET_NAME', args.billing_bucket)
+            config_orig = config_orig.replace('REPORT_PATH', args.report_path)
+            config_orig = config_orig.replace('AWS_JOB_ENABLED', args.aws_job_enabled)
+            config_orig = config_orig.replace('ACCOUNT_ID', args.account_id)
+            config_orig = config_orig.replace('ACCESS_KEY_ID', args.access_key_id)
+            config_orig = config_orig.replace('SECRET_ACCESS_KEY', args.secret_access_key)
+            config_orig = config_orig.replace('CONF_BILLING_TAG', args.billing_tag)
+            config_orig = config_orig.replace('SERVICE_BASE_NAME', args.service_base_name)
+            config_orig = config_orig.replace('DLAB_ID', args.dlab_id)
+            config_orig = config_orig.replace('USAGE_DATE', args.usage_date)
+            config_orig = config_orig.replace('PRODUCT', args.product)
+            config_orig = config_orig.replace('USAGE_TYPE', args.usage_type)
+            config_orig = config_orig.replace('USAGE', args.usage)
+            config_orig = config_orig.replace('COST', args.cost)
+            config_orig = config_orig.replace('RESOURCE_ID', args.resource_id)
+            config_orig = config_orig.replace('TAGS', args.tags)
         elif args.cloud_provider == 'azure':
-            config_orig = config_orig.replace('<CLIENT_ID>', args.client_id)
-            config_orig = config_orig.replace('<CLIENT_SECRET>', args.client_secret)
-            config_orig = config_orig.replace('<TENANT_ID>', args.tenant_id)
-            config_orig = config_orig.replace('<SUBSCRIPTION_ID>', args.subscription_id)
-            config_orig = config_orig.replace('<AUTHENTICATION_FILE>', args.authentication_file)
-            config_orig = config_orig.replace('<OFFER_NUMBER>', args.offer_number)
-            config_orig = config_orig.replace('<CURRENCY>', args.currency)
-            config_orig = config_orig.replace('<LOCALE>', args.locale)
-            config_orig = config_orig.replace('<REGION_INFO>', args.region_info)
-            config_orig = config_orig.replace('<MONGODB_PASSWORD>', args.mongo_password)
+            config_orig = config_orig.replace('SERVICE_BASE_NAME', args.service_base_name)
+            config_orig = config_orig.replace('OS_USER', args.os_user)
+            config_orig = config_orig.replace('MONGO_PASSWORD', args.mongo_password)
+            config_orig = config_orig.replace('MONGO_PORT', args.mongo_port)
+            config_orig = config_orig.replace('MONGO_HOST', args.mongo_host)
+            config_orig = config_orig.replace('KEY_STORE_PASSWORD', args.keystore_password)
+            config_orig = config_orig.replace('KEYCLOAK_CLIENT_ID', args.keycloak_client_id)
+            config_orig = config_orig.replace('KEYCLOAK_CLIENT_SECRET', args.keycloak_client_secret)
+            config_orig = config_orig.replace('KEYCLOAK_AUTH_SERVER_URL', args.keycloak_auth_server_url)
+            config_orig = config_orig.replace('CLIENT_ID', args.client_id)
+            config_orig = config_orig.replace('CLIENT_SECRET', args.client_secret)
+            config_orig = config_orig.replace('TENANT_ID', args.tenant_id)
+            config_orig = config_orig.replace('SUBSCRIPTION_ID', args.subscription_id)
+            config_orig = config_orig.replace('AUTHENTICATION_FILE', args.authentication_file)
+            config_orig = config_orig.replace('OFFER_NUMBER', args.offer_number)
+            config_orig = config_orig.replace('CURRENCY', args.currency)
+            config_orig = config_orig.replace('LOCALE', args.locale)
+            config_orig = config_orig.replace('REGION_INFO', args.region_info)
         elif args.cloud_provider == 'gcp':
-            config_orig = config_orig.replace('<CONF_SERVICE_BASE_NAME>', args.infrastructure_tag)
-            config_orig = config_orig.replace('<MONGO_PASSWORD>', args.mongo_password)
-            config_orig = config_orig.replace('<BILLING_DATASET_NAME>', args.billing_dataset_name)
+            config_orig = config_orig.replace('SERVICE_BASE_NAME', args.service_base_name)
+            config_orig = config_orig.replace('OS_USER', args.os_user)
+            config_orig = config_orig.replace('MONGO_PASSWORD', args.mongo_password)
+            config_orig = config_orig.replace('MONGO_PORT', args.mongo_port)
+            config_orig = config_orig.replace('MONGO_HOST', args.mongo_host)
+            config_orig = config_orig.replace('KEY_STORE_PASSWORD', args.keystore_password)
+            config_orig = config_orig.replace('DATASET_NAME', args.billing_dataset_name)
+            config_orig = config_orig.replace('KEYCLOAK_CLIENT_ID', args.keycloak_client_id)
+            config_orig = config_orig.replace('KEYCLOAK_CLIENT_SECRET', args.keycloak_client_secret)
+            config_orig = config_orig.replace('KEYCLOAK_AUTH_SERVER_URL', args.keycloak_auth_server_url)
+        f = open(path, 'w')
+        f.write(config_orig)
+        f.close()
+    except:
+        print("Could not write the target file {}".format(path))
+        sys.exit(1)
+
+def yml_billing_app(path):
+    try:
+        with open(path, 'r') as config_yml_r:
+            config_orig = config_yml_r.read()
+
+        config_orig = config_orig.replace('MONGO_HOST', args.mongo_host)
+        config_orig = config_orig.replace('MONGO_PASSWORD', args.mongo_password)
+        config_orig = config_orig.replace('MONGO_PORT', args.mongo_port)
+        config_orig = config_orig.replace('OS_USER', args.os_user)
+        config_orig = config_orig.replace('KEY_STORE_PASSWORD', args.keystore_password)
+        config_orig = config_orig.replace('KEYCLOAK_CLIENT_ID', args.keycloak_client_id)
+        config_orig = config_orig.replace('KEYCLOAK_CLIENT_SECRET', args.keycloak_client_secret)
+        config_orig = config_orig.replace('KEYCLOAK_AUTH_SERVER_URL', args.keycloak_auth_server_url)
+
         f = open(path, 'w')
         f.write(config_orig)
         f.close()
@@ -134,6 +181,8 @@
     # Access to the bucket without credentials?
     try:
         yml_billing(args.dlab_dir + 'conf/billing.yml')
+        if args.cloud_provider == 'aws':
+            yml_billing_app(args.dlab_dir + 'conf/billing_app.yml')
         yml_self_service(args.dlab_dir + 'conf/self-service.yml')
     except:
         print('Error configure billing')
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
index fdca046..2e3cd85 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ui.py
@@ -72,6 +72,9 @@
 parser.add_argument('--cost', type=str, default=None)
 parser.add_argument('--resource_id', type=str, default=None)
 parser.add_argument('--tags', type=str, default=None)
+parser.add_argument('--keycloak_client_id', type=str, default=None)
+parser.add_argument('--keycloak_client_secret', type=str, default=None)
+parser.add_argument('--keycloak_auth_server_url', type=str, default=None)
 args = parser.parse_args()
 
 dlab_conf_dir = args.dlab_path + 'conf/'
@@ -175,6 +178,8 @@
                 args.dlab_path))
         elif args.cloud_provider == 'aws':
             sudo('cp {0}/sources/services/billing-aws/billing.yml {0}/webapp/billing/conf/'.format(args.dlab_path))
+            sudo('cp {0}/sources/services/billing-aws/src/main/resources/application.yml '
+                 '{0}/webapp/billing/conf/billing_app.yml'.format(args.dlab_path))
             sudo(
                 'cp {0}/sources/services/billing-aws/target/billing-aws*.jar {0}/webapp/billing/lib/'.format(
                     args.dlab_path))
@@ -232,4 +237,5 @@
              args.region_info, args.ldap_login, args.tenant_id, args.application_id,
              args.hostname, args.datalake_store_name, args.subscription_id, args.validate_permission_scope,
              args.dlab_id, args.usage_date, args.product, args.usage_type,
-             args.usage, args.cost, args.resource_id, args.tags, args.billing_dataset_name)
+             args.usage, args.cost, args.resource_id, args.tags, args.billing_dataset_name, args.keycloak_client_id,
+             args.keycloak_client_secret, args.keycloak_auth_server_url)
diff --git a/infrastructure-provisioning/src/ssn/templates/ssn.yml b/infrastructure-provisioning/src/ssn/templates/ssn.yml
index fffa7d2..7b18d26 100644
--- a/infrastructure-provisioning/src/ssn/templates/ssn.yml
+++ b/infrastructure-provisioning/src/ssn/templates/ssn.yml
@@ -62,5 +62,10 @@
     timeout: 3s
     connectionTimeout: 3s
 
+billingService:
+  jerseyClient:
+    timeout: 4m
+    connectionTimeout: 3s
+
 # Log out user on inactivity
 inactiveUserTimeoutMillSec: 7200000
diff --git a/infrastructure-provisioning/terraform/aws/computational_resources/main/main.tf b/infrastructure-provisioning/terraform/aws/computational_resources/main/main.tf
index 1fb08e5..c8bea60 100644
--- a/infrastructure-provisioning/terraform/aws/computational_resources/main/main.tf
+++ b/infrastructure-provisioning/terraform/aws/computational_resources/main/main.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 provider "aws" {
   access_key = var.access_key_id
   secret_key = var.secret_access_key
diff --git a/infrastructure-provisioning/terraform/aws/computational_resources/main/variables.tf b/infrastructure-provisioning/terraform/aws/computational_resources/main/variables.tf
index 13f0d25..25f322b 100644
--- a/infrastructure-provisioning/terraform/aws/computational_resources/main/variables.tf
+++ b/infrastructure-provisioning/terraform/aws/computational_resources/main/variables.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 variable "access_key_id" {}
 
 variable "secret_access_key" {}
diff --git a/infrastructure-provisioning/terraform/aws/endpoint/main/network.tf b/infrastructure-provisioning/terraform/aws/endpoint/main/network.tf
index 661080f..eea071b 100644
--- a/infrastructure-provisioning/terraform/aws/endpoint/main/network.tf
+++ b/infrastructure-provisioning/terraform/aws/endpoint/main/network.tf
@@ -114,6 +114,13 @@
     cidr_blocks = ["0.0.0.0/0"]
   }
 
+  ingress {
+    from_port   = 8088
+    to_port     = 8088
+    protocol    = "tcp"
+    cidr_blocks = ["0.0.0.0/0"]
+  }
+
   egress {
     from_port   = 0
     to_port     = 0
diff --git a/infrastructure-provisioning/terraform/aws/endpoint/main/variables.tf b/infrastructure-provisioning/terraform/aws/endpoint/main/variables.tf
index 8dc3d77..798ddb8 100644
--- a/infrastructure-provisioning/terraform/aws/endpoint/main/variables.tf
+++ b/infrastructure-provisioning/terraform/aws/endpoint/main/variables.tf
@@ -80,4 +80,24 @@
 
 variable "tag_resource_id" {
   default = "user:tag"
-}
\ No newline at end of file
+}
+
+variable "billing_enable" {}
+
+variable "mongo_password" {}
+
+variable "mongo_host" {}
+
+variable "billing_bucket" {}
+
+variable "report_path" {
+  default = ""
+}
+
+variable "aws_job_enabled" {
+  default = "false"
+}
+
+variable "billing_aws_account_id" {}
+
+variable "billing_tag" {}
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/.helmignore b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/.helmignore
index fbe01f8..2f795d4 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/.helmignore
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/.helmignore
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # Patterns to ignore when building packages.
 # This supports shell glob matching, relative path matching, and
 # negation (prefixed with !). Only one pattern per line.
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/Chart.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/Chart.yaml
index 4b334aa..e9d93e2 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/Chart.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/Chart.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: v1
 appVersion: 0.13.2
 description: An online certificate authority and related tools for secure automated
@@ -6,7 +27,7 @@
 home: https://smallstep.com
 icon: https://raw.githubusercontent.com/smallstep/certificates/master/icon.png
 keywords:
-- acme
+- acme
 - authority
 - ca
 - certificate
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/_helpers.tpl b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/_helpers.tpl
index e240bac..b65f748 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/_helpers.tpl
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/_helpers.tpl
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 {{/* vim: set filetype=mustache: */}}
 {{/*
 Expand the name of the chart.
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/bootstrap.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/bootstrap.yaml
index 4fa3240..354c144 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/bootstrap.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/bootstrap.yaml
@@ -1,11 +1,32 @@
-{{- if .Release.IsInstall -}}
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+  {{- if .Release.IsInstall -}}
 apiVersion: v1
 kind: ServiceAccount
 metadata:
   name: {{ include "step-certificates.fullname" . }}-config
   namespace: {{.Release.Namespace}}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+{{ include "step-certificates.labels" . | indent 4 }}
 ---
 apiVersion: batch/v1
 kind: Job
@@ -24,16 +45,16 @@
       serviceAccountName: {{ include "step-certificates.fullname" . }}-config
       restartPolicy: Never
       volumes:
-      - name: bootstrap
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-bootstrap
+        - name: bootstrap
+          configMap:
+            name: {{ include "step-certificates.fullname" . }}-bootstrap
       containers:
-      - name: config
-        image: "{{ .Values.bootstrapImage.repository }}:{{ .Values.bootstrapImage.tag }}"
-        imagePullPolicy: {{ .Values.bootstrapImage.pullPolicy }}
-        command: ["/bin/sh", "/home/step/bootstrap/bootstrap.sh"]
-        volumeMounts:
-          - name: bootstrap
-            mountPath: /home/step/bootstrap
-            readOnly: true
+        - name: config
+          image: "{{ .Values.bootstrapImage.repository }}:{{ .Values.bootstrapImage.tag }}"
+          imagePullPolicy: {{ .Values.bootstrapImage.pullPolicy }}
+          command: ["/bin/sh", "/home/step/bootstrap/bootstrap.sh"]
+          volumeMounts:
+            - name: bootstrap
+              mountPath: /home/step/bootstrap
+              readOnly: true
 {{- end -}}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ca.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ca.yaml
index 7c5929c..24ed08e 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ca.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ca.yaml
@@ -1,9 +1,30 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: apps/v1
 kind: StatefulSet
 metadata:
   name: {{ include "step-certificates.fullname" . }}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+{{ include "step-certificates.labels" . | indent 4 }}
 spec:
   # Only one replica is supported at this moment
   # Requested {{ .Values.replicaCount }}
@@ -20,99 +41,99 @@
         app.kubernetes.io/instance: {{ .Release.Name }}
     spec:
 {{- if .Release.IsInstall }}
-      initContainers:
-        - name: {{ .Chart.Name }}-init
-          image: busybox:latest
-          imagePullPolicy: {{ .Values.image.pullPolicy }}
-          command: ["sleep", "20"]
+initContainers:
+  - name: {{ .Chart.Name }}-init
+    image: busybox:latest
+    imagePullPolicy: {{ .Values.image.pullPolicy }}
+    command: ["sleep", "20"]
 {{- end }}
-      securityContext:
-        {{- if .Values.ca.runAsRoot }}
-        runAsUser: 0
-        {{- else }}
-        runAsUser: 1000
-        runAsNonRoot: true
-        runAsGroup: 1000
-        fsGroup: 1000
-        allowPrivilegeEscalation: false
-        {{- end }}
-      containers:
-        - name: {{ .Chart.Name }}
-          image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
-          imagePullPolicy: {{ .Values.image.pullPolicy }}
-          command: ["/usr/local/bin/step-ca",
-            "--password-file", "/home/step/secrets/passwords/password",
-            "/home/step/config/ca.json"]
-          env:
-          - name: NAMESPACE
-            value: "{{ .Release.Namespace }}"
-          ports:
-            - name: https
-              containerPort: {{ .Values.service.targetPort }}
-              protocol: TCP
-          livenessProbe:
-            initialDelaySeconds: 5
-            httpGet:
-              path: /health
-              port: {{ .Values.service.targetPort }}
-              scheme: HTTPS
-          readinessProbe:
-            initialDelaySeconds: 5
-            httpGet:
-              path: /health
-              port: {{ .Values.service.targetPort }}
-              scheme: HTTPS
-          resources:
-            {{- toYaml .Values.resources | nindent 12 }}
-          volumeMounts:
-          - name: certs
-            mountPath: /home/step/certs
-            readOnly: true
-          - name: config
-            mountPath: /home/step/config
-            readOnly: true
-          - name: secrets
-            mountPath: /home/step/secrets
-            readOnly: true
-          - name: ca-password
-            mountPath: /home/step/secrets/passwords
-            readOnly: true
-          {{- if .Values.ca.db.enabled }}
-          - name: database
-            mountPath: /home/step/db
-            readOnly: false
-          {{- end }}
-      volumes:
+securityContext:
+  {{- if .Values.ca.runAsRoot }}
+  runAsUser: 0
+  {{- else }}
+  runAsUser: 1000
+  runAsNonRoot: true
+  runAsGroup: 1000
+  fsGroup: 1000
+  allowPrivilegeEscalation: false
+  {{- end }}
+containers:
+  - name: {{ .Chart.Name }}
+    image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
+    imagePullPolicy: {{ .Values.image.pullPolicy }}
+    command: ["/usr/local/bin/step-ca",
+              "--password-file", "/home/step/secrets/passwords/password",
+              "/home/step/config/ca.json"]
+    env:
+      - name: NAMESPACE
+        value: "{{ .Release.Namespace }}"
+    ports:
+      - name: https
+        containerPort: {{ .Values.service.targetPort }}
+        protocol: TCP
+    livenessProbe:
+      initialDelaySeconds: 5
+      httpGet:
+        path: /health
+        port: {{ .Values.service.targetPort }}
+        scheme: HTTPS
+    readinessProbe:
+      initialDelaySeconds: 5
+      httpGet:
+        path: /health
+        port: {{ .Values.service.targetPort }}
+        scheme: HTTPS
+    resources:
+      {{- toYaml .Values.resources | nindent 12 }}
+    volumeMounts:
       - name: certs
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-certs
+        mountPath: /home/step/certs
+        readOnly: true
       - name: config
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-config
+        mountPath: /home/step/config
+        readOnly: true
       - name: secrets
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-secrets
+        mountPath: /home/step/secrets
+        readOnly: true
       - name: ca-password
-        secret:
-          secretName: {{ include "step-certificates.fullname" . }}-ca-password
-      {{- if and .Values.ca.db.enabled (not .Values.ca.db.persistent) }}
-      - name: database
-        emptyDir: {}
-      {{- end }}
-      {{- with .Values.nodeSelector }}
-      nodeSelector:
-      {{- toYaml . | nindent 8 }}
-      {{- end }}
-      {{- with .Values.affinity }}
-      affinity:
-      {{- toYaml . | nindent 8 }}
-      {{- end }}
-      {{- with .Values.tolerations }}
-      tolerations:
-      {{- toYaml . | nindent 8 }}
-      {{- end }}
+        mountPath: /home/step/secrets/passwords
+        readOnly: true
+    {{- if .Values.ca.db.enabled }}
+    - name: database
+      mountPath: /home/step/db
+      readOnly: false
+    {{- end }}
+volumes:
+  - name: certs
+    configMap:
+      name: {{ include "step-certificates.fullname" . }}-certs
+  - name: config
+    configMap:
+      name: {{ include "step-certificates.fullname" . }}-config
+  - name: secrets
+    configMap:
+      name: {{ include "step-certificates.fullname" . }}-secrets
+  - name: ca-password
+    secret:
+      secretName: {{ include "step-certificates.fullname" . }}-ca-password
+  {{- if and .Values.ca.db.enabled (not .Values.ca.db.persistent) }}
+  - name: database
+    emptyDir: {}
+  {{- end }}
+  {{- with .Values.nodeSelector }}
+nodeSelector:
+  {{- toYaml . | nindent 8 }}
+  {{- end }}
+  {{- with .Values.affinity }}
+affinity:
+  {{- toYaml . | nindent 8 }}
+  {{- end }}
+  {{- with .Values.tolerations }}
+tolerations:
+  {{- toYaml . | nindent 8 }}
+  {{- end }}
 {{- if and .Values.ca.db.enabled .Values.ca.db.persistent }}
-  volumeClaimTemplates:
+volumeClaimTemplates:
   - metadata:
       name: database
       labels:
@@ -122,16 +143,16 @@
     spec:
       accessModes:
       {{- range .Values.ca.db.accessModes }}
-        - {{ . | quote }}
+      - {{ . | quote }}
       {{- end }}
       resources:
         requests:
           storage: {{ .Values.ca.db.size | quote }}
     {{- if .Values.ca.db.storageClass }}
     {{- if (eq "-" .Values.ca.db.storageClass) }}
-      storageClassName: ""
+    storageClassName: ""
     {{- else }}
-      storageClassName: {{ .Values.ca.db.storageClass | quote }}
+    storageClassName: {{ .Values.ca.db.storageClass | quote }}
     {{- end }}
     {{- end }}
 {{- end }}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/configmaps.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/configmaps.yaml
index 28ad488..1670d9a 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/configmaps.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/configmaps.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # ConfigMaps that will be updated by the configuration job:
 # 1. Step CA config directory.
 # 2. Step CA certs direcotry.
@@ -71,76 +92,76 @@
       permission_error "create secrets"
     fi
 {{ if .Values.autocert.enabled }}
-    echo -n "Checking for permission to create mutatingwebhookconfiguration in {{.Release.Namespace}} namespace: "
+echo -n "Checking for permission to create mutatingwebhookconfiguration in {{.Release.Namespace}} namespace: "
     kubectl auth can-i create mutatingwebhookconfiguration --namespace {{.Release.Namespace}}
     if [ $? -ne 0 ]; then
       permission_error "create mutatingwebhookconfiguration"
-    fi
+  fi
 {{- end }}
 
-    # Setting this here on purpose, after the above section which explicitly checks
-    # for and handles exit errors.
-    set -e
+# Setting this here on purpose, after the above section which explicitly checks
+# for and handles exit errors.
+  set -e
 
-    echo -e "\n\e[1mInitializating the CA...\e[0m"
+  echo -e "\n\e[1mInitializing the CA...\e[0m"
 
-    # CA password
+# CA password
 {{- if .Values.ca.password }}
-    CA_PASSWORD={{ quote .Values.ca.password }}
+  CA_PASSWORD={{ quote .Values.ca.password }}
 {{- else }}
-    CA_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
+  CA_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
 {{- end }}
-    # Provisioner password
+# Provisioner password
 {{- if .Values.ca.provisioner.password }}
-    CA_PROVISIONER_PASSWORD={{ quote .Values.ca.provisioner.password }}
+  CA_PROVISIONER_PASSWORD={{ quote .Values.ca.provisioner.password }}
 {{- else }}
-    CA_PROVISIONER_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
+  CA_PROVISIONER_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
 {{- end }}
 
-    TMP_CA_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
-    TMP_CA_PROVISIONER_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
+  TMP_CA_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
+  TMP_CA_PROVISIONER_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
 
-    echo $CA_PASSWORD > $TMP_CA_PASSWORD
-    echo $CA_PROVISIONER_PASSWORD > $TMP_CA_PROVISIONER_PASSWORD
+  echo $CA_PASSWORD > $TMP_CA_PASSWORD
+  echo $CA_PROVISIONER_PASSWORD > $TMP_CA_PROVISIONER_PASSWORD
 
-    step ca init \
-      --name "{{.Values.ca.name}}" \
-      --dns "{{include "step-certificates.dns" .}}" \
-      --address "{{.Values.ca.address}}" \
-      --provisioner "{{.Values.ca.provisioner.name}}" \
-      --with-ca-url "{{include "step-certificates.url" .}}" \
-      --password-file "$TMP_CA_PASSWORD" \
-      --provisioner-password-file "$TMP_CA_PROVISIONER_PASSWORD" {{ if not .Values.ca.db.enabled }}--no-db{{ end }}
+  step ca init \
+  --name "{{.Values.ca.name}}" \
+  --dns "{{include "step-certificates.dns" .}}" \
+  --address "{{.Values.ca.address}}" \
+  --provisioner "{{.Values.ca.provisioner.name}}" \
+  --with-ca-url "{{include "step-certificates.url" .}}" \
+  --password-file "$TMP_CA_PASSWORD" \
+  --provisioner-password-file "$TMP_CA_PROVISIONER_PASSWORD" {{ if not .Values.ca.db.enabled }}--no-db{{ end }}
 
-    rm -f $TMP_CA_PASSWORD $TMP_CA_PROVISIONER_PASSWORD
+  rm -f $TMP_CA_PASSWORD $TMP_CA_PROVISIONER_PASSWORD
 
-    echo -e "\n\e[1mCreating configmaps and secrets in {{.Release.Namespace}} namespace ...\e[0m"
+  echo -e "\n\e[1mCreating configmaps and secrets in {{.Release.Namespace}} namespace ...\e[0m"
 
-    # Replace secrets created on helm install
-    # It allows to properly remove them on helm delete
-    kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-config --from-file $(step path)/config
-    kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-certs --from-file $(step path)/certs
-    kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-secrets --from-file $(step path)/secrets
+  # Replace secrets created on helm install
+  # It allows to properly remove them on helm delete
+  kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-config --from-file $(step path)/config
+  kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-certs --from-file $(step path)/certs
+  kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-secrets --from-file $(step path)/secrets
 
-    kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-ca-password --from-literal "password=${CA_PASSWORD}"
-    kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-provisioner-password --from-literal "password=${CA_PROVISIONER_PASSWORD}"
+  kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-ca-password --from-literal "password=${CA_PASSWORD}"
+  kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-provisioner-password --from-literal "password=${CA_PROVISIONER_PASSWORD}"
 
-    # Label all configmaps and secrets
-    kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-config {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-certs {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-secrets {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-ca-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-provisioner-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+# Label all configmaps and secrets
+kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-config {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-certs {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-secrets {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-ca-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-provisioner-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
 
-    # Patch webhook if autocert is enabled
+# Patch webhook if autocert is enabled
 {{ if .Values.autocert.enabled }}
-    CA_BUNDLE=$(cat $(step path)/certs/root_ca.crt | base64 | tr -d '\n')
-    kubectl patch mutatingwebhookconfigurations {{ .Release.Name }}-autocert-webhook-config \
-      --type json -p="[{\"op\":\"replace\",\"path\":\"/webhooks/0/clientConfig/caBundle\",\"value\":\"$CA_BUNDLE\"}]"
+  CA_BUNDLE=$(cat $(step path)/certs/root_ca.crt | base64 | tr -d '\n')
+  kubectl patch mutatingwebhookconfigurations {{ .Release.Name }}-autocert-webhook-config \
+  --type json -p="[{\"op\":\"replace\",\"path\":\"/webhooks/0/clientConfig/caBundle\",\"value\":\"$CA_BUNDLE\"}]"
 {{- end }}
 
-    echo -e "\n\e[1mStep Certificates installed!\e[0m"
-    echo
-    echo "CA URL: {{include "step-certificates.url" .}}"
-    echo "CA Fingerprint: $(step certificate fingerprint $(step path)/certs/root_ca.crt)"
-    echo
\ No newline at end of file
+  echo -e "\n\e[1mStep Certificates installed!\e[0m"
+  echo
+echo "CA URL: {{include "step-certificates.url" .}}"
+echo "CA Fingerprint: $(step certificate fingerprint $(step path)/certs/root_ca.crt)"
+  echo
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ingress.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ingress.yaml
index 53264f1..240bdaf 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ingress.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/ingress.yaml
@@ -1,36 +1,57 @@
-{{- if .Values.ingress.enabled -}}
-{{- $fullName := include "step-certificates.fullname" . -}}
+# *****************************************************************************
+#
+  # Licensed to the Apache Software Foundation (ASF) under one
+  # or more contributor license agreements.  See the NOTICE file
+  # distributed with this work for additional information
+  # regarding copyright ownership.  The ASF licenses this file
+  # to you under the Apache License, Version 2.0 (the
+  # "License"); you may not use this file except in compliance
+  # with the License.  You may obtain a copy of the License at
+  #
+  #   http://www.apache.org/licenses/LICENSE-2.0
+  #
+  # Unless required by applicable law or agreed to in writing,
+  # software distributed under the License is distributed on an
+  # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  # KIND, either express or implied.  See the License for the
+  # specific language governing permissions and limitations
+  # under the License.
+  #
+  # ******************************************************************************
+
+  {{- if .Values.ingress.enabled -}}
+  {{- $fullName := include "step-certificates.fullname" . -}}
 apiVersion: extensions/v1beta1
 kind: Ingress
 metadata:
   name: {{ $fullName }}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+  {{ include "step-certificates.labels" . | indent 4 }}
   {{- with .Values.ingress.annotations }}
-  annotations:
-    {{- toYaml . | nindent 4 }}
+annotations:
+  {{- toYaml . | nindent 4 }}
   {{- end }}
 spec:
 {{- if .Values.ingress.tls }}
-  tls:
+tls:
   {{- range .Values.ingress.tls }}
-    - hosts:
-      {{- range .hosts }}
-        - {{ . | quote }}
-      {{- end }}
-      secretName: {{ .secretName }}
+- hosts:
+  {{- range .hosts }}
+  - {{ . | quote }}
+  {{- end }}
+  secretName: {{ .secretName }}
   {{- end }}
 {{- end }}
-  rules:
+rules:
   {{- range .Values.ingress.hosts }}
-    - host: {{ .host | quote }}
-      http:
-        paths:
-        {{- range .paths }}
-          - path: {{ . }}
-            backend:
-              serviceName: {{ $fullName }}
-              servicePort: http
-        {{- end }}
+- host: {{ .host | quote }}
+  http:
+    paths:
+    {{- range .paths }}
+    - path: {{ . }}
+      backend:
+        serviceName: {{ $fullName }}
+        servicePort: http
+    {{- end }}
   {{- end }}
 {{- end }}
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/rbac.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/rbac.yaml
index 6f4e137..0534856 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/rbac.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/rbac.yaml
@@ -1,4 +1,25 @@
-{{- if .Release.IsInstall -}}
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+  {{- if .Release.IsInstall -}}
 apiVersion: rbac.authorization.k8s.io/v1
 kind: Role
 metadata:
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/secrets.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/secrets.yaml
index 567a989..68d0b8d 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/secrets.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/secrets.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # Secrets that will be updated by the configuration job:
 # 1. CA keys password.
 # 2. Provisioner password.
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/service.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/service.yaml
index 4ec0783..dccae38 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/service.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/service.yaml
@@ -1,19 +1,40 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: v1
 kind: Service
 metadata:
   name: {{ include "step-certificates.fullname" . }}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+  {{ include "step-certificates.labels" . | indent 4 }}
 spec:
   type: {{ .Values.service.type }}
   ports:
     - port: {{ .Values.service.port }}
       targetPort: {{ .Values.service.targetPort }}
 {{- if .Values.service.nodePort }}
-      nodePort: {{ .Values.service.nodePort }}
+nodePort: {{ .Values.service.nodePort }}
 {{- end }}
-      protocol: TCP
-      name: https
-  selector:
-    app.kubernetes.io/name: {{ include "step-certificates.name" . }}
-    app.kubernetes.io/instance: {{ .Release.Name }}
\ No newline at end of file
+protocol: TCP
+name: https
+selector:
+  app.kubernetes.io/name: {{ include "step-certificates.name" . }}
+  app.kubernetes.io/instance: {{ .Release.Name }}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/tests/test-connection.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/tests/test-connection.yaml
index 5ae87c6..4fe296d 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/tests/test-connection.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/templates/tests/test-connection.yaml
@@ -1,11 +1,32 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: v1
 kind: Pod
 metadata:
   name: "{{ include "step-certificates.fullname" . }}-test-connection"
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
-  annotations:
-    "helm.sh/hook": test-success
+  {{ include "step-certificates.labels" . | indent 4 }}
+annotations:
+  "helm.sh/hook": test-success
 spec:
   containers:
     - name: wget
diff --git a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/values.yaml b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/values.yaml
index a8141d6..14a3d3d 100644
--- a/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/values.yaml
+++ b/infrastructure-provisioning/terraform/aws/ssn-helm-charts/main/step-ca-chart/values.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # Default values for step-certificates.
 
 # replicaCount is the number of replicas of step-certificates.
@@ -62,7 +83,7 @@
     storageClass: ${storage_class_name}
     # accessModes defines the Persistent Volume Access Mode.
     accessModes:
-    - ReadWriteOnce
+      - ReadWriteOnce
     # size is the Persistent Volume size.
     size: 10Gi
   # runAsRoot runs the ca as root instead of the step user. This is required in
@@ -91,7 +112,7 @@
   #   memory: 128Mi
   # requests:
   #   cpu: 100m
-  #   memory: 128Mi
+#   memory: 128Mi
 
 # nodeSelector contains the node labels for pod assignment.
 nodeSelector: {}
diff --git a/infrastructure-provisioning/terraform/azure/endpoint/main/sg.tf b/infrastructure-provisioning/terraform/azure/endpoint/main/sg.tf
index 9ab63cc..08984f3 100644
--- a/infrastructure-provisioning/terraform/azure/endpoint/main/sg.tf
+++ b/infrastructure-provisioning/terraform/azure/endpoint/main/sg.tf
@@ -57,6 +57,20 @@
   protocol                    = "TCP"
 }
 
+resource "azurerm_network_security_rule" "inbound-3" {
+  resource_group_name         = data.azurerm_resource_group.data-endpoint-resource-group.name
+  network_security_group_name = azurerm_network_security_group.enpoint-sg.name
+  name                        = "inbound-3"
+  direction                   = "Inbound"
+  access                      = "Allow"
+  priority                    = 300
+  source_address_prefix       = "*"
+  source_port_range           = "*"
+  destination_address_prefix  = "*"
+  destination_port_range      = "8088"
+  protocol                    = "TCP"
+}
+
 resource "azurerm_network_security_rule" "outbound-1" {
   resource_group_name         = data.azurerm_resource_group.data-endpoint-resource-group.name
   network_security_group_name = azurerm_network_security_group.enpoint-sg.name
diff --git a/infrastructure-provisioning/terraform/azure/endpoint/main/variables.tf b/infrastructure-provisioning/terraform/azure/endpoint/main/variables.tf
index a00ce18..abc7e97 100644
--- a/infrastructure-provisioning/terraform/azure/endpoint/main/variables.tf
+++ b/infrastructure-provisioning/terraform/azure/endpoint/main/variables.tf
@@ -61,4 +61,22 @@
 
 variable "endpoint_volume_size" {}
 
-variable "key_path" {}
\ No newline at end of file
+variable "key_path" {}
+
+variable "authentication_file" {
+  default = ""
+}
+
+variable "offer_number" {}
+
+variable "currency" {}
+
+variable "locale" {}
+
+variable "region_info" {}
+
+variable "mongo_password" {}
+
+variable "mongo_host" {}
+
+variable "billing_enable" {}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/deploy/billing_app_aws.yml b/infrastructure-provisioning/terraform/bin/deploy/billing_app_aws.yml
new file mode 100644
index 0000000..dd33a9e
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/billing_app_aws.yml
@@ -0,0 +1,55 @@
+# *****************************************************************************
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+#
+# ******************************************************************************
+
+spring:
+  main:
+    allow-bean-definition-overriding: true
+  data:
+    mongodb:
+      username: admin
+      password: MONGO_PASSWORD
+      database: dlabdb
+      port: 27017
+      host: MONGO_HOST
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /home/OS_USER/keys/endpoint.keystore.jks
+server.ssl.key-store-password: KEY_STORE_PASSWORD
+server.ssl.key-alias: endpoint
+
+logging:
+  file: /var/opt/dlab/log/ssn/billing.log
+  level:
+    com:
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: dlab
+  resource: KEYCLOAK_CLIENT_ID
+  credentials.secret: CLIENT_SECRET
+  ssl-required: none
+  auth-server-url: KEYCLOAK_AUTH_SERVER_URL
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/deploy/billing_aws.yml b/infrastructure-provisioning/terraform/bin/deploy/billing_aws.yml
new file mode 100644
index 0000000..41add93
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/billing_aws.yml
@@ -0,0 +1,94 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+# Specify the parameters enclosed in angle brackets.
+# Run the follows command to get help for details of configuration:
+# java -jar billing-1.0.jar --help conf
+# java -jar billing-1.0.jar --help {adapter | parser | filter | logappender} [name]
+
+billingEnabled: true
+
+host: MONGO_HOST
+port: 27017
+username: admin
+password: MONGO_PASSWORD
+database: dlabdb
+
+# Adapter for reading source data. Known types: file, s3file
+adapterIn:
+  - type: s3file
+    bucket: BILLING_BUCKET_NAME
+    path: REPORT_PATH
+    awsJobEnabled: AWS_JOB_ENABLED
+    accountId: ACCOUNT_ID
+    accessKeyId: ACCESS_KEY_ID
+    secretAccessKey: SECRET_ACCESS_KEY
+
+# Adapter for writing converted data. Known types: console, file, s3file, mongodb
+adapterOut:
+  - type: mongodlab
+    host: MONGO_HOST
+    port: 27017
+    username: admin
+    password: MONGO_PASSWORD
+    database: dlabdb
+#    bufferSize: 10000
+    upsert: true
+    serviceBaseName: SERVICE_BASE_NAME
+
+# Filter for source and converted data.
+filter:
+  - type: aws
+    currencyCode: USD
+    columnDlabTag: CONF_BILLING_TAG
+    serviceBaseName: SERVICE_BASE_NAME
+
+
+# Parser of source data to common format.
+parser:
+  - type: csv
+    headerLineNo: 1
+    skipLines: 1
+    columnMapping: >-
+      dlab_id=DLAB_ID;usage_date=USAGE_DATE;product=PRODUCT;
+      usage_type=USAGE_TYPE;usage=USAGE;cost=COST;
+      resource_id=RESOURCE_ID;tags=TAGS
+    aggregate: day
+
+
+# Logging configuration.
+logging:
+ # Default logging level
+  level: INFO
+  # Logging levels for appenders.
+  loggers:
+    com.epam: DEBUG
+    org.apache.http: WARN
+    org.mongodb.driver: WARN
+    org.hibernate: WARN
+ # Logging appenders
+  appenders:
+    #- type: console
+    - type: file
+      currentLogFilename: /var/opt/dlab/log/ssn/billing.log
+      archive: true
+      archivedLogFilenamePattern: /var/opt/dlab/log/ssn/billing-%d{yyyy-MM-dd}.log.gz
+      archivedFileCount: 10
diff --git a/infrastructure-provisioning/terraform/bin/deploy/billing_azure.yml b/infrastructure-provisioning/terraform/bin/deploy/billing_azure.yml
new file mode 100644
index 0000000..6953d49
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/billing_azure.yml
@@ -0,0 +1,80 @@
+# *****************************************************************************
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+#
+# ******************************************************************************
+
+spring:
+  main:
+    allow-bean-definition-overriding: true
+  data:
+    mongodb:
+      username: admin
+      password: MONGO_PASSWORD
+      database: dlabdb
+      port: MONGO_PORT
+      host: MONGO_HOST
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /home/OS_USER/keys/endpoint.keystore.jks
+server.ssl.key-store-password: KEY_STORE_PASSWORD
+server.ssl.key-alias: endpoint
+
+logging:
+  file: /var/opt/dlab/log/ssn/billing.log
+  level:
+    com:
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: dlab
+  resource: KEYCLOAK_CLIENT_ID
+  credentials.secret: KEYCLOAK_CLIENT_SECRET
+  ssl-required: none
+  auth-server-url: KEYCLOAK_AUTH_SERVER_URL
+
+dlab:
+  sbn: SERVICE_BASE_NAME
+  billingEnabled: true
+  clientId: CLIENT_ID
+  clientSecret: CLIENT_SECRET
+  tenantId: TENANT_ID
+  subscriptionId: SUBSCRIPTION_ID
+  authenticationFile: AUTHENTICATION_FILE
+  # Billing configuration for RateCard API. For more details please see https://msdn.microsoft.com/en-us/library/mt219004.aspx
+  offerNumber: OFFER_NUMBER
+  currency: CURRENCY
+  locale: LOCALE
+  regionInfo: REGION_INFO
+  initialDelay: 10
+  period: 60
+  aggregationOutputMongoDataSource:
+    host: MONGO_HOST
+    port: MONGO_PORT
+    username: admin
+    password: MONGO_PASSWORD
+    database: dlabdb
+  ssnStorageAccountTagName: <AZURE_SSN_STORAGE_ACCOUNT_TAG>
+  sharedStorageAccountTagName: <AZURE_SHARED_STORAGE_ACCOUNT_TAG>
+  datalakeTagName: <AZURE_DATALAKE_TAG>
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/deploy/billing_gcp.yml b/infrastructure-provisioning/terraform/bin/deploy/billing_gcp.yml
new file mode 100644
index 0000000..af793ba
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/billing_gcp.yml
@@ -0,0 +1,59 @@
+# *****************************************************************************
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+#
+# ******************************************************************************
+
+spring:
+  main:
+    allow-bean-definition-overriding: true
+  data:
+    mongodb:
+      username: admin
+      password: MONGO_PASSWORD
+      database: dlabdb
+      port: 27017
+      host: MONGO_HOST
+dlab:
+  sbn: SERVICE_BASE_NAME
+  bigQueryDataset: DATASET_NAME
+  cron: 0 0 * * * *
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /home/OS_USER/keys/endpoint.keystore.jks
+server.ssl.key-store-password: KEY_STORE_PASSWORD
+server.ssl.key-alias: endpoint
+
+logging:
+  file: /var/opt/dlab/log/ssn/billing.log
+  level:
+    com:
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: dlab
+  resource: KEYCLOAK_CLIENT_ID
+  credentials.secret: CLIENT_SECRET
+  ssl-required: none
+  auth-server-url: KEYCLOAK_AUTH_SERVER_URL
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/deploy/endpoint_fab.py b/infrastructure-provisioning/terraform/bin/deploy/endpoint_fab.py
index 814cad8..47ee469 100644
--- a/infrastructure-provisioning/terraform/bin/deploy/endpoint_fab.py
+++ b/infrastructure-provisioning/terraform/bin/deploy/endpoint_fab.py
@@ -1,12 +1,33 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import argparse
+import logging
+import random
+import string
+import sys
+import time
+import traceback
 from fabric import Connection
 from patchwork.files import exists
-import logging
-import argparse
-import sys
-import traceback
-import time
-import string
-import random
 
 conn = None
 args = None
@@ -248,6 +269,39 @@
         traceback.print_exc()
         sys.exit(1)
 
+def ensure_mongo_endpoint():
+    try:
+        print('[INSTALLING MONGO DATABASE]')
+        if not exists(conn, '/home/{}/.ensure_dir/mongo_ensured'.format(args.os_user)):
+            conn.sudo("bash -c 'wget -qO - https://www.mongodb.org/static/pgp/server-4.2.asc | sudo apt-key add -'")
+            conn.sudo("bash -c 'echo \"deb [ arch=amd64,arm64 ] "
+                      "https://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/4.2 multiverse\" | sudo "
+                      "tee /etc/apt/sources.list.d/mongodb-org-4.2.list'")
+            conn.sudo('apt-get update')
+            conn.sudo('apt-get -y --allow-unauthenticated install mongodb-org')
+            conn.sudo('systemctl enable mongod.service')
+            conn.sudo('sudo apt-get -y install python-pip')
+            conn.sudo('pip install -U pymongo pyyaml --no-cache-dir ')
+            conn.sudo('touch /home/{}/.ensure_dir/mongo_ensured'
+                      .format(args.os_user))
+        print('[CONFIGURING MONGO DATABASE]')
+        if not exists(conn, '/lib/systemd/system/mongod.service'):
+            conn.put('./mongo_files/mongod.service_template', '/tmp/mongod.service_template')
+            conn.sudo('sed -i "s/MONGO_USR/mongodb/g" /tmp/mongod.service_template'.format(args.os_user))
+            conn.sudo('cp -i /tmp/mongod.service_template /lib/systemd/system/mongod.service')
+            conn.sudo('systemctl daemon-reload')
+            conn.sudo('systemctl enable mongod.service')
+        if not exists(conn, '/tmp/configure_mongo.py'):
+            conn.put('./mongo_files/configure_mongo.py', '/tmp/configure_mongo.py')
+            conn.sudo('sed -i "s|PASSWORD|{}|g" /tmp/configure_mongo.py'.format(args.mongo_password))
+        if not exists(conn, '/tmp/mongo_roles.json'):
+            conn.put('./mongo_files/gcp/mongo_roles.json', '/tmp/mongo_roles.json')
+        conn.sudo('python /tmp/configure_mongo.py')
+    except Exception as err:
+        logging.error('Failed to install Mongo: ', str(err))
+        traceback.print_exc()
+        sys.exit(1)
+
 
 def create_key_dir_endpoint():
     try:
@@ -293,8 +347,7 @@
             supervisor_conf = '/etc/supervisor/conf.d/supervisor_svc.conf'
             if not exists(conn, '{}/tmp'.format(args.dlab_path)):
                 conn.run('mkdir -p {}/tmp'.format(args.dlab_path))
-            conn.put('./supervisor_svc.conf',
-                     '{}/tmp/supervisor_svc.conf'.format(args.dlab_path))
+            conn.put('./supervisor_svc.conf', '{}/tmp/supervisor_svc.conf'.format(args.dlab_path))
             dlab_conf_dir = '{}/conf/'.format(args.dlab_path)
             if not exists(conn, dlab_conf_dir):
                 conn.run('mkdir -p {}'.format(dlab_conf_dir))
@@ -309,6 +362,11 @@
                                            'subnet-id'.format(interface)).stdout
                 args.vpc2_id = args.vpc_id
                 args.subnet2_id = args.subnet_id
+                conn.sudo('sed -i "s|CONF_PARAMETER|--spring.config.location={0}billing_app.yml --conf |g" {1}/tmp/supervisor_svc.conf'
+                          .format(dlab_conf_dir, args.dlab_path))
+            elif args.cloud_provider == 'gcp' or args.cloud_provider == 'azure':
+                conn.sudo('sed -i "s|CONF_PARAMETER|--spring.config.location=|g" {}/tmp/supervisor_svc.conf'
+                          .format(args.dlab_path))
             conn.sudo('sed -i "s|OS_USR|{}|g" {}/tmp/supervisor_svc.conf'
                       .format(args.os_user, args.dlab_path))
             conn.sudo('sed -i "s|WEB_CONF|{}|g" {}/tmp/supervisor_svc.conf'
@@ -538,8 +596,15 @@
                      '2.2.jar --no-check-certificate'
                      .format(web_path, args.repository_user,
                              args.repository_pass, args.repository_address))
-            conn.run('mv {0}/*.jar {0}/provisioning-service.jar'
+            conn.run('mv {0}/provisioning-service-2.2.jar {0}/provisioning-service.jar'
                      .format(web_path))
+            conn.run('wget -P {}  --user={} --password={} '
+                     'https://{}/repository/packages/billing-{}-'
+                     '2.2.jar --no-check-certificate'
+                     .format(web_path, args.repository_user,
+                             args.repository_pass, args.repository_address, args.cloud_provider))
+            conn.run('mv {0}/billing-{1}-2.2.jar {0}/billing.jar'
+                     .format(web_path, args.cloud_provider))
             conn.sudo('touch {}'.format(ensure_file))
     except Exception as err:
         logging.error('Failed to download jar-provisioner: ', str(err))
@@ -647,6 +712,259 @@
         print('Failed to configure guacamole: ', str(err))
         return False
 
+def configure_billing_endpoint(endpoint_keystore_password):
+    try:
+        if args.billing_enable:
+            conn.put('./billing_{}.yml'.format(args.cloud_provider), '{}/conf/billing.yml'
+                     .format(args.dlab_path))
+            billing_yml_path = "{}/conf/billing.yml".format(args.dlab_path)
+            if args.cloud_provider == 'aws':
+
+                conn.put('./billing_app_{}.yml'.format(args.cloud_provider), '{}/conf/billing_app.yml'
+                         .format(args.dlab_path))
+                billing_app_yml_path = "{}/conf/billing_app.yml".format(args.dlab_path)
+                billing_app_properties = [
+                    {
+                        'key': "MONGO_HOST",
+                        'value': args.mongo_host
+                    },
+                    {
+                        'key': "MONGO_PASSWORD",
+                        'value': args.mongo_password
+                    },
+                    {
+                        'key': "MONGO_PORT",
+                        'value': args.mongo_port
+                    },
+                    {
+                        'key': "OS_USER",
+                        'value': args.os_user
+                    },
+                    {
+                        'key': "KEY_STORE_PASSWORD",
+                        'value': endpoint_keystore_password
+                    },
+                    {
+                        'key': "KEYCLOAK_CLIENT_ID",
+                        'value': args.keycloak_client_id
+                    },
+                    {
+                        'key': "CLIENT_SECRET",
+                        'value': args.keycloak_client_secret
+                    },
+                    {
+                        'key': "KEYCLOAK_AUTH_SERVER_URL",
+                        'value': args.keycloak_auth_server_url
+                    }
+                ]
+                for param in billing_app_properties:
+                    conn.sudo('sed -i "s|{0}|{1}|g" {2}'
+                              .format(param['key'], param['value'], billing_app_yml_path))
+                if args.aws_job_enabled == 'true':
+                    args.tag_resource_id = 'resourceTags' + ':' + args.tag_resource_id
+                billing_properties = [
+                    {
+                        'key': "MONGO_HOST",
+                        'value': args.mongo_host
+                    },
+                    {
+                        'key': "MONGO_PASSWORD",
+                        'value': args.mongo_password
+                    },
+                    {
+                        'key': "MONGO_PORT",
+                        'value': args.mongo_port
+                    },
+                    {
+                        'key': "BILLING_BUCKET_NAME",
+                        'value': args.billing_bucket
+                    },
+                    {
+                        'key': "REPORT_PATH",
+                        'value': args.report_path
+                    },
+                    {
+                        'key': "AWS_JOB_ENABLED",
+                        'value': args.aws_job_enabled
+                    },
+                    {
+                        'key': "ACCOUNT_ID",
+                        'value': args.billing_aws_account_id
+                    },
+                    {
+                        'key': "ACCESS_KEY_ID",
+                        'value': args.access_key_id
+                    },
+                    {
+                        'key': "SECRET_ACCESS_KEY",
+                        'value': args.secret_access_key
+                    },
+                    {
+                        'key': "CONF_BILLING_TAG",
+                        'value': args.billing_tag
+                    },
+                    {
+                        'key': "SERVICE_BASE_NAME",
+                        'value': args.service_base_name
+                    },
+                    {
+                        'key': "DLAB_ID",
+                        'value': args.billing_dlab_id
+                    },
+                    {
+                        'key': "USAGE_DATE",
+                        'value': args.billing_usage_date
+                    },
+                    {
+                        'key': "PRODUCT",
+                        'value': args.billing_product
+                    },
+                    {
+                        'key': "USAGE_TYPE",
+                        'value': args.billing_usage_type
+                    },
+                    {
+                        'key': "USAGE",
+                        'value': args.billing_usage
+                    },
+                    {
+                        'key': "COST",
+                        'value': args.billing_usage_cost
+                    },
+                    {
+                        'key': "RESOURCE_ID",
+                        'value': args.billing_resource_id
+                    },
+                    {
+                        'key': "TAGS",
+                        'value': args.billing_tags
+                    }
+                ]
+            elif args.cloud_provider == 'gcp':
+                billing_properties = [
+                    {
+                        'key': "SERVICE_BASE_NAME",
+                        'value': args.service_base_name
+                    },
+                    {
+                        'key': "OS_USER",
+                        'value': args.os_user
+                    },
+                    {
+                        'key': "MONGO_PASSWORD",
+                        'value': args.mongo_password
+                    },
+                    {
+                        'key': "MONGO_PORT",
+                        'value': args.mongo_port
+                    },
+                    {
+                        'key': "MONGO_HOST",
+                        'value': args.mongo_host
+                    },
+                    {
+                        'key': "KEY_STORE_PASSWORD",
+                        'value': endpoint_keystore_password
+                    },
+                    {
+                        'key': "DATASET_NAME",
+                        'value': args.billing_dataset_name
+                    },
+                    {
+                        'key': "KEYCLOAK_CLIENT_ID",
+                        'value': args.keycloak_client_id
+                    },
+                    {
+                        'key': "CLIENT_SECRET",
+                        'value': args.keycloak_client_secret
+                    },
+                    {
+                        'key': "KEYCLOAK_AUTH_SERVER_URL",
+                        'value': args.keycloak_auth_server_url
+                    }
+                ]
+            elif args.cloud_provider == 'azure':
+                billing_properties = [
+                    {
+                        'key': "SERVICE_BASE_NAME",
+                        'value': args.service_base_name
+                    },
+                    {
+                        'key': "OS_USER",
+                        'value': args.os_user
+                    },
+                    {
+                        'key': "MONGO_PASSWORD",
+                        'value': args.mongo_password
+                    },
+                    {
+                        'key': "MONGO_PORT",
+                        'value': args.mongo_port
+                    },
+                    {
+                        'key': "MONGO_HOST",
+                        'value': args.mongo_host
+                    },
+                    {
+                        'key': "KEY_STORE_PASSWORD",
+                        'value': endpoint_keystore_password
+                    },
+                    {
+                        'key': "KEYCLOAK_CLIENT_ID",
+                        'value': args.keycloak_client_id
+                    },
+                    {
+                        'key': "KEYCLOAK_CLIENT_SECRET",
+                        'value': args.keycloak_client_secret
+                    },
+                    {
+                        'key': "KEYCLOAK_AUTH_SERVER_URL",
+                        'value': args.keycloak_auth_server_url
+                    },
+                    {
+                        'key': "CLIENT_ID",
+                        'value': args.azure_client_id
+                    },
+                    {
+                        'key': "CLIENT_SECRET",
+                        'value': args.azure_client_secret
+                    },
+                    {
+                        'key': "TENANT_ID",
+                        'value': args.tenant_id
+                    },
+                    {
+                        'key': "SUBSCRIPTION_ID",
+                        'value': args.subscription_id
+                    },
+                    {
+                        'key': "AUTHENTICATION_FILE",
+                        'value': args.auth_file_path
+                    },
+                    {
+                        'key': "OFFER_NUMBER",
+                        'value': args.offer_number
+                    },
+                    {
+                        'key': "CURRENCY",
+                        'value': args.currency
+                    },
+                    {
+                        'key': "LOCALE",
+                        'value': args.locale
+                    },
+                    {
+                        'key': "REGION_INFO",
+                        'value': args.region_info
+                    }
+                ]
+            for param in billing_properties:
+                conn.sudo('sed -i "s|{0}|{1}|g" {2}'
+                          .format(param['key'], param['value'], billing_yml_path))
+    except Exception as err:
+        traceback.print_exc()
+        print('Failed to configure billing: ', str(err))
+        return False
 
 def init_args():
     global args
@@ -659,12 +977,12 @@
     parser.add_argument('--hostname', type=str, default='')
     parser.add_argument('--os_user', type=str, default='dlab-user')
     parser.add_argument('--cloud_provider', type=str, default='')
-    parser.add_argument('--mongo_host', type=str, default='MONGO_HOST')
+    parser.add_argument('--mongo_host', type=str, default='localhost')
     parser.add_argument('--mongo_port', type=str, default='27017')
     parser.add_argument('--ss_host', type=str, default='')
     parser.add_argument('--ss_port', type=str, default='8443')
     parser.add_argument('--ssn_ui_host', type=str, default='')
-    # parser.add_argument('--mongo_password', type=str, default='')
+    parser.add_argument('--mongo_password', type=str, default='')
     parser.add_argument('--repository_address', type=str, default='')
     parser.add_argument('--repository_port', type=str, default='')
     parser.add_argument('--repository_user', type=str, default='')
@@ -699,6 +1017,7 @@
     parser.add_argument('--azure_datalake_tag', type=str, default='')
     parser.add_argument('--azure_datalake_enabled', type=str, default='')
     parser.add_argument('--azure_client_id', type=str, default='')
+    parser.add_argument('--azure_client_secret', type=str, default='')
     parser.add_argument('--gcp_project_id', type=str, default='')
     parser.add_argument('--ldap_host', type=str, default='')
     parser.add_argument('--ldap_dn', type=str, default='')
@@ -713,6 +1032,31 @@
     parser.add_argument('--image_enabled', type=str, default='true')
     parser.add_argument('--auth_file_path', type=str, default='')
 
+    #Billing parameter
+    parser.add_argument('--billing_enable', type=bool, default=False)
+    parser.add_argument('--aws_job_enabled', type=str, default='false')
+    parser.add_argument('--billing_bucket', type=str, default='')
+    parser.add_argument('--report_path', type=str, default='')
+    parser.add_argument('--billing_aws_account_id', type=str, default='')
+    parser.add_argument('--access_key_id', type=str, default='')
+    parser.add_argument('--secret_access_key', type=str, default='')
+    parser.add_argument('--billing_tag', type=str, default='dlab')
+    parser.add_argument('--billing_dlab_id', type=str, default='resource_tags_user_user_tag')
+    parser.add_argument('--billing_usage_date', type=str, default='line_item_usage_start_date')
+    parser.add_argument('--billing_product', type=str, default='product_product_name')
+    parser.add_argument('--billing_usage_type', type=str, default='line_item_usage_type')
+    parser.add_argument('--billing_usage', type=str, default='line_item_usage_amount')
+    parser.add_argument('--billing_usage_cost', type=str, default='line_item_blended_cost')
+    parser.add_argument('--billing_resource_id', type=str, default='line_item_resource_id')
+    parser.add_argument('--billing_tags', type=str, default='line_item_operation,line_item_line_item_description')
+    parser.add_argument('--tenant_id', type=str, default='')
+    parser.add_argument('--subscription_id', type=str, default='')
+    parser.add_argument('--offer_number', type=str, default='')
+    parser.add_argument('--currency', type=str, default='')
+    parser.add_argument('--locale', type=str, default='')
+    parser.add_argument('--region_info', type=str, default='')
+    parser.add_argument('--billing_dataset_name', type=str, default='')
+
     # TEMPORARY
     parser.add_argument('--ssn_k8s_nlb_dns_name', type=str, default='')
     parser.add_argument('--ssn_k8s_alb_dns_name', type=str, default='')
@@ -797,6 +1141,9 @@
     logging.info("Installing Docker")
     ensure_docker_endpoint()
 
+    logging.info("Installing Mongo Database")
+    ensure_mongo_endpoint()
+
     logging.info("Configuring Supervisor")
     configure_supervisor_endpoint(endpoint_keystore_password)
 
@@ -818,6 +1165,9 @@
     logging.info("Configuring guacamole")
     configure_guacamole()
 
+    logging.info("Configuring billing")
+    configure_billing_endpoint(endpoint_keystore_password)
+
     logging.info("Starting supervisor")
     start_supervisor_endpoint()
 
diff --git a/infrastructure-provisioning/terraform/bin/deploy/mongo_files/configure_mongo.py b/infrastructure-provisioning/terraform/bin/deploy/mongo_files/configure_mongo.py
new file mode 100644
index 0000000..14b89b4
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/mongo_files/configure_mongo.py
@@ -0,0 +1,95 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+from pymongo import MongoClient
+import yaml
+import subprocess
+import time
+
+path = "/etc/mongod.conf"
+outfile = "/etc/mongo_params.yml"
+
+def add_2_yml_config(path, section, param, value):
+    try:
+        try:
+            with open(path, 'r') as config_yml_r:
+                config_orig = yaml.load(config_yml_r)
+        except:
+            config_orig = {}
+        sections = []
+        for i in config_orig:
+            sections.append(i)
+        if section in sections:
+            config_orig[section].update({param:value})
+        else:
+            config_orig.update({section:{param:value}})
+        with open(path, 'w') as outfile_yml_w:
+            yaml.dump(config_orig, outfile_yml_w, default_flow_style=False)
+        return True
+    except:
+        print("Could not write the target file")
+        return False
+
+
+def read_yml_conf(path, section, param):
+    try:
+        with open(path, 'r') as config_yml:
+            config = yaml.load(config_yml)
+        result = config[section][param]
+        return result
+    except:
+        print("File does not exist")
+        return ''
+
+
+if __name__ == "__main__":
+    mongo_passwd = "PASSWORD"
+    mongo_ip = read_yml_conf(path,'net','bindIp')
+    mongo_port = read_yml_conf(path,'net','port')
+    #mongo_parameters = json.loads(args.mongo_parameters)
+    # Setting up admin's password and enabling security
+    client = MongoClient(mongo_ip + ':' + str(mongo_port))
+    pass_upd = True
+    try:
+        command = ['service', 'mongod', 'start']
+        subprocess.call(command, shell=False)
+        time.sleep(5)
+        client.dlabdb.add_user('admin', mongo_passwd, roles=[{'role':'userAdminAnyDatabase','db':'admin'}])
+        client.dlabdb.command('grantRolesToUser', "admin", roles=["readWrite"])
+        # set_mongo_parameters(client, mongo_parameters)
+
+        # client.dlabdb.security.create_index("expireAt", expireAfterSeconds=7200)
+        if add_2_yml_config(path,'security','authorization','enabled'):
+            command = ['service', 'mongod', 'restart']
+            subprocess.call(command, shell=False)
+    except:
+        print("Looks like MongoDB have already been secured")
+        pass_upd = False
+
+    # Generating output config
+    add_2_yml_config(outfile, 'network', 'ip', mongo_ip)
+    add_2_yml_config(outfile, 'network', 'port', mongo_port)
+    add_2_yml_config(outfile, 'account', 'user', 'admin')
+    if pass_upd:
+        add_2_yml_config(outfile, 'account', 'pass', mongo_passwd)
+
diff --git a/infrastructure-provisioning/terraform/bin/deploy/mongo_files/gcp/mongo_roles.json b/infrastructure-provisioning/terraform/bin/deploy/mongo_files/gcp/mongo_roles.json
new file mode 100644
index 0000000..43d12e3
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/mongo_files/gcp/mongo_roles.json
@@ -0,0 +1,268 @@
+[
+  {
+    "_id": "nbShapes_n1-highcpu-2_fetching",
+    "description": "Use n1-highcpu-2 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-highcpu-2"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbShapes_n1-highcpu-8_fetching",
+    "description": "Use n1-highcpu-8 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-highcpu-8"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbShapes_n1-highcpu-32_fetching",
+    "description": "Use n1-highcpu-32 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-highcpu-32"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbShapes_n1-highmem-4_fetching",
+    "description": "Use n1-highmem-4 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-highmem-4"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbShapes_n1-highmem-16_fetching",
+    "description": "Use n1-highmem-16 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-highmem-16"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbShapes_n1-highmem-32_fetching",
+    "description": "Use n1-highmem-32 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-highmem-32"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbShapes_n1-standard-2_fetching",
+    "description": "Use n1-standard-2 instance shape for notebook",
+    "exploratory_shapes": [
+      "n1-standard-2"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateDeeplearning",
+    "description": "Create Notebook Deep Learning",
+    "exploratories": [
+      "docker.dlab-deeplearning"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateJupyter",
+    "description": "Create Notebook Jupyter",
+    "exploratories": [
+      "docker.dlab-jupyter"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateJupyterLab",
+    "description": "Create Notebook JupyterLab",
+    "exploratories": [
+      "docker.dlab-jupyterlab"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateSuperset",
+    "description": "Create Notebook Superset",
+    "exploratories": [
+      "docker.dlab-superset"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateRstudio",
+    "description": "Create Notebook RStudio",
+    "exploratories": [
+      "docker.dlab-rstudio"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateTensor",
+    "description": "Create Notebook Jupyter with TensorFlow",
+    "exploratories": [
+      "docker.dlab-tensor"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateTensorRstudio",
+    "description": "Create Notebook RStudio with TensorFlow",
+    "exploratories": [
+      "docker.dlab-tensor-rstudio"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateZeppelin",
+    "description": "Create Notebook Apache Zeppelin",
+    "exploratories": [
+      "docker.dlab-zeppelin"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateDataEngine",
+    "description": "Create Data Engine",
+    "computationals": [
+      "docker.dlab-dataengine"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbCreateDataEngineService",
+    "description": "Create Data Engine Service",
+    "computationals": [
+      "docker.dlab-dataengine-service"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-standard-2_fetching",
+    "description": "Use n1-standard-2 instance shape for cluster",
+    "computational_shapes": [
+      "n1-standard-2"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-highmem-4_fetching",
+    "description": "Use n1-highmem-4 instance shape for cluster",
+    "computational_shapes": [
+      "n1-highmem-4"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-highmem-16_fetching",
+    "description": "Use n1-highmem-16 instance shape for cluster",
+    "computational_shapes": [
+      "n1-highmem-16"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-highmem-32_fetching",
+    "description": "Use n1-highmem-32 instance shape for cluster",
+    "computational_shapes": [
+      "n1-highmem-32"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-highcpu-8_fetching",
+    "description": "Use n1-highcpu-8 instance shape for cluster",
+    "computational_shapes": [
+      "n1-highcpu-8"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-highcpu-2_fetching",
+    "description": "Use n1-highcpu-2 instance shape for cluster",
+    "computational_shapes": [
+      "n1-highcpu-2"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "compShapes_n1-highcpu-32_fetching",
+    "description": "Use n1-highcpu-32 instance shape for cluster",
+    "computational_shapes": [
+      "n1-highcpu-32"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "nbBillingReportFull",
+    "description": "View full billing report for all users",
+    "pages": [
+      "/api/infrastructure_provision/billing"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
+    "_id": "admin",
+    "description": "Allow to execute administration operation",
+    "pages": [
+      "environment/*",
+      "/api/infrastructure/backup",
+      "/roleManagement",
+      "/api/settings",
+      "/user/settings",
+      "/api/project",
+      "/api/endpoint"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  }
+]
diff --git a/infrastructure-provisioning/terraform/bin/deploy/mongo_files/mongod.service_template b/infrastructure-provisioning/terraform/bin/deploy/mongo_files/mongod.service_template
new file mode 100644
index 0000000..cddbf66
--- /dev/null
+++ b/infrastructure-provisioning/terraform/bin/deploy/mongo_files/mongod.service_template
@@ -0,0 +1,34 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+[Unit]
+Description=High-performance, schema-free document-oriented database
+After=network.target
+Documentation=https://docs.mongodb.org/manual
+
+[Service]
+Type=forking
+User=MONGO_USR
+Group=MONGO_USR
+ExecStart=/usr/bin/mongod --quiet --config /etc/mongod.conf
+
+[Install]
+WantedBy=multi-user.target
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/deploy/provisioning.yml b/infrastructure-provisioning/terraform/bin/deploy/provisioning.yml
index 9d65fa2..abbbadf 100644
--- a/infrastructure-provisioning/terraform/bin/deploy/provisioning.yml
+++ b/infrastructure-provisioning/terraform/bin/deploy/provisioning.yml
@@ -67,6 +67,11 @@
     timeout: 3s
     connectionTimeout: 3s
 
+billingService:
+  jerseyClient:
+    timeout: 4m
+    connectionTimeout: 3s
+
 # Log out user on inactivity
 inactiveUserTimeoutMillSec: 7200000
 
@@ -76,7 +81,7 @@
 responseDirectory: /opt/dlab/tmp
 handlerDirectory: /opt/dlab/handlers
 dockerLogDirectory: ${LOG_ROOT_DIR}
-warmupPollTimeout: 25s
+warmupPollTimeout: 2m
 resourceStatusPollTimeout: 300m
 keyLoaderPollTimeout: 30m
 requestEnvStatusTimeout: 50s
diff --git a/infrastructure-provisioning/terraform/bin/deploy/renew_certificates.sh b/infrastructure-provisioning/terraform/bin/deploy/renew_certificates.sh
index 90f0e5f..ff3e46d 100644
--- a/infrastructure-provisioning/terraform/bin/deploy/renew_certificates.sh
+++ b/infrastructure-provisioning/terraform/bin/deploy/renew_certificates.sh
@@ -40,4 +40,4 @@
 keytool -importcert -trustcacerts -alias step-ca -file /etc/ssl/certs/root_ca.crt -noprompt -storepass changeit -keystore JAVA_HOME/lib/security/cacerts
 
 # Restarting service
-supervisorctl restart provserv
\ No newline at end of file
+supervisorctl restart all
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/deploy/supervisor_svc.conf b/infrastructure-provisioning/terraform/bin/deploy/supervisor_svc.conf
index ae7b87f..b170043 100644
--- a/infrastructure-provisioning/terraform/bin/deploy/supervisor_svc.conf
+++ b/infrastructure-provisioning/terraform/bin/deploy/supervisor_svc.conf
@@ -32,4 +32,14 @@
 user=root
 stdout_logfile=/var/log/application/provision-service.log
 redirect_stderr=true
+environment=DLAB_CONF_DIR="WEB_CONF"
+
+[program:billing]
+command=java -Xmx1024M -jar -Duser.timezone=UTC -Dfile.encoding=UTF-8 WEB_APP_DIR/billing.jar CONF_PARAMETERWEB_CONFbilling.yml
+directory=WEB_APP_DIR
+autorestart=true
+priority=20
+user=root
+stdout_logfile=/var/log/application/billing.log
+redirect_stderr=true
 environment=DLAB_CONF_DIR="WEB_CONF"
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/bin/dlab.py b/infrastructure-provisioning/terraform/bin/dlab.py
index bd24425..68b8739 100644
--- a/infrastructure-provisioning/terraform/bin/dlab.py
+++ b/infrastructure-provisioning/terraform/bin/dlab.py
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 import argparse
 import itertools
 import json
@@ -9,10 +30,9 @@
 import sys
 import time
 from abc import abstractmethod
-
+from deploy.endpoint_fab import start_deploy
 from fabric import Connection
 from patchwork.transfers import rsync
-from deploy.endpoint_fab import start_deploy
 
 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
 logging.basicConfig(level=logging.INFO, format='%(levelname)s-%(message)s')
@@ -985,6 +1005,15 @@
                   group='endpoint')
          .add_str('--ldap_users_group', 'ldap users group', required=True,
                   group='endpoint')
+         .add_bool('--billing_enable', 'Billing enable', group='endpoint', default=False)
+         .add_str('--mongo_password', 'Mongo database password', group='endpoint')
+         .add_str('--mongo_host', 'Mongo database host', group='endpoint', default='localhost')
+         .add_str('--billing_bucket', 'Billing bucket name', group='endpoint', default='')
+         .add_str('--report_path', 'The path to report folder', group='endpoint', default='')
+         .add_str('--aws_job_enabled', 'Billing format. Available options: true (aws), false(epam)', group='endpoint',
+                  default='false')
+         .add_str('--billing_aws_account_id', 'The ID of ASW linked account', group='endpoint', default='')
+         .add_str('--billing_tag', 'Billing tag', group='endpoint', default='dlab')
          )
         return params.build()
 
@@ -1167,6 +1196,10 @@
          .add_str('--endpoint_policies', 'Endpoint policies list', group='endpoint')
          .add_str('--endpoint_roles', 'Endpoint roles list', group='endpoint')
          .add_str('--bucket_region', 'Bucket region', group='endpoint')
+         .add_bool('--billing_enable', 'Billing enable', group='endpoint', default=False)
+         .add_str('--billing_dataset_name', 'Billing dataset name', group='endpoint')
+         .add_str('--mongo_password', 'Mongo database password', group='endpoint')
+         .add_str('--mongo_host', 'Mongo database host', group='endpoint', default='localhost')
          )
         return params.build()
 
@@ -1233,6 +1266,15 @@
          .add_str('--endpoint_shape', 'Instance shape of Endpoint.', default='Standard_DS2_v2', group='endpoint')
          .add_str('--endpoint_volume_size', 'Endpoint disk size', default='30', group='endpoint')
          .add_str('--additional_tag', 'Additional tag.', default='product:dlab', group='endpoint')
+         .add_str('--tenant_id', 'Azure tenant ID', group='endpoint', default='')
+         .add_str('--subscription_id', 'Azure subscription ID', group='endpoint', default='')
+         .add_str('--offer_number', 'Azure offer number', group='endpoint', default='')
+         .add_str('--currency', 'Azure currency for billing', group='endpoint', default='')
+         .add_str('--locale', 'Azure locale', group='endpoint', default='')
+         .add_str('--region_info', 'Azure region info', group='endpoint', default='')
+         .add_str('--mongo_password', 'Mongo database password', group='endpoint')
+         .add_str('--mongo_host', 'Mongo database host', group='endpoint', default='localhost')
+         .add_bool('--billing_enable', 'Billing enable', group='endpoint', default=False)
          )
         return params.build()
 
diff --git a/infrastructure-provisioning/terraform/bin/terraform-cli.py b/infrastructure-provisioning/terraform/bin/terraform-cli.py
index 8e1bd60..f8d593b 100755
--- a/infrastructure-provisioning/terraform/bin/terraform-cli.py
+++ b/infrastructure-provisioning/terraform/bin/terraform-cli.py
@@ -1,17 +1,38 @@
 #!/usr/bin/env python
-import itertools
-import json
-import os
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 import abc
 import argparse
-
-import time
-from fabric import Connection
-from patchwork.transfers import rsync
+import itertools
+import json
 import logging
+import os
 import os.path
 import sys
+import time
 from deploy.endpoint_fab import start_deploy
+from fabric import Connection
+from patchwork.transfers import rsync
 
 sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
 logging.basicConfig(level=logging.INFO,
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf b/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf
index cd965ef..65becf8 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/main/network.tf
@@ -53,7 +53,7 @@
   network = data.google_compute_network.endpoint_vpc_data.name
   allow {
     protocol = "tcp"
-    ports    = ["22", "8084", "8085", "4822"]
+    ports    = ["22", "8084", "8085", "4822", "8088"]
   }
   target_tags   = ["${var.service_base_name}-${var.endpoint_id}-endpoint"]
   source_ranges = [var.firewall_ing_cidr_range]
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf b/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf
index 170016c..b1d89ba 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/main/variables.tf
@@ -153,3 +153,11 @@
 variable "ldap_bind_creds" {}
 
 variable "ldap_users_group" {}
+
+variable "billing_enable" {}
+
+variable "billing_dataset_name" {}
+
+variable "mongo_password" {}
+
+variable "mongo_host" {}
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py
index 33e08f7..ac36747 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.py
@@ -1,10 +1,31 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import argparse
+import logging
+import sys
+import time
+import traceback
 from fabric import Connection
 from patchwork.files import exists
-import logging
-import argparse
-import sys
-import traceback
-import time
 
 conn = None
 args = None
diff --git a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml
index fd5fc9b..6edb057 100644
--- a/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml
+++ b/infrastructure-provisioning/terraform/gcp/endpoint/provisioning.yml
@@ -76,7 +76,7 @@
 responseDirectory: /opt/dlab/tmp
 handlerDirectory: /opt/dlab/handlers
 dockerLogDirectory: ${LOG_ROOT_DIR}
-warmupPollTimeout: 25s
+warmupPollTimeout: 2m
 resourceStatusPollTimeout: 300m
 keyLoaderPollTimeout: 30m
 requestEnvStatusTimeout: 50s
diff --git a/infrastructure-provisioning/terraform/gcp/main/main.tf b/infrastructure-provisioning/terraform/gcp/main/main.tf
index 3cbca78..4393f59 100644
--- a/infrastructure-provisioning/terraform/gcp/main/main.tf
+++ b/infrastructure-provisioning/terraform/gcp/main/main.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 provider "google" {
   credentials = "${var.credentials}"
   project     = "${var.project_name}"
diff --git a/infrastructure-provisioning/terraform/gcp/main/variables.tf b/infrastructure-provisioning/terraform/gcp/main/variables.tf
index 2b328ee..3c2feb0 100644
--- a/infrastructure-provisioning/terraform/gcp/main/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/main/variables.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 variable "credentials" {}
 
 variable "project_name" {}
diff --git a/infrastructure-provisioning/terraform/gcp/modules/common/iam.tf b/infrastructure-provisioning/terraform/gcp/modules/common/iam.tf
index 6931f60..bd65eb9 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/common/iam.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/common/iam.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 locals {
   service_name = "${var.project_tag}-ps-sa"
   role_name    = "${var.project_tag}-ps-role"
diff --git a/infrastructure-provisioning/terraform/gcp/modules/common/network.tf b/infrastructure-provisioning/terraform/gcp/modules/common/network.tf
index 8d34249..cf3d294 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/common/network.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/common/network.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 resource "google_compute_subnetwork" "subnet" {
   name          = "${var.project_tag}-subnet"
   ip_cidr_range = "${var.cidr_range}"
diff --git a/infrastructure-provisioning/terraform/gcp/modules/common/variables.tf b/infrastructure-provisioning/terraform/gcp/modules/common/variables.tf
index b4180ad..448d373 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/common/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/common/variables.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 variable "project_tag" {}
 
 variable "endpoint_tag" {}
diff --git a/infrastructure-provisioning/terraform/gcp/modules/data_engine/instance.tf b/infrastructure-provisioning/terraform/gcp/modules/data_engine/instance.tf
index fdb6250..a185a57 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/data_engine/instance.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/data_engine/instance.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 locals {
   notebook_name = "${var.project_tag}-nb-${var.notebook_name}"
   cluster_name  = "${var.project_tag}-de-${var.notebook_name}-${var.cluster_name}"
diff --git a/infrastructure-provisioning/terraform/gcp/modules/data_engine/variables.tf b/infrastructure-provisioning/terraform/gcp/modules/data_engine/variables.tf
index 12c4f92..e950ed1 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/data_engine/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/data_engine/variables.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 variable "project_tag" {}
 
 variable "endpoint_tag" {}
diff --git a/infrastructure-provisioning/terraform/gcp/modules/dataproc/instance.tf b/infrastructure-provisioning/terraform/gcp/modules/dataproc/instance.tf
index 415479c..1419c56 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/dataproc/instance.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/dataproc/instance.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 locals {
   dataproc_name = "${var.project_tag}-des-${var.notebook_name}-${var.cluster_name}"
 }
diff --git a/infrastructure-provisioning/terraform/gcp/modules/dataproc/variables.tf b/infrastructure-provisioning/terraform/gcp/modules/dataproc/variables.tf
index d3dcf15..bac08a2 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/dataproc/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/dataproc/variables.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 variable "region" {}
 
 variable "project_tag" {}
diff --git a/infrastructure-provisioning/terraform/gcp/modules/notebook/instance.tf b/infrastructure-provisioning/terraform/gcp/modules/notebook/instance.tf
index 0943e51..e89f69b 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/notebook/instance.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/notebook/instance.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 locals {
   name = "${var.project_tag}-nb-${var.notebook_name}"
 }
diff --git a/infrastructure-provisioning/terraform/gcp/modules/notebook/variables.tf b/infrastructure-provisioning/terraform/gcp/modules/notebook/variables.tf
index 6217480..bf51d34 100644
--- a/infrastructure-provisioning/terraform/gcp/modules/notebook/variables.tf
+++ b/infrastructure-provisioning/terraform/gcp/modules/notebook/variables.tf
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 variable "project_tag" {}
 
 variable "endpoint_tag" {}
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/files/get_configmap_values.sh b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/files/get_configmap_values.sh
index 4f27a1b..3085eb7 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/files/get_configmap_values.sh
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/files/get_configmap_values.sh
@@ -1,5 +1,26 @@
 #!/bin/bash
 
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 creds_file_path=$1
 gke_name=$2
 region=$3
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/.helmignore b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/.helmignore
index fbe01f8..2f795d4 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/.helmignore
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/.helmignore
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # Patterns to ignore when building packages.
 # This supports shell glob matching, relative path matching, and
 # negation (prefixed with !). Only one pattern per line.
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/Chart.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/Chart.yaml
index 4b334aa..e9d93e2 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/Chart.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/Chart.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: v1
 appVersion: 0.13.2
 description: An online certificate authority and related tools for secure automated
@@ -6,7 +27,7 @@
 home: https://smallstep.com
 icon: https://raw.githubusercontent.com/smallstep/certificates/master/icon.png
 keywords:
-- acme
+  - acme
 - authority
 - ca
 - certificate
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/_helpers.tpl b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/_helpers.tpl
index e240bac..b65f748 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/_helpers.tpl
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/_helpers.tpl
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 {{/* vim: set filetype=mustache: */}}
 {{/*
 Expand the name of the chart.
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/bootstrap.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/bootstrap.yaml
index 4fa3240..354c144 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/bootstrap.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/bootstrap.yaml
@@ -1,11 +1,32 @@
-{{- if .Release.IsInstall -}}
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+  {{- if .Release.IsInstall -}}
 apiVersion: v1
 kind: ServiceAccount
 metadata:
   name: {{ include "step-certificates.fullname" . }}-config
   namespace: {{.Release.Namespace}}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+  {{ include "step-certificates.labels" . | indent 4 }}
 ---
 apiVersion: batch/v1
 kind: Job
@@ -24,16 +45,16 @@
       serviceAccountName: {{ include "step-certificates.fullname" . }}-config
       restartPolicy: Never
       volumes:
-      - name: bootstrap
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-bootstrap
+        - name: bootstrap
+          configMap:
+            name: {{ include "step-certificates.fullname" . }}-bootstrap
       containers:
-      - name: config
-        image: "{{ .Values.bootstrapImage.repository }}:{{ .Values.bootstrapImage.tag }}"
-        imagePullPolicy: {{ .Values.bootstrapImage.pullPolicy }}
-        command: ["/bin/sh", "/home/step/bootstrap/bootstrap.sh"]
-        volumeMounts:
-          - name: bootstrap
-            mountPath: /home/step/bootstrap
-            readOnly: true
+        - name: config
+          image: "{{ .Values.bootstrapImage.repository }}:{{ .Values.bootstrapImage.tag }}"
+          imagePullPolicy: {{ .Values.bootstrapImage.pullPolicy }}
+          command: ["/bin/sh", "/home/step/bootstrap/bootstrap.sh"]
+          volumeMounts:
+            - name: bootstrap
+              mountPath: /home/step/bootstrap
+              readOnly: true
 {{- end -}}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ca.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ca.yaml
index 1113f2b..2551cc5 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ca.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ca.yaml
@@ -1,9 +1,30 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: apps/v1
 kind: StatefulSet
 metadata:
   name: {{ include "step-certificates.fullname" . }}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+  {{ include "step-certificates.labels" . | indent 4 }}
 spec:
   # Only one replica is supported at this moment
   # Requested {{ .Values.replicaCount }}
@@ -21,99 +42,99 @@
         app: {{ include "step-certificates.fullname" . }}
     spec:
 {{- if .Release.IsInstall }}
-      initContainers:
-        - name: {{ .Chart.Name }}-init
-          image: busybox:latest
-          imagePullPolicy: {{ .Values.image.pullPolicy }}
-          command: ["sleep", "20"]
+initContainers:
+  - name: {{ .Chart.Name }}-init
+    image: busybox:latest
+    imagePullPolicy: {{ .Values.image.pullPolicy }}
+    command: ["sleep", "20"]
 {{- end }}
-      securityContext:
-        {{- if .Values.ca.runAsRoot }}
-        runAsUser: 0
-        {{- else }}
-        runAsUser: 1000
-        runAsNonRoot: true
-        runAsGroup: 1000
-        fsGroup: 1000
-        allowPrivilegeEscalation: false
-        {{- end }}
-      containers:
-        - name: {{ .Chart.Name }}
-          image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
-          imagePullPolicy: {{ .Values.image.pullPolicy }}
-          command: ["/usr/local/bin/step-ca",
-            "--password-file", "/home/step/secrets/passwords/password",
-            "/home/step/config/ca.json"]
-          env:
-          - name: NAMESPACE
-            value: "{{ .Release.Namespace }}"
-          ports:
-            - name: https
-              containerPort: {{ .Values.service.targetPort }}
-              protocol: TCP
-          livenessProbe:
-            initialDelaySeconds: 5
-            httpGet:
-              path: /health
-              port: {{ .Values.service.targetPort }}
-              scheme: HTTPS
-          readinessProbe:
-            initialDelaySeconds: 5
-            httpGet:
-              path: /health
-              port: {{ .Values.service.targetPort }}
-              scheme: HTTPS
-          resources:
-            {{- toYaml .Values.resources | nindent 12 }}
-          volumeMounts:
-          - name: certs
-            mountPath: /home/step/certs
-            readOnly: true
-          - name: config
-            mountPath: /home/step/config
-            readOnly: true
-          - name: secrets
-            mountPath: /home/step/secrets
-            readOnly: true
-          - name: ca-password
-            mountPath: /home/step/secrets/passwords
-            readOnly: true
-          {{- if .Values.ca.db.enabled }}
-          - name: database
-            mountPath: /home/step/db
-            readOnly: false
-          {{- end }}
-      volumes:
+securityContext:
+  {{- if .Values.ca.runAsRoot }}
+  runAsUser: 0
+  {{- else }}
+  runAsUser: 1000
+  runAsNonRoot: true
+  runAsGroup: 1000
+  fsGroup: 1000
+  allowPrivilegeEscalation: false
+  {{- end }}
+containers:
+  - name: {{ .Chart.Name }}
+    image: "{{ .Values.image.repository }}:{{ .Values.image.tag }}"
+    imagePullPolicy: {{ .Values.image.pullPolicy }}
+    command: ["/usr/local/bin/step-ca",
+              "--password-file", "/home/step/secrets/passwords/password",
+              "/home/step/config/ca.json"]
+    env:
+      - name: NAMESPACE
+        value: "{{ .Release.Namespace }}"
+    ports:
+      - name: https
+        containerPort: {{ .Values.service.targetPort }}
+        protocol: TCP
+    livenessProbe:
+      initialDelaySeconds: 5
+      httpGet:
+        path: /health
+        port: {{ .Values.service.targetPort }}
+        scheme: HTTPS
+    readinessProbe:
+      initialDelaySeconds: 5
+      httpGet:
+        path: /health
+        port: {{ .Values.service.targetPort }}
+        scheme: HTTPS
+    resources:
+      {{- toYaml .Values.resources | nindent 12 }}
+    volumeMounts:
       - name: certs
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-certs
+        mountPath: /home/step/certs
+        readOnly: true
       - name: config
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-config
+        mountPath: /home/step/config
+        readOnly: true
       - name: secrets
-        configMap:
-          name: {{ include "step-certificates.fullname" . }}-secrets
+        mountPath: /home/step/secrets
+        readOnly: true
       - name: ca-password
-        secret:
-          secretName: {{ include "step-certificates.fullname" . }}-ca-password
-      {{- if and .Values.ca.db.enabled (not .Values.ca.db.persistent) }}
-      - name: database
-        emptyDir: {}
-      {{- end }}
-      {{- with .Values.nodeSelector }}
-      nodeSelector:
-      {{- toYaml . | nindent 8 }}
-      {{- end }}
-      {{- with .Values.affinity }}
-      affinity:
-      {{- toYaml . | nindent 8 }}
-      {{- end }}
-      {{- with .Values.tolerations }}
-      tolerations:
-      {{- toYaml . | nindent 8 }}
-      {{- end }}
+        mountPath: /home/step/secrets/passwords
+        readOnly: true
+    {{- if .Values.ca.db.enabled }}
+    - name: database
+      mountPath: /home/step/db
+      readOnly: false
+    {{- end }}
+volumes:
+  - name: certs
+    configMap:
+      name: {{ include "step-certificates.fullname" . }}-certs
+  - name: config
+    configMap:
+      name: {{ include "step-certificates.fullname" . }}-config
+  - name: secrets
+    configMap:
+      name: {{ include "step-certificates.fullname" . }}-secrets
+  - name: ca-password
+    secret:
+      secretName: {{ include "step-certificates.fullname" . }}-ca-password
+  {{- if and .Values.ca.db.enabled (not .Values.ca.db.persistent) }}
+  - name: database
+    emptyDir: {}
+  {{- end }}
+  {{- with .Values.nodeSelector }}
+nodeSelector:
+  {{- toYaml . | nindent 8 }}
+  {{- end }}
+  {{- with .Values.affinity }}
+affinity:
+  {{- toYaml . | nindent 8 }}
+  {{- end }}
+  {{- with .Values.tolerations }}
+tolerations:
+  {{- toYaml . | nindent 8 }}
+  {{- end }}
 {{- if and .Values.ca.db.enabled .Values.ca.db.persistent }}
-  volumeClaimTemplates:
+  volumeClaimTemplates:
   - metadata:
       name: database
       labels:
@@ -123,16 +144,16 @@
     spec:
       accessModes:
       {{- range .Values.ca.db.accessModes }}
-        - {{ . | quote }}
+      - {{ . | quote }}
       {{- end }}
       resources:
         requests:
           storage: {{ .Values.ca.db.size | quote }}
     {{- if .Values.ca.db.storageClass }}
     {{- if (eq "-" .Values.ca.db.storageClass) }}
-      storageClassName: ""
+      storageClassName: ""
     {{- else }}
-      storageClassName: {{ .Values.ca.db.storageClass | quote }}
+      storageClassName: {{ .Values.ca.db.storageClass | quote }}
     {{- end }}
     {{- end }}
 {{- end }}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/configmaps.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/configmaps.yaml
index 28ad488..1670d9a 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/configmaps.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/configmaps.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # ConfigMaps that will be updated by the configuration job:
 # 1. Step CA config directory.
 # 2. Step CA certs direcotry.
@@ -71,76 +92,76 @@
       permission_error "create secrets"
     fi
 {{ if .Values.autocert.enabled }}
-    echo -n "Checking for permission to create mutatingwebhookconfiguration in {{.Release.Namespace}} namespace: "
+  echo -n "Checking for permission to create mutatingwebhookconfiguration in {{.Release.Namespace}} namespace: "
     kubectl auth can-i create mutatingwebhookconfiguration --namespace {{.Release.Namespace}}
     if [ $? -ne 0 ]; then
       permission_error "create mutatingwebhookconfiguration"
-    fi
+  fi
 {{- end }}
 
-    # Setting this here on purpose, after the above section which explicitly checks
-    # for and handles exit errors.
-    set -e
+  # Setting this here on purpose, after the above section which explicitly checks
+  # for and handles exit errors.
+  set -e
 
-    echo -e "\n\e[1mInitializating the CA...\e[0m"
+  echo -e "\n\e[1mInitializating the CA...\e[0m"
 
-    # CA password
+  # CA password
 {{- if .Values.ca.password }}
-    CA_PASSWORD={{ quote .Values.ca.password }}
+  CA_PASSWORD={{ quote .Values.ca.password }}
 {{- else }}
-    CA_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
+  CA_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
 {{- end }}
-    # Provisioner password
+  # Provisioner password
 {{- if .Values.ca.provisioner.password }}
-    CA_PROVISIONER_PASSWORD={{ quote .Values.ca.provisioner.password }}
+  CA_PROVISIONER_PASSWORD={{ quote .Values.ca.provisioner.password }}
 {{- else }}
-    CA_PROVISIONER_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
+  CA_PROVISIONER_PASSWORD=$(head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo '')
 {{- end }}
 
-    TMP_CA_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
-    TMP_CA_PROVISIONER_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
+  TMP_CA_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
+  TMP_CA_PROVISIONER_PASSWORD=$(mktemp /tmp/autocert.XXXXXX)
 
-    echo $CA_PASSWORD > $TMP_CA_PASSWORD
-    echo $CA_PROVISIONER_PASSWORD > $TMP_CA_PROVISIONER_PASSWORD
+  echo $CA_PASSWORD > $TMP_CA_PASSWORD
+  echo $CA_PROVISIONER_PASSWORD > $TMP_CA_PROVISIONER_PASSWORD
 
-    step ca init \
-      --name "{{.Values.ca.name}}" \
-      --dns "{{include "step-certificates.dns" .}}" \
-      --address "{{.Values.ca.address}}" \
-      --provisioner "{{.Values.ca.provisioner.name}}" \
-      --with-ca-url "{{include "step-certificates.url" .}}" \
-      --password-file "$TMP_CA_PASSWORD" \
-      --provisioner-password-file "$TMP_CA_PROVISIONER_PASSWORD" {{ if not .Values.ca.db.enabled }}--no-db{{ end }}
+  step ca init \
+  --name "{{.Values.ca.name}}" \
+  --dns "{{include "step-certificates.dns" .}}" \
+  --address "{{.Values.ca.address}}" \
+  --provisioner "{{.Values.ca.provisioner.name}}" \
+  --with-ca-url "{{include "step-certificates.url" .}}" \
+  --password-file "$TMP_CA_PASSWORD" \
+  --provisioner-password-file "$TMP_CA_PROVISIONER_PASSWORD" {{ if not .Values.ca.db.enabled }}--no-db{{ end }}
 
-    rm -f $TMP_CA_PASSWORD $TMP_CA_PROVISIONER_PASSWORD
+  rm -f $TMP_CA_PASSWORD $TMP_CA_PROVISIONER_PASSWORD
 
-    echo -e "\n\e[1mCreating configmaps and secrets in {{.Release.Namespace}} namespace ...\e[0m"
+  echo -e "\n\e[1mCreating configmaps and secrets in {{.Release.Namespace}} namespace ...\e[0m"
 
-    # Replace secrets created on helm install
-    # It allows to properly remove them on helm delete
-    kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-config --from-file $(step path)/config
-    kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-certs --from-file $(step path)/certs
-    kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-secrets --from-file $(step path)/secrets
+  # Replace secrets created on helm install
+  # It allows to properly remove them on helm delete
+  kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-config --from-file $(step path)/config
+  kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-certs --from-file $(step path)/certs
+  kbreplace -n {{.Release.Namespace}} create configmap {{ include "step-certificates.fullname" . }}-secrets --from-file $(step path)/secrets
 
-    kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-ca-password --from-literal "password=${CA_PASSWORD}"
-    kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-provisioner-password --from-literal "password=${CA_PROVISIONER_PASSWORD}"
+  kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-ca-password --from-literal "password=${CA_PASSWORD}"
+  kbreplace -n {{.Release.Namespace}} create secret generic {{ include "step-certificates.fullname" . }}-provisioner-password --from-literal "password=${CA_PROVISIONER_PASSWORD}"
 
-    # Label all configmaps and secrets
-    kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-config {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-certs {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-secrets {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-ca-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
-    kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-provisioner-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+  # Label all configmaps and secrets
+  kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-config {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+  kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-certs {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+  kubectl -n {{.Release.Namespace}} label configmap {{ include "step-certificates.fullname" . }}-secrets {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+  kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-ca-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
+  kubectl -n {{.Release.Namespace}} label secret {{ include "step-certificates.fullname" . }}-provisioner-password {{ include "step-certificates.labels" . | replace ": " "=" | replace "\n" " " }}
 
-    # Patch webhook if autocert is enabled
+  # Patch webhook if autocert is enabled
 {{ if .Values.autocert.enabled }}
-    CA_BUNDLE=$(cat $(step path)/certs/root_ca.crt | base64 | tr -d '\n')
-    kubectl patch mutatingwebhookconfigurations {{ .Release.Name }}-autocert-webhook-config \
-      --type json -p="[{\"op\":\"replace\",\"path\":\"/webhooks/0/clientConfig/caBundle\",\"value\":\"$CA_BUNDLE\"}]"
+  CA_BUNDLE=$(cat $(step path)/certs/root_ca.crt | base64 | tr -d '\n')
+  kubectl patch mutatingwebhookconfigurations {{ .Release.Name }}-autocert-webhook-config \
+  --type json -p="[{\"op\":\"replace\",\"path\":\"/webhooks/0/clientConfig/caBundle\",\"value\":\"$CA_BUNDLE\"}]"
 {{- end }}
 
-    echo -e "\n\e[1mStep Certificates installed!\e[0m"
-    echo
-    echo "CA URL: {{include "step-certificates.url" .}}"
-    echo "CA Fingerprint: $(step certificate fingerprint $(step path)/certs/root_ca.crt)"
-    echo
\ No newline at end of file
+  echo -e "\n\e[1mStep Certificates installed!\e[0m"
+  echo
+  echo "CA URL: {{include "step-certificates.url" .}}"
+  echo "CA Fingerprint: $(step certificate fingerprint $(step path)/certs/root_ca.crt)"
+  echo
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ingress.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ingress.yaml
index 53264f1..240bdaf 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ingress.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/ingress.yaml
@@ -1,36 +1,57 @@
-{{- if .Values.ingress.enabled -}}
-{{- $fullName := include "step-certificates.fullname" . -}}
+# *****************************************************************************
+#
+  # Licensed to the Apache Software Foundation (ASF) under one
+  # or more contributor license agreements.  See the NOTICE file
+  # distributed with this work for additional information
+  # regarding copyright ownership.  The ASF licenses this file
+  # to you under the Apache License, Version 2.0 (the
+  # "License"); you may not use this file except in compliance
+  # with the License.  You may obtain a copy of the License at
+  #
+  #   http://www.apache.org/licenses/LICENSE-2.0
+  #
+  # Unless required by applicable law or agreed to in writing,
+  # software distributed under the License is distributed on an
+  # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  # KIND, either express or implied.  See the License for the
+  # specific language governing permissions and limitations
+  # under the License.
+  #
+  # ******************************************************************************
+
+  {{- if .Values.ingress.enabled -}}
+  {{- $fullName := include "step-certificates.fullname" . -}}
 apiVersion: extensions/v1beta1
 kind: Ingress
 metadata:
   name: {{ $fullName }}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+  {{ include "step-certificates.labels" . | indent 4 }}
   {{- with .Values.ingress.annotations }}
-  annotations:
-    {{- toYaml . | nindent 4 }}
+  annotations:
+    {{- toYaml . | nindent 4 }}
   {{- end }}
 spec:
 {{- if .Values.ingress.tls }}
-  tls:
+  tls:
   {{- range .Values.ingress.tls }}
-    - hosts:
-      {{- range .hosts }}
-        - {{ . | quote }}
-      {{- end }}
-      secretName: {{ .secretName }}
+    - hosts:
+      {{- range .hosts }}
+        - {{ . | quote }}
+      {{- end }}
+      secretName: {{ .secretName }}
   {{- end }}
 {{- end }}
-  rules:
+  rules:
   {{- range .Values.ingress.hosts }}
-    - host: {{ .host | quote }}
-      http:
-        paths:
-        {{- range .paths }}
-          - path: {{ . }}
-            backend:
-              serviceName: {{ $fullName }}
-              servicePort: http
-        {{- end }}
+    - host: {{ .host | quote }}
+      http:
+        paths:
+        {{- range .paths }}
+          - path: {{ . }}
+            backend:
+              serviceName: {{ $fullName }}
+              servicePort: http
+        {{- end }}
   {{- end }}
 {{- end }}
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/rbac.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/rbac.yaml
index 6f4e137..0534856 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/rbac.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/rbac.yaml
@@ -1,4 +1,25 @@
-{{- if .Release.IsInstall -}}
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+  {{- if .Release.IsInstall -}}
 apiVersion: rbac.authorization.k8s.io/v1
 kind: Role
 metadata:
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/secrets.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/secrets.yaml
index 567a989..68d0b8d 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/secrets.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/secrets.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # Secrets that will be updated by the configuration job:
 # 1. CA keys password.
 # 2. Provisioner password.
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/service.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/service.yaml
index 4ec0783..dccae38 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/service.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/service.yaml
@@ -1,19 +1,40 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: v1
 kind: Service
 metadata:
   name: {{ include "step-certificates.fullname" . }}
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
+  {{ include "step-certificates.labels" . | indent 4 }}
 spec:
   type: {{ .Values.service.type }}
   ports:
     - port: {{ .Values.service.port }}
       targetPort: {{ .Values.service.targetPort }}
 {{- if .Values.service.nodePort }}
-      nodePort: {{ .Values.service.nodePort }}
+      nodePort: {{ .Values.service.nodePort }}
 {{- end }}
-      protocol: TCP
-      name: https
-  selector:
-    app.kubernetes.io/name: {{ include "step-certificates.name" . }}
-    app.kubernetes.io/instance: {{ .Release.Name }}
\ No newline at end of file
+      protocol: TCP
+      name: https
+  selector:
+    app.kubernetes.io/name: {{ include "step-certificates.name" . }}
+    app.kubernetes.io/instance: {{ .Release.Name }}
\ No newline at end of file
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/tests/test-connection.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/tests/test-connection.yaml
index 5ae87c6..4fe296d 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/tests/test-connection.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/templates/tests/test-connection.yaml
@@ -1,11 +1,32 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 apiVersion: v1
 kind: Pod
 metadata:
   name: "{{ include "step-certificates.fullname" . }}-test-connection"
   labels:
-{{ include "step-certificates.labels" . | indent 4 }}
-  annotations:
-    "helm.sh/hook": test-success
+  {{ include "step-certificates.labels" . | indent 4 }}
+  annotations:
+    "helm.sh/hook": test-success
 spec:
   containers:
     - name: wget
diff --git a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/values.yaml b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/values.yaml
index 45350b6..269e7fa 100644
--- a/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/values.yaml
+++ b/infrastructure-provisioning/terraform/gcp/ssn-gke/main/modules/helm_charts/step-ca-chart/values.yaml
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 # Default values for step-certificates.
 
 # replicaCount is the number of replicas of step-certificates.
@@ -63,7 +84,7 @@
     storageClass: standard
     # accessModes defines the Persistent Volume Access Mode.
     accessModes:
-    - ReadWriteOnce
+      - ReadWriteOnce
     # size is the Persistent Volume size.
     size: 10Gi
   # runAsRoot runs the ca as root instead of the step user. This is required in
@@ -92,7 +113,7 @@
   #   memory: 128Mi
   # requests:
   #   cpu: 100m
-  #   memory: 128Mi
+  #   memory: 128Mi
 
 # nodeSelector contains the node labels for pod assignment.
 nodeSelector: {}
diff --git a/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/resources/css/login.css b/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/resources/css/login.css
index b31310a..1f5d717 100644
--- a/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/resources/css/login.css
+++ b/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/resources/css/login.css
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 .login-pf body {
     background: url("../img/login-background.png") no-repeat center center fixed;
     background-size: cover;
diff --git a/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/theme.properties b/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/theme.properties
index ed1c3c1..5ff84ad 100644
--- a/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/theme.properties
+++ b/infrastructure-provisioning/terraform/keycloak-theme/dlab/login/theme.properties
@@ -1,25 +1,36 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
 parent=base
 import=common/keycloak
-
 styles=node_modules/patternfly/dist/css/patternfly.css node_modules/patternfly/dist/css/patternfly-additions.css lib/zocial/zocial.css css/login.css
 meta=viewport==width=device-width,initial-scale=1
-
 kcHtmlClass=login-pf
 kcLoginClass=login-pf-page
-
 kcLogoLink=http://www.keycloak.org
-
 kcLogoClass=login-pf-brand
-
 kcContainerClass=container-fluid
 kcContentClass=col-sm-8 col-sm-offset-2 col-md-6 col-md-offset-3 col-lg-6 col-lg-offset-3
 kcContentWrapperClass=row
-
 kcHeaderClass=login-pf-page-header
 kcFeedbackAreaClass=col-md-12
 kcLocaleClass=col-xs-12 col-sm-1
 kcAlertIconClasserror=pficon pficon-error-circle-o
-
 kcFormAreaClass=col-sm-10 col-sm-offset-1 col-md-8 col-md-offset-2 col-lg-8 col-lg-offset-2
 kcFormCardClass=card-pf
 kcFormCardAccountClass=login-pf-accounts
@@ -29,13 +40,10 @@
 kcFormSocialAccountDoubleListClass=login-pf-social-double-col
 kcFormSocialAccountListLinkClass=login-pf-social-link
 kcFormHeaderClass=login-pf-header
-
 kcFeedbackErrorIcon=pficon pficon-error-circle-o
 kcFeedbackWarningIcon=pficon pficon-warning-triangle-o
 kcFeedbackSuccessIcon=pficon pficon-ok
 kcFeedbackInfoIcon=pficon pficon-info
-
-
 kcFormClass=form-horizontal
 kcFormGroupClass=form-group
 kcFormGroupErrorClass=has-error
@@ -48,10 +56,7 @@
 kcFormSettingClass=login-pf-settings
 kcTextareaClass=form-control
 kcSignUpClass=login-pf-signup
-
-
 kcInfoAreaClass=col-xs-12 col-sm-4 col-md-4 col-lg-5 details
-
 ##### css classes for form buttons
 # main class used for all buttons
 kcButtonClass=btn
@@ -61,9 +66,7 @@
 # classes defining size of the button
 kcButtonLargeClass=btn-lg
 kcButtonBlockClass=btn-block
-
 ##### css classes for input
 kcInputLargeClass=input-lg
-
 ##### css classes for form accessability
 kcSrOnlyClass=sr-only
diff --git a/integration-tests-cucumber/pom.xml b/integration-tests-cucumber/pom.xml
deleted file mode 100644
index d96a492..0000000
--- a/integration-tests-cucumber/pom.xml
+++ /dev/null
@@ -1,114 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-
-    <groupId>com.epam.dlab</groupId>
-    <artifactId>integration-tests</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
-    <packaging>jar</packaging>
-
-    <properties>
-        <cucumber.version>4.2.6</cucumber.version>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <version>2.9.9</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>2.9.9</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <version>2.9.9</version>
-        </dependency>
-        <dependency>
-            <groupId>org.projectlombok</groupId>
-            <artifactId>lombok</artifactId>
-            <version>1.18.8</version>
-        </dependency>
-        <dependency>
-            <groupId>org.mongodb</groupId>
-            <artifactId>mongo-java-driver</artifactId>
-            <version>3.10.2</version>
-        </dependency>
-
-
-        <dependency>
-            <groupId>io.cucumber</groupId>
-            <artifactId>cucumber-java</artifactId>
-            <version>${cucumber.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>io.cucumber</groupId>
-            <artifactId>cucumber-junit</artifactId>
-            <version>${cucumber.version}</version>
-            <scope>test</scope>
-        </dependency>
-
-        <dependency>
-            <groupId>junit</groupId>
-            <artifactId>junit</artifactId>
-            <version>4.12</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>com.jayway.restassured</groupId>
-            <artifactId>rest-assured</artifactId>
-            <version>2.9.0</version>
-            <scope>test</scope>
-        </dependency>
-
-    </dependencies>
-
-    <build>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <testFailureIgnore>true</testFailureIgnore>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>3.7.0</version>
-                <configuration>
-                    <encoding>UTF-8</encoding>
-                    <source>1.8</source>
-                    <target>1.8</target>
-                </configuration>
-            </plugin>
-            <plugin>
-                <groupId>net.masterthought</groupId>
-                <artifactId>maven-cucumber-reporting</artifactId>
-                <version>2.8.0</version>
-                <executions>
-                    <execution>
-                        <id>execution</id>
-                        <phase>verify</phase>
-                        <goals>
-                            <goal>generate</goal>
-                        </goals>
-                        <configuration>
-                            <projectName>CucumberWebGui</projectName>
-                            <outputDirectory>${project.build.directory}/cucumber-report-html</outputDirectory>
-                            <cucumberOutput>${project.build.directory}/cucumber.json</cucumberOutput>
-                            <skippedFails>true</skippedFails>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git a/integration-tests-cucumber/src/main/java/org/apache/dlab/dto/EndpointDTO.java b/integration-tests-cucumber/src/main/java/org/apache/dlab/dto/EndpointDTO.java
deleted file mode 100644
index a06a4d7..0000000
--- a/integration-tests-cucumber/src/main/java/org/apache/dlab/dto/EndpointDTO.java
+++ /dev/null
@@ -1,19 +0,0 @@
-package org.apache.dlab.dto;
-
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-@Data
-@JsonIgnoreProperties(ignoreUnknown = true)
-@AllArgsConstructor
-@NoArgsConstructor
-public class EndpointDTO {
-	private String name;
-	private String url;
-	private String account;
-	@JsonProperty("endpoint_tag")
-	private String tag;
-}
diff --git a/integration-tests-cucumber/src/main/java/org/apache/dlab/mongo/MongoDBHelper.java b/integration-tests-cucumber/src/main/java/org/apache/dlab/mongo/MongoDBHelper.java
deleted file mode 100644
index 11e4dea..0000000
--- a/integration-tests-cucumber/src/main/java/org/apache/dlab/mongo/MongoDBHelper.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package org.apache.dlab.mongo;
-
-import com.mongodb.client.MongoClient;
-import com.mongodb.client.MongoClients;
-import org.apache.dlab.util.PropertyHelper;
-
-public class MongoDBHelper {
-	private static final MongoClient client = MongoClients
-			.create(PropertyHelper.read("mongo.connection.string"));
-
-	public static void cleanCollection(String collection) {
-		client.getDatabase(PropertyHelper.read("mongo.db.name")).getCollection(collection).drop();
-	}
-}
diff --git a/integration-tests-cucumber/src/main/java/org/apache/dlab/util/JacksonMapper.java b/integration-tests-cucumber/src/main/java/org/apache/dlab/util/JacksonMapper.java
deleted file mode 100644
index a18d4d9..0000000
--- a/integration-tests-cucumber/src/main/java/org/apache/dlab/util/JacksonMapper.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package org.apache.dlab.util;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-
-public final class JacksonMapper {
-	private static final ObjectMapper MAPPER = new ObjectMapper();
-
-	public static <T> String marshall(T obj) {
-		try {
-			return MAPPER.writeValueAsString(obj);
-		} catch (JsonProcessingException e) {
-			throw new IllegalArgumentException(e);
-		}
-	}
-}
diff --git a/integration-tests-cucumber/src/main/java/org/apache/dlab/util/PropertyHelper.java b/integration-tests-cucumber/src/main/java/org/apache/dlab/util/PropertyHelper.java
deleted file mode 100644
index 48d7cca..0000000
--- a/integration-tests-cucumber/src/main/java/org/apache/dlab/util/PropertyHelper.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package org.apache.dlab.util;
-
-import java.io.FileInputStream;
-import java.io.InputStream;
-import java.util.Properties;
-
-public class PropertyHelper {
-
-	private final static Properties PROPERTIES;
-
-	static {
-		PROPERTIES = new Properties();
-		try (InputStream inputStream = new FileInputStream(System.getProperty("config.file"))) {
-			PROPERTIES.load(inputStream);
-		} catch (Exception e) {
-			e.printStackTrace();
-		}
-	}
-
-	public static String read(String prop) {
-		return PROPERTIES.getProperty(prop);
-	}
-}
diff --git a/integration-tests-cucumber/src/test/java/dlab/Constants.java b/integration-tests-cucumber/src/test/java/dlab/Constants.java
deleted file mode 100644
index 4e30e99..0000000
--- a/integration-tests-cucumber/src/test/java/dlab/Constants.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package dlab;
-
-public interface Constants {
-	String API_URI = "https://localhost:8443/api/";
-}
diff --git a/integration-tests-cucumber/src/test/java/dlab/RunCucumberTest.java b/integration-tests-cucumber/src/test/java/dlab/RunCucumberTest.java
deleted file mode 100644
index fdc8a4c..0000000
--- a/integration-tests-cucumber/src/test/java/dlab/RunCucumberTest.java
+++ /dev/null
@@ -1,10 +0,0 @@
-package dlab;
-
-import cucumber.api.CucumberOptions;
-import cucumber.api.junit.Cucumber;
-import org.junit.runner.RunWith;
-
-@RunWith(Cucumber.class)
-@CucumberOptions(plugin = {"json:target/cucumber.json"})
-public class RunCucumberTest {
-}
diff --git a/integration-tests-cucumber/src/test/java/dlab/endpoint/EndpointSteps.java b/integration-tests-cucumber/src/test/java/dlab/endpoint/EndpointSteps.java
deleted file mode 100644
index e66fc33..0000000
--- a/integration-tests-cucumber/src/test/java/dlab/endpoint/EndpointSteps.java
+++ /dev/null
@@ -1,97 +0,0 @@
-package dlab.endpoint;
-
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.specification.RequestSpecification;
-import cucumber.api.java.en.And;
-import cucumber.api.java.en.Given;
-import cucumber.api.java.en.Then;
-import cucumber.api.java.en.When;
-import org.apache.dlab.dto.EndpointDTO;
-import org.apache.dlab.mongo.MongoDBHelper;
-import org.apache.dlab.util.JacksonMapper;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import static com.jayway.restassured.RestAssured.given;
-import static dlab.Constants.API_URI;
-import static org.hamcrest.MatcherAssert.assertThat;
-import static org.hamcrest.core.IsEqual.equalTo;
-
-public class EndpointSteps {
-	private RequestSpecification request;
-	private Response response;
-	private String name;
-
-	@Given("User try to create new endpoint with name {string} and uri {string} and account {string} and {string}")
-	public void userTryToCreateNewEndpoint(String name, String uri, String account, String tag) {
-		this.name = name;
-		request = given().body(JacksonMapper.marshall(new EndpointDTO(name, uri, account, tag)))
-				.auth()
-				.oauth2("token123")
-				.contentType(ContentType.JSON);
-
-	}
-
-	@When("User send create new endpoint request")
-	public void userSendCreateNewEndpoint() {
-		response = request.post(API_URI + "endpoint");
-	}
-
-	@Given("There is no endpoints in DLab")
-	public void thereIsNoEndpointsInDLab() {
-		MongoDBHelper.cleanCollection("endpoints");
-
-	}
-
-	@Then("Response status code is {int}")
-	public void responseStatusCodeIs(int code) {
-		assertThat(response.getStatusCode(), equalTo(code));
-	}
-
-	@And("Endpoint URI is present in location header")
-	public void endpointURIIsPresentInLocationHeader() {
-		assertThat(response.getHeader("Location"), equalTo(API_URI + "endpoint/" + name));
-	}
-
-	@When("User try to get information about endpoint with name {string}")
-	public void userTryToGetInformationAboutEndpointWithName(String endpoint) throws URISyntaxException {
-		response = authenticatedRequest()
-				.get(new URI(API_URI + "endpoint/" + endpoint));
-
-	}
-
-	@And("Endpoint information is successfully returned with " +
-			"name {string}, uri {string}, account {string}, and tag {string}")
-	public void endpointInformationIsSuccessfullyReturnedWithNameUriAccountAndTag(String name, String uri,
-																				  String account, String tag) {
-		final EndpointDTO dto = response.getBody().as(EndpointDTO.class);
-		assertThat(dto.getAccount(), equalTo(account));
-		assertThat(dto.getName(), equalTo(name));
-		assertThat(dto.getUrl(), equalTo(uri));
-		assertThat(dto.getTag(), equalTo(tag));
-
-	}
-
-	@When("User try to get information about endpoints")
-	public void userTryToGetInformationAboutEndpoints() throws URISyntaxException {
-		response = authenticatedRequest()
-				.get(new URI(API_URI + "endpoint"));
-
-	}
-
-	@And("There are endpoints with name test1 and test2")
-	public void thereAreEndpointsWithNameTestAndTest() {
-		final EndpointDTO[] endpoints = response.getBody().as(EndpointDTO[].class);
-		assertThat(2, equalTo(endpoints.length));
-		assertThat("test1", equalTo(endpoints[0].getName()));
-		assertThat("test2", equalTo(endpoints[1].getName()));
-	}
-
-	private RequestSpecification authenticatedRequest() {
-		return given()
-				.auth()
-				.oauth2("token123");
-	}
-}
diff --git a/integration-tests-cucumber/src/test/java/dlab/login/LoginSteps.java b/integration-tests-cucumber/src/test/java/dlab/login/LoginSteps.java
deleted file mode 100644
index 32b29cb..0000000
--- a/integration-tests-cucumber/src/test/java/dlab/login/LoginSteps.java
+++ /dev/null
@@ -1,44 +0,0 @@
-package dlab.login;
-
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.specification.RequestSpecification;
-import cucumber.api.java.en.Given;
-import cucumber.api.java.en.Then;
-import cucumber.api.java.en.When;
-import gherkin.deps.com.google.gson.JsonObject;
-
-import java.net.URI;
-import java.net.URISyntaxException;
-
-import static com.jayway.restassured.RestAssured.given;
-import static dlab.Constants.API_URI;
-import static org.hamcrest.core.IsEqual.equalTo;
-import static org.junit.Assert.assertThat;
-
-public class LoginSteps {
-
-
-	private static final String LOGIN_RESOURCE_PATH = API_URI + "user/login";
-	private RequestSpecification request;
-	private Response response;
-
-	@Given("User try to login to Dlab with {string} and {string}")
-	public void userProvidedLoginAndPassword(String username, String password) {
-		JsonObject jsonObject = new JsonObject();
-		jsonObject.addProperty("username", username);
-		jsonObject.addProperty("password", password);
-		request = given().body(jsonObject.toString()).contentType(ContentType.JSON);
-	}
-
-	@When("user try to login")
-	public void userTryToLogin() throws URISyntaxException {
-		response = request.post(new URI(LOGIN_RESOURCE_PATH));
-	}
-
-	@Then("response code is {string}")
-	public void responseCodeIs(String status) {
-		assertThat(response.getStatusCode(), equalTo(Integer.valueOf(status)));
-
-	}
-}
diff --git a/integration-tests-cucumber/src/test/resources/config.properties b/integration-tests-cucumber/src/test/resources/config.properties
deleted file mode 100644
index 5cfad3c..0000000
--- a/integration-tests-cucumber/src/test/resources/config.properties
+++ /dev/null
@@ -1,2 +0,0 @@
-mongo.connection.string=mongodb://localhost:27017/DLAB
-mongo.db.name=DLAB
\ No newline at end of file
diff --git a/integration-tests-cucumber/src/test/resources/dlab/endpoint.feature b/integration-tests-cucumber/src/test/resources/dlab/endpoint.feature
deleted file mode 100644
index 7281a24..0000000
--- a/integration-tests-cucumber/src/test/resources/dlab/endpoint.feature
+++ /dev/null
@@ -1,57 +0,0 @@
-Feature: Endpoint management in DLab
-  Such feature allowed to manage endpoint inside DLab
-
-  Scenario Outline: Create new endpoint when it does not exist
-
-    Given There is no endpoints in DLab
-    And User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
-    When User send create new endpoint request
-    Then Response status code is 200
-    And Endpoint URI is present in location header
-    Examples:
-      | name          | uri     | account   | tag      |
-      | test_endpoint | someuri | 123231312 | some_tag |
-
-
-  Scenario Outline: Create new endpoint when it exist already
-
-    Given There is no endpoints in DLab
-    And User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
-    And  User send create new endpoint request
-    When User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
-    And User send create new endpoint request
-    Then Response status code is 409
-    Examples:
-      | name          | uri     | account   | tag      |
-      | test_endpoint | someuri | 123231312 | some_tag |
-
-
-  Scenario Outline: Get information for endpoint
-
-    Given There is no endpoints in DLab
-    And User try to create new endpoint with name "<name>" and uri "<uri>" and account "<account>" and "<tag>"
-    And  User send create new endpoint request
-    When User try to get information about endpoint with name "<name>"
-    Then Response status code is 200
-    And Endpoint information is successfully returned with name "<name>", uri "<uri>", account "<account>", and tag "<tag>"
-    Examples:
-      | name          | uri     | account   | tag      |
-      | test_endpoint | someuri | 123231312 | some_tag |
-
-
-  Scenario: Get list of endpoints
-
-    Given There is no endpoints in DLab
-    And User try to create new endpoint with name "test1" and uri "someuri1" and account "123" and "customTag1"
-    And  User send create new endpoint request
-    And User try to create new endpoint with name "test2" and uri "someuri2" and account "1233" and "customTag4"
-    And  User send create new endpoint request
-    When User try to get information about endpoints
-    Then Response status code is 200
-    And There are endpoints with name test1 and test2
-
-  Scenario: Get not endpoint that does not exist
-
-    Given There is no endpoints in DLab
-    When User try to get information about endpoint with name "someName"
-    Then Response status code is 404
diff --git a/integration-tests-cucumber/src/test/resources/dlab/login.feature b/integration-tests-cucumber/src/test/resources/dlab/login.feature
deleted file mode 100644
index 493a51e..0000000
--- a/integration-tests-cucumber/src/test/resources/dlab/login.feature
+++ /dev/null
@@ -1,12 +0,0 @@
-Feature: DLab login API
-  Used to check DLab login flow
-
-  Scenario Outline: User try to login to DLab
-    Given User try to login to Dlab with "<username>" and "<password>"
-    When user try to login
-    Then response code is "<status>"
-
-    Examples:
-      | username       | password | status |
-      | test           | pass     | 200    |
-      | not_valid_user | pass     | 401    |
\ No newline at end of file
diff --git a/integration-tests/README.MD b/integration-tests/README.MD
deleted file mode 100644
index 3cdda00..0000000
--- a/integration-tests/README.MD
+++ /dev/null
@@ -1,80 +0,0 @@
-# The automation tests
-
-## What is it?
-This is the module with automation tests for dlab integration testing.
-
-## How to run?
-
-Automation tests could be ran from UNIX and Windows machines.
-The prerequisites are:
-* The environment is established
-* The machine, where tests are going to be ran, has access to Jenkins
-* Set of configurations are performed
-* Set of Java parameters are passed.
-
-## What are java parameters?
-
-The Java parameters are key value pairs prefixed with "-D" passed to Java 
-Required Java parameters are: "jenkins.buildNumber", "cluster.username", "cluster.os.family"
-
-Example:
-* -Djenkins.buildNumber=57 -Dcluster.username=dlab-user -Dcluster.os.family=debian
-
-## What are other required configurations?
-
-### The config.properties file
- 
- "config.properties" file example could be found in the module root "example" directory.
- 
-### Jupyther scenario files
- Scenario files example can be found in module root "scenario_jupyter" directory.
- 
-### Amazon instances configuration json files
-Amazon instances configuration json files examples can be found in module root "ec2_templates" directory.
-
-## Automation run modes
- There are 3 modes: server and dev and dev local. Base on these modes the configuration files should be placed in different locations.
- 
-### Server mode 
-Server does not require any specific parameters. The configuration files locations is following for server mode:
-* conf.file.location=${conf.root.path}/config.properties
-* keys.directory.location=${conf.root.path}/keys
-* python.files.location=${conf.root.path}/scenario_jupyter/
-* cluster.config.file.location=${conf.root.path}/ec2_templates/
-
-### Dev mode.
-Dev mode has 2 choices run against service deployed in Amazon or against local mocked services. 
-
-#### Dev mode against services deployed locally
-To be able to run tests against locally deployed service, it is required to pass following parameters:
-
- * -Dconf.root.path=examples
-
-Also you should describe following parameters into config.properties file or to pass it to JVM:
- * -Drun.mode.local=true
- * -Duse.jenkins=false
- * -Dssn.url=https://localhost:8443
- * -Dservice.base.name=dev
- * -Dnotebooks.to.test=rstudio,jupyter,zeppelin
-
-#### Dev mode against services deployed in Amazon
-To be able to run tests against services deployed in Amazon, it is required to pass following parameters: 
- 
- * -Drun.mode=dev
- * -Dconf.root.path=examples
- * -Djenkins.buildNumber=163
- * -Dnotebooks.to.test=rstudio
- * -Dexecution.threads=1
-
-
-The "execution.threads" property with value 1  limits threads for notebooks testing to 1. So all tests would be executed synchronously.
-
-To run automation tests in dev mode it is required to pass a few more Java parameters: 
-* "run.mode" set to "dev".
-* 
-
-
-Example:
-* -Drun.mode=dev.
-
-In this case the application configuration is following:
diff --git a/integration-tests/examples/azure_templates/deeplearning/deeplearning-notebook.json b/integration-tests/examples/azure_templates/deeplearning/deeplearning-notebook.json
deleted file mode 100644
index bf0159d..0000000
--- a/integration-tests/examples/azure_templates/deeplearning/deeplearning-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-deeplearning",
-  "name" : "set the name",
-  "shape" : "Standard_NC6",
-  "version" : "deeplearning-2.2",
-  "template_name" : "Deep Learning 2.2"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/deeplearning/spark_cluster.json b/integration-tests/examples/azure_templates/deeplearning/spark_cluster.json
deleted file mode 100644
index 565307a..0000000
--- a/integration-tests/examples/azure_templates/deeplearning/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "Standard_NC6",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/jupyter/jupyter-notebook.json b/integration-tests/examples/azure_templates/jupyter/jupyter-notebook.json
deleted file mode 100644
index ea6dd42..0000000
--- a/integration-tests/examples/azure_templates/jupyter/jupyter-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-jupyter",
-  "name" : "set the name",
-  "shape" : "Standard_F2s",
-  "version" : "jupyter_notebook-5.7.4",
-  "template_name" : "Jupyter notebook 5.7.4"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/jupyter/spark_cluster.json b/integration-tests/examples/azure_templates/jupyter/spark_cluster.json
deleted file mode 100644
index 51615e1..0000000
--- a/integration-tests/examples/azure_templates/jupyter/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "Standard_F4s",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/rstudio/rstudio-notebook.json b/integration-tests/examples/azure_templates/rstudio/rstudio-notebook.json
deleted file mode 100644
index 7a82e7c..0000000
--- a/integration-tests/examples/azure_templates/rstudio/rstudio-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-rstudio",
-  "name" : "set the name",
-  "shape" : "Standard_F2s",
-  "version" : "RStudio-1.1.463",
-  "template_name" : "RStudio 1.1.463"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/rstudio/spark_cluster.json b/integration-tests/examples/azure_templates/rstudio/spark_cluster.json
deleted file mode 100644
index 51615e1..0000000
--- a/integration-tests/examples/azure_templates/rstudio/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "Standard_F4s",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/tensor/spark_cluster.json b/integration-tests/examples/azure_templates/tensor/spark_cluster.json
deleted file mode 100644
index 565307a..0000000
--- a/integration-tests/examples/azure_templates/tensor/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "Standard_NC6",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/tensor/tensor-notebook.json b/integration-tests/examples/azure_templates/tensor/tensor-notebook.json
deleted file mode 100644
index 7e62794..0000000
--- a/integration-tests/examples/azure_templates/tensor/tensor-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-tensor",
-  "name" : "set the name",
-  "shape" : "Standard_NC6",
-  "version" : "tensorflow_gpu-1.3.0",
-  "template_name" : "TensorFlow 1.3.0"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/zeppelin/spark_cluster.json b/integration-tests/examples/azure_templates/zeppelin/spark_cluster.json
deleted file mode 100644
index 51615e1..0000000
--- a/integration-tests/examples/azure_templates/zeppelin/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "Standard_F4s",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/azure_templates/zeppelin/zeppelin-notebook.json b/integration-tests/examples/azure_templates/zeppelin/zeppelin-notebook.json
deleted file mode 100644
index 2c7aff4..0000000
--- a/integration-tests/examples/azure_templates/zeppelin/zeppelin-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-zeppelin",
-  "name" : "set the name",
-  "shape" : "Standard_F2s",
-  "version" : "zeppelin-0.8.0",
-  "template_name": "Apache Zeppelin 0.8.0"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/config.properties b/integration-tests/examples/config.properties
deleted file mode 100644
index 4ee463e..0000000
--- a/integration-tests/examples/config.properties
+++ /dev/null
@@ -1,89 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-JENKINS_USERNAME=
-JENKINS_PASSWORD=
-USERNAME=
-PASSWORD=
-NOT_IAM_USERNAME=
-NOT_IAM_PASSWORD=
-NOT_DLAB_USERNAME=
-NOT_DLAB_PASSWORD=
-JENKINS_JOB_URL=
-USER_FOR_ACTIVATE_KEY=
-PASSWORD_FOR_ACTIVATE_KEY=
-
-ACCESS_KEY_PRIV_FILE_NAME=
-ACCESS_KEY_PUB_FILE_NAME=
-
-AWS_ACCESS_KEY_ID=
-AWS_SECRET_ACCESS_KEY=
-AWS_REGION=
-AWS_REQUEST_TIMEOUT=10s
-
-TIMEOUT_JENKINS_AUTOTEST=20m
-TIMEOUT_UPLOAD_KEY=40m
-TIMEOUT_SSN_STARTUP=60m
-
-CLUSTER_OS_USERNAME=dlab-user
-CLUSTER_OS_FAMILY=debian
-
-#NOTEBOOKS_TO_TEST=\
-#		[\
-#			{\
-#				"notebook_template": "jupyter",\
-#				"data_engine_type": "dataengine",\
-#				"full_test": false,\
-#				"timeout_notebook_create": "60m",\
-#				"timeout_notebook_startup": "20m",\
-#				"timeout_notebook_shutdown": "10m",\
-#				"timeout_cluster_create": "60m",\
-#				"timeout_cluster_terminate": "20m",\
-#				"timeout_lib_groups": "5m",\
-#				"timeout_lib_list": "5m",\
-#				"timeout_lib_install": "15m"\
-#			},\
-#			{\
-#				"notebook_template": "rstudio",\
-#				"data_engine_type": "dataengine-service"\
-#			},\
-#                       {\
-#                               "notebook_template": "zeppelin",\
-#                               "data_engine_type": "dataengine-service"\
-#                       }\
-#		]
-
-NOTEBOOKS_TO_TEST=[\
-                        {\
-                                "notebook_template":"jupyter",\
-                                "data_engine_type":"dataengine",\
-                                "full_test":true\
-                        },\
-                        {\
-                                "notebook_template":"jupyter",\
-                                "data_engine_type":"dataengine-service"\
-                        }\
-                ]
-JUPYTER_SCENARIO_FILES=
-S3_TESTS_TEMPLATE_BUCKET_NAME=
-
-#RUN_MODE_LOCAL=true
-#USE_JENKINS=false
-#SSN_URL=https://localhost:8443
-#SERVICE_BASE_NAME=
diff --git a/integration-tests/examples/copy_files.py b/integration-tests/examples/copy_files.py
deleted file mode 100644
index 3ecc17d..0000000
--- a/integration-tests/examples/copy_files.py
+++ /dev/null
@@ -1,113 +0,0 @@
-#!/usr/bin/python
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import os, sys, json
-import argparse
-from fabric.api import *
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--storage', type=str, default='S3/GCP buckets, Azure Blob container / Datalake folder')
-parser.add_argument('--notebook', type=str, default='aws, azure, gcp')
-parser.add_argument('--cloud', type=str, default='aws, azure, gcp')
-parser.add_argument('--azure_storage_account', type=str, default='')
-parser.add_argument('--azure_datalake_account', type=str, default='')
-args = parser.parse_args()
-
-dataset_file = ['airports.csv', 'carriers.csv', '2008.csv.bz2']
-
-def download_dataset():
-    try:
-        for f in dataset_file:
-            local('wget http://stat-computing.org/dataexpo/2009/{0} -O /tmp/{0}'.format(f))
-    except Exception as err:
-        print('Failed to download test dataset', str(err))
-        sys.exit(1)
-
-def upload_aws():
-    try:
-        for f in dataset_file:
-            local('aws s3 cp /tmp/{0} s3://{1}/{2}_dataset/ --sse AES256'.format(f, args.storage, args.notebook))
-    except Exception as err:
-        print('Failed to upload test dataset to bucket', str(err))
-        sys.exit(1)
-
-def upload_azure_datalake():
-    try:
-        from azure.datalake.store import core, lib, multithread
-        sp_creds = json.loads(open(os.environ['AZURE_AUTH_LOCATION']).read())
-        dl_filesystem_creds = lib.auth(tenant_id=json.dumps(sp_creds['tenantId']).replace('"', ''),
-                                       client_secret=json.dumps(sp_creds['clientSecret']).replace('"', ''),
-                                       client_id=json.dumps(sp_creds['clientId']).replace('"', ''),
-                                       resource='https://datalake.azure.net/')
-        datalake_client = core.AzureDLFileSystem(dl_filesystem_creds, store_name=args.azure_datalake_account)
-        for f in dataset_file:
-            multithread.ADLUploader(datalake_client,
-                                    lpath='/tmp/{0}'.format(f),
-                                    rpath='{0}/{1}_dataset/{2}'.format(args.storage, args.notebook, f))
-    except Exception as err:
-        print('Failed to upload test dataset to datalake store', str(err))
-        sys.exit(1)
-
-def upload_azure_blob():
-    try:
-        from azure.mgmt.storage import StorageManagementClient
-        from azure.storage.blob import BlockBlobService
-        from azure.common.client_factory import get_client_from_auth_file
-        storage_client = get_client_from_auth_file(StorageManagementClient)
-        resource_group_name = ''
-        for i in storage_client.storage_accounts.list():
-            if args.storage.replace('container', 'storage') == str(i.tags.get('Name')):
-                resource_group_name = str(i.tags.get('SBN'))
-        secret_key = storage_client.storage_accounts.list_keys(resource_group_name, args.azure_storage_account).keys[0].value
-        block_blob_service = BlockBlobService(account_name=args.azure_storage_account, account_key=secret_key)
-        for f in dataset_file:
-            block_blob_service.create_blob_from_path(args.storage, '{0}_dataset/{1}'.format(args.notebook, f), '/tmp/{0}'.format(f))
-    except Exception as err:
-        print('Failed to upload test dataset to blob storage', str(err))
-        sys.exit(1)
-
-def upload_gcp():
-    try:
-        for f in dataset_file:
-            local('sudo gsutil -m cp /tmp/{0} gs://{1}/{2}_dataset/'.format(f, args.storage, args.notebook))
-    except Exception as err:
-        print('Failed to upload test dataset to bucket', str(err))
-        sys.exit(1)
-
-if __name__ == "__main__":
-    download_dataset()
-    if args.cloud == 'aws':
-        upload_aws()
-    elif args.cloud == 'azure':
-        os.environ['AZURE_AUTH_LOCATION'] = '/home/dlab-user/keys/azure_auth.json'
-        if args.azure_datalake_account:
-            upload_azure_datalake()
-        else:
-            upload_azure_blob()
-    elif args.cloud == 'gcp':
-        upload_gcp()
-    else:
-        print('Error! Unknown cloud provider.')
-        sys.exit(1)
-
-    sys.exit(0)
diff --git a/integration-tests/examples/ec2_templates/deeplearning/EMR.json b/integration-tests/examples/ec2_templates/deeplearning/EMR.json
deleted file mode 100644
index 62f80d3..0000000
--- a/integration-tests/examples/ec2_templates/deeplearning/EMR.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "2",
-  "emr_master_instance_type" : "c4.xlarge",
-  "emr_slave_instance_type" : "c4.xlarge",
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/deeplearning/EMR_spot.json b/integration-tests/examples/ec2_templates/deeplearning/EMR_spot.json
deleted file mode 100644
index feedfd1..0000000
--- a/integration-tests/examples/ec2_templates/deeplearning/EMR_spot.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "3",
-  "emr_master_instance_type" : "c4.large",
-  "emr_slave_instance_type" : "c4.large",
-  "emr_slave_instance_spot":true,
-  "emr_slave_instance_spot_pct_price":40,
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/deeplearning/deeplearning-notebook.json b/integration-tests/examples/ec2_templates/deeplearning/deeplearning-notebook.json
deleted file mode 100644
index 846203b..0000000
--- a/integration-tests/examples/ec2_templates/deeplearning/deeplearning-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-deeplearning",
-  "name" : "set the name",
-  "shape" : "p2.xlarge",
-  "version" : "deeplearning-2.2",
-  "template_name" : "Deep Learning 2.2"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/deeplearning/spark_cluster.json b/integration-tests/examples/ec2_templates/deeplearning/spark_cluster.json
deleted file mode 100644
index 33975f2..0000000
--- a/integration-tests/examples/ec2_templates/deeplearning/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "p2.xlarge",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/EMR.json b/integration-tests/examples/ec2_templates/jupyter/EMR.json
deleted file mode 100644
index 62f80d3..0000000
--- a/integration-tests/examples/ec2_templates/jupyter/EMR.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "2",
-  "emr_master_instance_type" : "c4.xlarge",
-  "emr_slave_instance_type" : "c4.xlarge",
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/EMR_spot.json b/integration-tests/examples/ec2_templates/jupyter/EMR_spot.json
deleted file mode 100644
index feedfd1..0000000
--- a/integration-tests/examples/ec2_templates/jupyter/EMR_spot.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "3",
-  "emr_master_instance_type" : "c4.large",
-  "emr_slave_instance_type" : "c4.large",
-  "emr_slave_instance_spot":true,
-  "emr_slave_instance_spot_pct_price":40,
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/jupyter-notebook.json b/integration-tests/examples/ec2_templates/jupyter/jupyter-notebook.json
deleted file mode 100644
index adbea51..0000000
--- a/integration-tests/examples/ec2_templates/jupyter/jupyter-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-jupyter",
-  "name" : "set the name",
-  "shape" : "t2.medium",
-  "version" : "jupyter_notebook-5.7.4",
-  "template_name" : "Jupyter notebook 5.7.4"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/jupyter/spark_cluster.json b/integration-tests/examples/ec2_templates/jupyter/spark_cluster.json
deleted file mode 100644
index 65b0d92..0000000
--- a/integration-tests/examples/ec2_templates/jupyter/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "c4.xlarge",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/EMR.json b/integration-tests/examples/ec2_templates/rstudio/EMR.json
deleted file mode 100644
index 62f80d3..0000000
--- a/integration-tests/examples/ec2_templates/rstudio/EMR.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "2",
-  "emr_master_instance_type" : "c4.xlarge",
-  "emr_slave_instance_type" : "c4.xlarge",
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/EMR_spot.json b/integration-tests/examples/ec2_templates/rstudio/EMR_spot.json
deleted file mode 100644
index feedfd1..0000000
--- a/integration-tests/examples/ec2_templates/rstudio/EMR_spot.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "3",
-  "emr_master_instance_type" : "c4.large",
-  "emr_slave_instance_type" : "c4.large",
-  "emr_slave_instance_spot":true,
-  "emr_slave_instance_spot_pct_price":40,
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/rstudio-notebook.json b/integration-tests/examples/ec2_templates/rstudio/rstudio-notebook.json
deleted file mode 100644
index d06b9b0..0000000
--- a/integration-tests/examples/ec2_templates/rstudio/rstudio-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-rstudio",
-  "name" : "set the name",
-  "shape" : "t2.medium",
-  "version" : "RStudio-1.1.463",
-  "template_name" : "RStudio 1.1.463"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/rstudio/spark_cluster.json b/integration-tests/examples/ec2_templates/rstudio/spark_cluster.json
deleted file mode 100644
index 65b0d92..0000000
--- a/integration-tests/examples/ec2_templates/rstudio/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "c4.xlarge",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/EMR.json b/integration-tests/examples/ec2_templates/tensor/EMR.json
deleted file mode 100644
index 62f80d3..0000000
--- a/integration-tests/examples/ec2_templates/tensor/EMR.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "2",
-  "emr_master_instance_type" : "c4.xlarge",
-  "emr_slave_instance_type" : "c4.xlarge",
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/EMR_spot.json b/integration-tests/examples/ec2_templates/tensor/EMR_spot.json
deleted file mode 100644
index feedfd1..0000000
--- a/integration-tests/examples/ec2_templates/tensor/EMR_spot.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "3",
-  "emr_master_instance_type" : "c4.large",
-  "emr_slave_instance_type" : "c4.large",
-  "emr_slave_instance_spot":true,
-  "emr_slave_instance_spot_pct_price":40,
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/spark_cluster.json b/integration-tests/examples/ec2_templates/tensor/spark_cluster.json
deleted file mode 100644
index 33975f2..0000000
--- a/integration-tests/examples/ec2_templates/tensor/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "p2.xlarge",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/tensor/tensor-notebook.json b/integration-tests/examples/ec2_templates/tensor/tensor-notebook.json
deleted file mode 100644
index a5945a0..0000000
--- a/integration-tests/examples/ec2_templates/tensor/tensor-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-tensor",
-  "name" : "set the name",
-  "shape" : "p2.xlarge",
-  "version" : "tensorflow_gpu-1.3.0",
-  "template_name" : "TensorFlow 1.3.0"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/EMR.json b/integration-tests/examples/ec2_templates/zeppelin/EMR.json
deleted file mode 100644
index 62f80d3..0000000
--- a/integration-tests/examples/ec2_templates/zeppelin/EMR.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "2",
-  "emr_master_instance_type" : "c4.xlarge",
-  "emr_slave_instance_type" : "c4.xlarge",
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/EMR_spot.json b/integration-tests/examples/ec2_templates/zeppelin/EMR_spot.json
deleted file mode 100644
index feedfd1..0000000
--- a/integration-tests/examples/ec2_templates/zeppelin/EMR_spot.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "emr_instance_count" : "3",
-  "emr_master_instance_type" : "c4.large",
-  "emr_slave_instance_type" : "c4.large",
-  "emr_slave_instance_spot":true,
-  "emr_slave_instance_spot_pct_price":40,
-  "emr_version" : "emr-5.12.0",
-  "notebook_name" : "set notebook name",
-  "template_name" : "EMR cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/spark_cluster.json b/integration-tests/examples/ec2_templates/zeppelin/spark_cluster.json
deleted file mode 100644
index 65b0d92..0000000
--- a/integration-tests/examples/ec2_templates/zeppelin/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "c4.xlarge",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/ec2_templates/zeppelin/zeppelin-notebook.json b/integration-tests/examples/ec2_templates/zeppelin/zeppelin-notebook.json
deleted file mode 100644
index 287ef45..0000000
--- a/integration-tests/examples/ec2_templates/zeppelin/zeppelin-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-zeppelin",
-  "name" : "set the name",
-  "shape" : "t2.medium",
-  "version" : "zeppelin-0.8.0",
-  "template_name": "Apache Zeppelin 0.8.0"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/deeplearning/dataproc.json b/integration-tests/examples/gcp_templates/deeplearning/dataproc.json
deleted file mode 100644
index 1f4724f..0000000
--- a/integration-tests/examples/gcp_templates/deeplearning/dataproc.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "dataproc_master_count": "1",
-  "dataproc_slave_count": "2",
-  "dataproc_preemptible_count": "0",
-  "dataproc_master_instance_type" : "n1-standard-2",
-  "dataproc_slave_instance_type" : "n1-standard-2",
-  "dataproc_version" : "1.2",
-  "notebook_name": "set notebook name",
-  "template_name" : "Dataproc cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/deeplearning/deeplearning-notebook.json b/integration-tests/examples/gcp_templates/deeplearning/deeplearning-notebook.json
deleted file mode 100644
index 50924b3..0000000
--- a/integration-tests/examples/gcp_templates/deeplearning/deeplearning-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-deeplearning",
-  "name" : "set the name",
-  "shape" : "n1-highcpu-8",
-  "version" : "deeplearning-1.9",
-  "template_name" : "Deep Learning 1.9"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/deeplearning/spark_cluster.json b/integration-tests/examples/gcp_templates/deeplearning/spark_cluster.json
deleted file mode 100644
index 63047da..0000000
--- a/integration-tests/examples/gcp_templates/deeplearning/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "n1-standard-2",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/jupyter/dataproc.json b/integration-tests/examples/gcp_templates/jupyter/dataproc.json
deleted file mode 100644
index 1f4724f..0000000
--- a/integration-tests/examples/gcp_templates/jupyter/dataproc.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "dataproc_master_count": "1",
-  "dataproc_slave_count": "2",
-  "dataproc_preemptible_count": "0",
-  "dataproc_master_instance_type" : "n1-standard-2",
-  "dataproc_slave_instance_type" : "n1-standard-2",
-  "dataproc_version" : "1.2",
-  "notebook_name": "set notebook name",
-  "template_name" : "Dataproc cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/jupyter/jupyter-notebook.json b/integration-tests/examples/gcp_templates/jupyter/jupyter-notebook.json
deleted file mode 100644
index c109dc1..0000000
--- a/integration-tests/examples/gcp_templates/jupyter/jupyter-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-jupyter",
-  "name" : "set the name",
-  "shape" : "n1-standard-2",
-  "version" : "jupyter_notebook-5.7.4",
-  "template_name" : "Jupyter notebook 5.7.4"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/jupyter/spark_cluster.json b/integration-tests/examples/gcp_templates/jupyter/spark_cluster.json
deleted file mode 100644
index 63047da..0000000
--- a/integration-tests/examples/gcp_templates/jupyter/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "n1-standard-2",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/rstudio/dataproc.json b/integration-tests/examples/gcp_templates/rstudio/dataproc.json
deleted file mode 100644
index 1f4724f..0000000
--- a/integration-tests/examples/gcp_templates/rstudio/dataproc.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "dataproc_master_count": "1",
-  "dataproc_slave_count": "2",
-  "dataproc_preemptible_count": "0",
-  "dataproc_master_instance_type" : "n1-standard-2",
-  "dataproc_slave_instance_type" : "n1-standard-2",
-  "dataproc_version" : "1.2",
-  "notebook_name": "set notebook name",
-  "template_name" : "Dataproc cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/rstudio/rstudio-notebook.json b/integration-tests/examples/gcp_templates/rstudio/rstudio-notebook.json
deleted file mode 100644
index 607992a..0000000
--- a/integration-tests/examples/gcp_templates/rstudio/rstudio-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-rstudio",
-  "name" : "set the name",
-  "shape" : "n1-standard-2",
-  "version" : "RStudio-1.1.463",
-  "template_name" : "RStudio 1.1.463"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/rstudio/spark_cluster.json b/integration-tests/examples/gcp_templates/rstudio/spark_cluster.json
deleted file mode 100644
index 63047da..0000000
--- a/integration-tests/examples/gcp_templates/rstudio/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "n1-standard-2",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/tensor/dataproc.json b/integration-tests/examples/gcp_templates/tensor/dataproc.json
deleted file mode 100644
index 1f4724f..0000000
--- a/integration-tests/examples/gcp_templates/tensor/dataproc.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "dataproc_master_count": "1",
-  "dataproc_slave_count": "2",
-  "dataproc_preemptible_count": "0",
-  "dataproc_master_instance_type" : "n1-standard-2",
-  "dataproc_slave_instance_type" : "n1-standard-2",
-  "dataproc_version" : "1.2",
-  "notebook_name": "set notebook name",
-  "template_name" : "Dataproc cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/tensor/spark_cluster.json b/integration-tests/examples/gcp_templates/tensor/spark_cluster.json
deleted file mode 100644
index 63047da..0000000
--- a/integration-tests/examples/gcp_templates/tensor/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "n1-standard-2",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/tensor/tensor-notebook.json b/integration-tests/examples/gcp_templates/tensor/tensor-notebook.json
deleted file mode 100644
index 6586c21..0000000
--- a/integration-tests/examples/gcp_templates/tensor/tensor-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-tensor",
-  "name" : "set the name",
-  "shape" : "n1-highcpu-8",
-  "version" : "tensorflow_gpu-1.4.0",
-  "template_name" : "TensorFlow 1.4.0"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/zeppelin/dataproc.json b/integration-tests/examples/gcp_templates/zeppelin/dataproc.json
deleted file mode 100644
index 1f4724f..0000000
--- a/integration-tests/examples/gcp_templates/zeppelin/dataproc.json
+++ /dev/null
@@ -1,12 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine-service",
-  "name" : "set the name",
-  "dataproc_master_count": "1",
-  "dataproc_slave_count": "2",
-  "dataproc_preemptible_count": "0",
-  "dataproc_master_instance_type" : "n1-standard-2",
-  "dataproc_slave_instance_type" : "n1-standard-2",
-  "dataproc_version" : "1.2",
-  "notebook_name": "set notebook name",
-  "template_name" : "Dataproc cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/zeppelin/spark_cluster.json b/integration-tests/examples/gcp_templates/zeppelin/spark_cluster.json
deleted file mode 100644
index 63047da..0000000
--- a/integration-tests/examples/gcp_templates/zeppelin/spark_cluster.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "image" : "docker.dlab-dataengine",
-  "name" : "set the name",
-  "dataengine_instance_count" : "2",
-  "dataengine_instance_shape": "n1-standard-2",
-  "notebook_name" : "set notebook name",
-  "template_name": "Apache Spark cluster"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/gcp_templates/zeppelin/zeppelin-notebook.json b/integration-tests/examples/gcp_templates/zeppelin/zeppelin-notebook.json
deleted file mode 100644
index 56b8790..0000000
--- a/integration-tests/examples/gcp_templates/zeppelin/zeppelin-notebook.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
-  "image" : "docker.dlab-zeppelin",
-  "name" : "set the name",
-  "shape" : "n1-standard-2",
-  "version" : "zeppelin-0.8.0",
-  "template_name": "Apache Zeppelin 0.8.0"
-}
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py b/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
deleted file mode 100644
index edc981e..0000000
--- a/integration-tests/examples/scenario_deeplearning/deeplearning_tests.py
+++ /dev/null
@@ -1,140 +0,0 @@
-#!/usr/bin/python
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import os, sys, json
-from fabric.api import *
-import argparse
-
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--storage', type=str, default='')
-parser.add_argument('--cloud', type=str, default='')
-parser.add_argument('--os_user', type=str, default='')
-parser.add_argument('--cluster_name', type=str, default='')
-parser.add_argument('--azure_storage_account', type=str, default='')
-parser.add_argument('--azure_datalake_account', type=str, default='')
-args = parser.parse_args()
-
-
-def prepare_templates():
-    try:
-        local('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"')
-        local('unzip -q /tmp/dogscats.zip -d /tmp')
-        local('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"))
-        local('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user))
-        local('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user))
-        local('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user))
-    except Exception as err:
-        print('Failed to download/unpack image dataset!', str(err))
-        sys.exit(1)
-    local('mkdir -p /home/{0}/logs'.format(args.os_user))
-    local('mv /tmp/deeplearning /home/{0}/test_templates'.format(args.os_user))
-
-def get_storage():
-    storages = {"aws": args.storage,
-                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
-                "gcp": args.storage}
-    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
-    if args.azure_datalake_account:
-        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
-        protocols['azure'] = 'adl'
-    return (storages[args.cloud], protocols[args.cloud])
-
-def prepare_ipynb(kernel_name, template_path, ipynb_name):
-    with open(template_path, 'r') as f:
-        text = f.read()
-    text = text.replace('KERNEL_NAME', kernel_name)
-    with open('/home/{}/{}.ipynb'.format(args.os_user, ipynb_name), 'w') as f:
-        f.write(text)
-
-def run_ipynb(ipynb_name):
-    local('export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64:/usr/lib64/openmpi/lib; ' \
-            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name))
-
-def run_tensor():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_tensor.ipynb'.format(args.os_user), 'preparation_tensor')
-        run_ipynb('preparation_tensor')
-        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_tensor.ipynb'.format(args.os_user), 'visualization_tensor')
-        run_ipynb('visualization_tensor')
-
-def run_caffe():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_caffe.ipynb'.format(args.os_user), 'test_caffe')
-        run_ipynb('test_caffe')
-
-def run_caffe2():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_caffe2.ipynb'.format(args.os_user), 'test_caffe2')
-        run_ipynb('test_caffe2')
-
-def run_cntk():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_cntk.ipynb'.format(args.os_user), 'test_cntk')
-        run_ipynb('test_cntk')
-
-def run_keras():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_keras.ipynb'.format(args.os_user), 'test_keras')
-        run_ipynb('test_keras')
-
-def run_mxnet():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_mxnet.ipynb'.format(args.os_user), 'test_mxnet')
-        run_ipynb('test_mxnet')
-
-def run_theano():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_theano.ipynb'.format(args.os_user), 'test_theano')
-        run_ipynb('test_theano')
-
-def run_torch():
-    interpreters = ['itorch']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_torch.ipynb'.format(args.os_user), 'test_torch')
-        run_ipynb('test_torch')
-
-
-if __name__ == "__main__":
-    try:
-        prepare_templates()
-        run_tensor()
-        run_caffe()
-        run_caffe2()
-        run_cntk()
-        run_keras()
-        run_mxnet()
-        run_theano()
-        run_torch()
-    except Exception as err:
-        print('Error!', str(err))
-        sys.exit(1)
-
-    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_jupyter/jupyter_tests.py b/integration-tests/examples/scenario_jupyter/jupyter_tests.py
deleted file mode 100644
index 018e678..0000000
--- a/integration-tests/examples/scenario_jupyter/jupyter_tests.py
+++ /dev/null
@@ -1,102 +0,0 @@
-#!/usr/bin/python
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import os, sys, json
-from fabric.api import *
-import argparse
-
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--storage', type=str, default='')
-parser.add_argument('--cloud', type=str, default='')
-parser.add_argument('--os_user', type=str, default='')
-parser.add_argument('--cluster_name', type=str, default='')
-parser.add_argument('--azure_storage_account', type=str, default='')
-parser.add_argument('--azure_datalake_account', type=str, default='')
-args = parser.parse_args()
-
-
-def prepare_templates():
-    local('mv /tmp/jupyter /home/{0}/test_templates'.format(args.os_user))
-
-def get_storage():
-    storages = {"aws": args.storage,
-                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
-                "gcp": args.storage}
-    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
-    if args.azure_datalake_account:
-        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
-        protocols['azure'] = 'adl'
-    return (storages[args.cloud], protocols[args.cloud])
-
-def prepare_ipynb(kernel_name, template_path, ipynb_name):
-    with open(template_path, 'r') as f:
-        text = f.read()
-    text = text.replace('WORKING_STORAGE', get_storage()[0])
-    text = text.replace('PROTOCOL_NAME', get_storage()[1])
-    text = text.replace('KERNEL_NAME', kernel_name)
-    with open('/home/{}/{}.ipynb'.format(args.os_user, ipynb_name), 'w') as f:
-        f.write(text)
-
-def run_ipynb(ipynb_name):
-    local('jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name))
-
-def run_pyspark():
-    interpreters = ['pyspark_local', 'pyspark_' + args.cluster_name]
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_pyspark.ipynb'.format(args.os_user),
-                      'preparation_pyspark')
-        run_ipynb('preparation_pyspark')
-        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_pyspark.ipynb'.format(args.os_user),
-                      'visualization_pyspark')
-        run_ipynb('visualization_pyspark')
-
-def run_spark():
-    interpreters = ['apache_toree_scala', 'toree_' + args.cluster_name]
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_spark.ipynb'.format(args.os_user),
-                      'preparation_spark')
-        run_ipynb('preparation_spark')
-
-def run_sparkr():
-    interpreters = ['ir', 'r_' + args.cluster_name]
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_sparkr.ipynb'.format(args.os_user),
-                      'preparation_sparkr')
-        run_ipynb('preparation_sparkr')
-        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_sparkr.ipynb'.format(args.os_user),
-                      'visualization_sparkr')
-        run_ipynb('visualization_sparkr')
-
-
-if __name__ == "__main__":
-    try:
-        prepare_templates()
-        run_pyspark()
-        run_spark()
-        run_sparkr()
-    except Exception as err:
-        print('Error!', str(err))
-        sys.exit(1)
-
-    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_rstudio/rstudio_tests.py b/integration-tests/examples/scenario_rstudio/rstudio_tests.py
deleted file mode 100644
index d9a1540..0000000
--- a/integration-tests/examples/scenario_rstudio/rstudio_tests.py
+++ /dev/null
@@ -1,95 +0,0 @@
-#!/usr/bin/python
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import os, sys, json
-from fabric.api import *
-import argparse
-
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--storage', type=str, default='')
-parser.add_argument('--cloud', type=str, default='')
-parser.add_argument('--os_user', type=str, default='')
-parser.add_argument('--cluster_name', type=str, default='')
-parser.add_argument('--azure_storage_account', type=str, default='')
-parser.add_argument('--azure_datalake_account', type=str, default='')
-args = parser.parse_args()
-
-
-def prepare_templates():
-    local('mv /tmp/rstudio /home/{0}/test_templates'.format(args.os_user))
-
-def get_storage():
-    storages = {"aws": args.storage,
-                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
-                "gcp": args.storage}
-    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
-    if args.azure_datalake_account:
-        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
-        protocols['azure'] = 'adl'
-    return (storages[args.cloud], protocols[args.cloud])
-
-def prepare_rscript(template_path, rscript_name, kernel='remote'):
-    with open(template_path, 'r') as f:
-        text = f.read()
-    text = text.replace('WORKING_STORAGE', get_storage()[0])
-    text = text.replace('PROTOCOL_NAME', get_storage()[1])
-    if kernel == 'remote':
-        if '-de-' in args.cluster_name:
-            text = text.replace('MASTER', 'master')
-        elif '-des-' in args.cluster_name:
-            text = text.replace('MASTER', 'master = "yarn"')
-    elif kernel == 'local':
-        text = text.replace('MASTER', 'master = "local[*]"')
-    with open('/home/{}/{}.r'.format(args.os_user, rscript_name), 'w') as f:
-        f.write(text)
-
-def enable_local_kernel():
-    local("sed -i 's/^master/#master/' /home/{0}/.Rprofile".format(args.os_user))
-    local('''sed -i "s/^/#/g" /home/{0}/.Renviron'''.format(args.os_user))
-    local('''sed -i "/\/opt\/spark\//s/#//g" /home/{0}/.Renviron'''.format(args.os_user))
-    local('rm -f metastore_db/db* derby.log')
-
-def run_rscript(rscript_name):
-    local('R < /home/{0}/{1}.r --no-save'.format(args.os_user, rscript_name))
-
-
-if __name__ == "__main__":
-    try:
-        prepare_templates()
-        # Running on remote kernel
-        prepare_rscript('/home/{}/test_templates/template_preparation.r'.format(args.os_user), 'preparation', 'remote')
-        run_rscript('preparation')
-        prepare_rscript('/home/{}/test_templates/template_visualization.r'.format(args.os_user), 'visualization', 'remote')
-        run_rscript('visualization')
-        # Running on local kernel
-        enable_local_kernel()
-        prepare_rscript('/home/{}/test_templates/template_preparation.r'.format(args.os_user), 'preparation', 'local')
-        prepare_rscript('/home/{}/test_templates/template_visualization.r'.format(args.os_user), 'visualization', 'local')
-        run_rscript('preparation')
-        run_rscript('visualization')
-    except Exception as err:
-        print('Error!', str(err))
-        sys.exit(1)
-
-    sys.exit(0)
diff --git a/integration-tests/examples/scenario_tensor/tensor_tests.py b/integration-tests/examples/scenario_tensor/tensor_tests.py
deleted file mode 100644
index 3bbb93b..0000000
--- a/integration-tests/examples/scenario_tensor/tensor_tests.py
+++ /dev/null
@@ -1,91 +0,0 @@
-#!/usr/bin/python
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import os, sys, json
-from fabric.api import *
-import argparse
-
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--storage', type=str, default='')
-parser.add_argument('--cloud', type=str, default='')
-parser.add_argument('--os_user', type=str, default='')
-parser.add_argument('--cluster_name', type=str, default='')
-parser.add_argument('--azure_storage_account', type=str, default='')
-parser.add_argument('--azure_datalake_account', type=str, default='')
-args = parser.parse_args()
-
-
-def prepare_templates():
-    try:
-        local('/bin/bash -c "source /etc/profile && wget http://files.fast.ai/data/dogscats.zip -O /tmp/dogscats.zip"')
-        local('unzip -q /tmp/dogscats.zip -d /tmp')
-        local('/bin/bash -c "mkdir -p /home/{0}/{1}"'.format(args.os_user, "{test,train}"))
-        local('mv /tmp/dogscats/test1/* /home/{0}/test'.format(args.os_user))
-        local('/bin/bash -c "mv /tmp/dogscats/valid/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user))
-        local('/bin/bash -c "mv /tmp/dogscats/train/{0}/* /home/{1}/train"'.format("{cats,dogs}", args.os_user))
-    except Exception as err:
-        print('Failed to download/unpack image dataset!', str(err))
-        sys.exit(1)
-    local('mkdir -p /home/{0}/logs'.format(args.os_user))
-    local('mv /tmp/tensor /home/{0}/test_templates'.format(args.os_user))
-
-def get_storage():
-    storages = {"aws": args.storage,
-                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
-                "gcp": args.storage}
-    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
-    if args.azure_datalake_account:
-        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
-        protocols['azure'] = 'adl'
-    return (storages[args.cloud], protocols[args.cloud])
-
-def prepare_ipynb(kernel_name, template_path, ipynb_name):
-    with open(template_path, 'r') as f:
-        text = f.read()
-    text = text.replace('KERNEL_NAME', kernel_name)
-    with open('/home/{}/{}.ipynb'.format(args.os_user, ipynb_name), 'w') as f:
-        f.write(text)
-
-def run_ipynb(ipynb_name):
-    local('export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/opt/cudnn/lib64:/usr/local/cuda/lib64; ' \
-            'jupyter nbconvert --ExecutePreprocessor.timeout=-1 --ExecutePreprocessor.startup_timeout=300 --execute /home/{}/{}.ipynb'.format(args.os_user, ipynb_name))
-
-def run_tensor():
-    interpreters = ['pyspark_local']
-    for i in interpreters:
-        prepare_ipynb(i, '/home/{}/test_templates/template_preparation_tensor.ipynb'.format(args.os_user), 'preparation_tensor')
-        run_ipynb('preparation_tensor')
-        prepare_ipynb(i, '/home/{}/test_templates/template_visualization_tensor.ipynb'.format(args.os_user), 'visualization_tensor')
-        run_ipynb('visualization_tensor')
-
-
-if __name__ == "__main__":
-    try:
-        prepare_templates()
-        run_tensor()
-    except Exception as err:
-        print('Error!', str(err))
-        sys.exit(1)
-
-    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py b/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
deleted file mode 100644
index 1c9f05c..0000000
--- a/integration-tests/examples/scenario_zeppelin/zeppelin_tests.py
+++ /dev/null
@@ -1,180 +0,0 @@
-#!/usr/bin/python
-
-# *****************************************************************************
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-# 
-#   http://www.apache.org/licenses/LICENSE-2.0
-# 
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-# ******************************************************************************
-
-import os, sys, json
-from fabric.api import *
-import argparse
-import requests
-
-
-parser = argparse.ArgumentParser()
-parser.add_argument('--storage', type=str, default='')
-parser.add_argument('--cloud', type=str, default='')
-parser.add_argument('--os_user', type=str, default='')
-parser.add_argument('--cluster_name', type=str, default='')
-parser.add_argument('--azure_storage_account', type=str, default='')
-parser.add_argument('--azure_datalake_account', type=str, default='')
-args = parser.parse_args()
-
-
-def prepare_templates():
-    local('mv /tmp/zeppelin /home/{0}/test_templates'.format(args.os_user))
-
-def get_storage():
-    storages = {"aws": args.storage,
-                "azure": "{0}@{1}.blob.core.windows.net".format(args.storage, args.azure_storage_account),
-                "gcp": args.storage}
-    protocols = {"aws": "s3a", "azure": "wasbs", "gcp": "gs"}
-    if args.azure_datalake_account:
-        storages['azure'] = "{0}.azuredatalakestore.net/{1}".format(args.azure_datalake_account, args.storage)
-        protocols['azure'] = 'adl'
-    return (storages[args.cloud], protocols[args.cloud])
-
-def get_note_status(note_id, notebook_ip):
-    running = False
-    local('sleep 5')
-    response = requests.get('http://{0}:8080/api/notebook/job/{1}'.format(notebook_ip, note_id))
-    status = json.loads(response.content)
-    for i in status.get('body'):
-        if i.get('status') == "RUNNING" or i.get('status') == "PENDING":
-            print('Notebook status: {}'.format(i.get('status')))
-            running = True
-        elif i.get('status') == "ERROR":
-            print('Error in notebook')
-            sys.exit(1)
-    if running:
-        local('sleep 5')
-        get_note_status(note_id, notebook_ip)
-    else:
-        return "OK"
-
-def import_note(note_path, notebook_ip):
-    headers = {'Accept': 'application/json', 'Content-Type': 'application/json', 'Expires': '0'}
-    response = requests.post('http://{0}:8080/api/notebook/import'.format(notebook_ip), data=open(note_path, 'rb'), headers=headers)
-    status = json.loads(response.content)
-    if status.get('status') == 'OK':
-        print('Imported notebook: {}'.format(note_path))
-        return status.get('body')
-    else:
-        print('Failed to import notebook')
-        sys.exit(1)
-
-def prepare_note(interpreter_name, template_path, note_name):
-    with open(template_path, 'r') as f:
-        text = f.read()
-    text = text.replace('INTERPRETER_NAME', interpreter_name)
-    text = text.replace('WORKING_STORAGE', get_storage()[0])
-    text = text.replace('PROTOCOL_NAME', get_storage()[1])
-    with open(note_name, 'w') as f:
-        f.write(text)
-
-def run_note(note_id, notebook_ip):
-    response = requests.post('http://{0}:8080/api/notebook/job/{1}'.format(notebook_ip, note_id))
-    status = json.loads(response.content)
-    if status.get('status') == 'OK':
-        get_note_status(note_id, notebook_ip)
-    else:
-        print('Failed to run notebook')
-        sys.exit(1)
-
-def remove_note(note_id, notebook_ip):
-    response = requests.delete('http://{0}:8080/api/notebook/{1}'.format(notebook_ip, note_id))
-    status = json.loads(response.content)
-    if status.get('status') == 'OK':
-        return "OK"
-    else:
-        sys.exit(1)
-
-def restart_interpreter(notebook_ip, interpreter):
-    response = requests.get('http://{0}:8080/api/interpreter/setting'.format(notebook_ip))
-    status = json.loads(response.content)
-    if status.get('status') == 'OK':
-        id = [i['id'] for i in status['body'] if i['name'] in interpreter][0]
-        response = requests.put('http://{0}:8080/api/interpreter/setting/restart/{1}'.format(notebook_ip, id))
-        status = json.loads(response.content)
-        if status.get('status') == 'OK':
-            local('sleep 5')
-            return "OK"
-        else:
-            print('Failed to restart interpreter')
-            sys.exit(1)
-    else:
-        print('Failed to get interpreter settings')
-        sys.exit(1)
-
-def run_pyspark():
-    interpreters = ['local_interpreter_python2.pyspark', args.cluster_name + "_py2.pyspark"]
-    for i in interpreters:
-        prepare_note(i, '/home/{}/test_templates/template_preparation_pyspark.json'.format(args.os_user),
-                     '/home/{}/preparation_pyspark.json'.format(args.os_user))
-        note_id = import_note('/home/{}/preparation_pyspark.json'.format(args.os_user), notebook_ip)
-        run_note(note_id, notebook_ip)
-        remove_note(note_id, notebook_ip)
-        prepare_note(i, '/home/{}/test_templates/template_visualization_pyspark.json'.format(args.os_user),
-                     '/home/{}/visualization_pyspark.json'.format(args.os_user))
-        note_id = import_note('/home/{}/visualization_pyspark.json'.format(args.os_user), notebook_ip)
-        run_note(note_id, notebook_ip)
-        remove_note(note_id, notebook_ip)
-        restart_interpreter(notebook_ip, i)
-
-def run_sparkr():
-    if os.path.exists('/opt/livy/'):
-        interpreters = ['local_interpreter_python2.sparkr', args.cluster_name + "_py2.sparkr"]
-    else:
-        interpreters = ['local_interpreter_python2.r', args.cluster_name + "_py2.r"]
-    for i in interpreters:
-        prepare_note(i, '/home/{}/test_templates/template_preparation_sparkr.json'.format(args.os_user),
-                     '/home/{}/preparation_sparkr.json'.format(args.os_user))
-        note_id = import_note('/home/{}/preparation_sparkr.json'.format(args.os_user), notebook_ip)
-        run_note(note_id, notebook_ip)
-        remove_note(note_id, notebook_ip)
-        prepare_note(i, '/home/{}/test_templates/template_visualization_sparkr.json'.format(args.os_user),
-                     '/home/{}/visualization_sparkr.json'.format(args.os_user))
-        note_id = import_note('/home/{}/visualization_sparkr.json'.format(args.os_user), notebook_ip)
-        run_note(note_id, notebook_ip)
-        remove_note(note_id, notebook_ip)
-        restart_interpreter(notebook_ip, i)
-
-def run_spark():
-    interpreters = ['local_interpreter_python2.spark', args.cluster_name + "_py2.spark"]
-    for i in interpreters:
-        prepare_note(i, '/home/{}/test_templates/template_preparation_spark.json'.format(args.os_user),
-                     '/home/{}/preparation_spark.json'.format(args.os_user))
-        note_id = import_note('/home/{}/preparation_spark.json'.format(args.os_user), notebook_ip)
-        run_note(note_id, notebook_ip)
-        remove_note(note_id, notebook_ip)
-        restart_interpreter(notebook_ip, i)
-
-
-if __name__ == "__main__":
-    try:
-        notebook_ip = local('hostname -I', capture=True)
-        prepare_templates()
-        run_pyspark()
-        run_sparkr()
-        run_spark()
-    except Exception as err:
-        print('Error!', str(err))
-        sys.exit(1)
-
-    sys.exit(0)
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/deeplearning/lib_groups.json b/integration-tests/examples/test_libs/deeplearning/lib_groups.json
deleted file mode 100644
index 0e8d040..0000000
--- a/integration-tests/examples/test_libs/deeplearning/lib_groups.json
+++ /dev/null
@@ -1,6 +0,0 @@
-[
-  "pip2",
-  "pip3",
-  "others",
-  "os_pkg"
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/deeplearning/lib_list.json b/integration-tests/examples/test_libs/deeplearning/lib_list.json
deleted file mode 100644
index 8e6d23d..0000000
--- a/integration-tests/examples/test_libs/deeplearning/lib_list.json
+++ /dev/null
@@ -1,18 +0,0 @@
-[
-  {
-    "group": "os_pkg",
-    "start_with": "py"
-  },
-  {
-    "group": "others",
-    "start_with": "gh"
-  },
-  {
-    "group": "pip2",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip3",
-    "start_with": "sp"
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/jupyter/lib_groups.json b/integration-tests/examples/test_libs/jupyter/lib_groups.json
deleted file mode 100644
index 821acb6..0000000
--- a/integration-tests/examples/test_libs/jupyter/lib_groups.json
+++ /dev/null
@@ -1,7 +0,0 @@
-[
-  "pip2",
-  "pip3",
-  "others",
-  "os_pkg",
-  "r_pkg"
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/jupyter/lib_list.json b/integration-tests/examples/test_libs/jupyter/lib_list.json
deleted file mode 100644
index 7986430..0000000
--- a/integration-tests/examples/test_libs/jupyter/lib_list.json
+++ /dev/null
@@ -1,22 +0,0 @@
-[
-  {
-    "group": "os_pkg",
-    "start_with": "py"
-  },
-  {
-    "group": "others",
-    "start_with": "gh"
-  },
-  {
-    "group": "r_pkg",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip2",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip3",
-    "start_with": "sp"
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/lib_groups.json b/integration-tests/examples/test_libs/lib_groups.json
deleted file mode 100644
index 9c4e5f0..0000000
--- a/integration-tests/examples/test_libs/lib_groups.json
+++ /dev/null
@@ -1,6 +0,0 @@
-[
-  "pip2",
-  "pip3",
-  "os_pkg",
-  "r_pkg"
-]
diff --git a/integration-tests/examples/test_libs/lib_list.json b/integration-tests/examples/test_libs/lib_list.json
deleted file mode 100644
index 18385fc..0000000
--- a/integration-tests/examples/test_libs/lib_list.json
+++ /dev/null
@@ -1,6 +0,0 @@
-[
-  {
-    "group": "os_pkg",
-    "start_with": "py"
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/rstudio/lib_groups.json b/integration-tests/examples/test_libs/rstudio/lib_groups.json
deleted file mode 100644
index 821acb6..0000000
--- a/integration-tests/examples/test_libs/rstudio/lib_groups.json
+++ /dev/null
@@ -1,7 +0,0 @@
-[
-  "pip2",
-  "pip3",
-  "others",
-  "os_pkg",
-  "r_pkg"
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/rstudio/lib_list.json b/integration-tests/examples/test_libs/rstudio/lib_list.json
deleted file mode 100644
index 7986430..0000000
--- a/integration-tests/examples/test_libs/rstudio/lib_list.json
+++ /dev/null
@@ -1,22 +0,0 @@
-[
-  {
-    "group": "os_pkg",
-    "start_with": "py"
-  },
-  {
-    "group": "others",
-    "start_with": "gh"
-  },
-  {
-    "group": "r_pkg",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip2",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip3",
-    "start_with": "sp"
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/tensor/lib_groups.json b/integration-tests/examples/test_libs/tensor/lib_groups.json
deleted file mode 100644
index 0e8d040..0000000
--- a/integration-tests/examples/test_libs/tensor/lib_groups.json
+++ /dev/null
@@ -1,6 +0,0 @@
-[
-  "pip2",
-  "pip3",
-  "others",
-  "os_pkg"
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/tensor/lib_list.json b/integration-tests/examples/test_libs/tensor/lib_list.json
deleted file mode 100644
index 8e6d23d..0000000
--- a/integration-tests/examples/test_libs/tensor/lib_list.json
+++ /dev/null
@@ -1,18 +0,0 @@
-[
-  {
-    "group": "os_pkg",
-    "start_with": "py"
-  },
-  {
-    "group": "others",
-    "start_with": "gh"
-  },
-  {
-    "group": "pip2",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip3",
-    "start_with": "sp"
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/zeppelin/lib_groups.json b/integration-tests/examples/test_libs/zeppelin/lib_groups.json
deleted file mode 100644
index 821acb6..0000000
--- a/integration-tests/examples/test_libs/zeppelin/lib_groups.json
+++ /dev/null
@@ -1,7 +0,0 @@
-[
-  "pip2",
-  "pip3",
-  "others",
-  "os_pkg",
-  "r_pkg"
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_libs/zeppelin/lib_list.json b/integration-tests/examples/test_libs/zeppelin/lib_list.json
deleted file mode 100644
index 7986430..0000000
--- a/integration-tests/examples/test_libs/zeppelin/lib_list.json
+++ /dev/null
@@ -1,22 +0,0 @@
-[
-  {
-    "group": "os_pkg",
-    "start_with": "py"
-  },
-  {
-    "group": "others",
-    "start_with": "gh"
-  },
-  {
-    "group": "r_pkg",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip2",
-    "start_with": "sp"
-  },
-  {
-    "group": "pip3",
-    "start_with": "sp"
-  }
-]
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/README.txt b/integration-tests/examples/test_templates/README.txt
deleted file mode 100644
index 1ee4d9f..0000000
--- a/integration-tests/examples/test_templates/README.txt
+++ /dev/null
@@ -1,2 +0,0 @@
-Copy those directories to S3 bucket from which these templates will be copied.
-The source bucket name is specified in config.properties file.
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/deeplearning/cat_gray.jpg b/integration-tests/examples/test_templates/deeplearning/cat_gray.jpg
deleted file mode 100644
index 43c5ce3..0000000
--- a/integration-tests/examples/test_templates/deeplearning/cat_gray.jpg
+++ /dev/null
Binary files differ
diff --git a/integration-tests/examples/test_templates/deeplearning/conv.prototxt b/integration-tests/examples/test_templates/deeplearning/conv.prototxt
deleted file mode 100644
index 0343891..0000000
--- a/integration-tests/examples/test_templates/deeplearning/conv.prototxt
+++ /dev/null
@@ -1,48 +0,0 @@
-# *****************************************************************************
-#
-#  Licensed to the Apache Software Foundation (ASF) under one
-#  or more contributor license agreements.  See the NOTICE file
-#  distributed with this work for additional information
-#  regarding copyright ownership.  The ASF licenses this file
-#  to you under the Apache License, Version 2.0 (the
-#  "License"); you may not use this file except in compliance
-#  with the License.  You may obtain a copy of the License at
-#
-#  http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing,
-#  software distributed under the License is distributed on an
-#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-#  KIND, either express or implied.  See the License for the
-#  specific language governing permissions and limitations
-#  under the License.
-#
-# ******************************************************************************
-
-# Simple single-layer network to showcase editing model parameters.
-name: "convolution"
-layer {
-  name: "data"
-  type: "Input"
-  top: "data"
-  input_param { shape: { dim: 1 dim: 1 dim: 100 dim: 100 } }
-}
-layer {
-  name: "conv"
-  type: "Convolution"
-  bottom: "data"
-  top: "conv"
-  convolution_param {
-    num_output: 3
-    kernel_size: 5
-    stride: 1
-    weight_filler {
-      type: "gaussian"
-      std: 0.01
-    }
-    bias_filler {
-      type: "constant"
-      value: 0
-    }
-  }
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_caffe.ipynb b/integration-tests/examples/test_templates/deeplearning/template_caffe.ipynb
deleted file mode 100644
index 11457fc..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_caffe.ipynb
+++ /dev/null
@@ -1,99 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline\n",
-    "\n",
-    "# Make sure that caffe is on the python path:\n",
-    "caffe_root = './'  # this file is expected to be in {caffe_root}/examples\n",
-    "import sys\n",
-    "sys.path.insert(0, caffe_root + 'python')\n",
-    "\n",
-    "import caffe\n",
-    "\n",
-    "# configure plotting\n",
-    "plt.rcParams['figure.figsize'] = (10, 10)\n",
-    "plt.rcParams['image.interpolation'] = 'nearest'\n",
-    "plt.rcParams['image.cmap'] = 'gray'"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Load the net, list its data and params, and filter an example image.\n",
-    "caffe.set_mode_gpu()\n",
-    "net = caffe.Net('test_templates/conv.prototxt', caffe.TEST)\n",
-    "print(\"blobs {}\\nparams {}\".format(net.blobs.keys(), net.params.keys()))\n",
-    "\n",
-    "# load image and prepare as a single input batch for Caffe\n",
-    "im = np.array(caffe.io.load_image('test_templates/cat_gray.jpg', color=False)).squeeze()\n",
-    "plt.title(\"original image\")\n",
-    "plt.imshow(im)\n",
-    "plt.axis('off')\n",
-    "\n",
-    "im_input = im[np.newaxis, np.newaxis, :, :]\n",
-    "net.blobs['data'].reshape(*im_input.shape)\n",
-    "net.blobs['data'].data[...] = im_input"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# helper show filter outputs\n",
-    "def show_filters(net):\n",
-    "    net.forward()\n",
-    "    plt.figure()\n",
-    "    filt_min, filt_max = net.blobs['conv'].data.min(), net.blobs['conv'].data.max()\n",
-    "    for i in range(3):\n",
-    "        plt.subplot(1,4,i+2)\n",
-    "        plt.title(\"filter #{} output\".format(i))\n",
-    "        plt.imshow(net.blobs['conv'].data[0, i], vmin=filt_min, vmax=filt_max)\n",
-    "        plt.tight_layout()\n",
-    "        plt.axis('off')\n",
-    "\n",
-    "# filter the image with initial \n",
-    "show_filters(net)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_caffe2.ipynb b/integration-tests/examples/test_templates/deeplearning/template_caffe2.ipynb
deleted file mode 100644
index f771e33..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_caffe2.ipynb
+++ /dev/null
@@ -1,619 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Caffe2 Basic Concepts - Operators & Nets\n",
-    "\n",
-    "In this tutorial we will go through a set of Caffe2 basics: the basic concepts including how operators and nets are being written.\n",
-    "\n",
-    "First, let's import caffe2. `core` and `workspace` are usually the two that you need most. If you want to manipulate protocol buffers generated by caffe2, you probably also want to import `caffe2_pb2` from `caffe2.proto`."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# We'll also import a few standard python libraries\n",
-    "from matplotlib import pyplot\n",
-    "import numpy as np\n",
-    "import time\n",
-    "\n",
-    "# These are the droids you are looking for.\n",
-    "from caffe2.python import core, workspace\n",
-    "from caffe2.proto import caffe2_pb2\n",
-    "\n",
-    "# Let's show all plots inline.\n",
-    "%matplotlib inline"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "You might see a warning saying that caffe2 does not have GPU support. That means you are running a CPU-only build. Don't be alarmed - anything CPU is still runnable without problem."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "## Workspaces\n",
-    "\n",
-    "Let's cover workspaces first, where all the data reside.\n",
-    "\n",
-    "If you are familiar with Matlab, workspace consists of blobs you create and store in memory. For now, consider a blob to be a N-dimensional Tensor similar to numpy's ndarray, but is contiguous. Down the road, we will show you that a blob is actually a typed pointer that can store any type of C++ objects, but Tensor is the most common type stored in a blob. Let's show what the interface looks like.\n",
-    "\n",
-    "`Blobs()` prints out all existing blobs in the workspace. \n",
-    "`HasBlob()` queries if a blob exists in the workspace. For now, we don't have anything yet."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
-    "print(\"Workspace has blob 'X'? {}\".format(workspace.HasBlob(\"X\")))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "We can feed blobs into the workspace using `FeedBlob()`."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "X = np.random.randn(2, 3).astype(np.float32)\n",
-    "print(\"Generated X from numpy:\\n{}\".format(X))\n",
-    "workspace.FeedBlob(\"X\", X)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Now, let's take a look what blobs there are in the workspace."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
-    "print(\"Workspace has blob 'X'? {}\".format(workspace.HasBlob(\"X\")))\n",
-    "print(\"Fetched X:\\n{}\".format(workspace.FetchBlob(\"X\")))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Let's verify that the arrays are equal."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "np.testing.assert_array_equal(X, workspace.FetchBlob(\"X\"))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Also, if you are trying to access a blob that does not exist, an error will be thrown:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "try:\n",
-    "    workspace.FetchBlob(\"invincible_pink_unicorn\")\n",
-    "except RuntimeError as err:\n",
-    "    print(err)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "One thing that you might not use immediately: you can have multiple workspaces in Python using different names, and switch between them. Blobs in different workspaces are separate from each other. You can query the current workspace using `CurrentWorkspace`. Let's try switching the workspace by name (gutentag) and creating a new one if it doesn't exist."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(\"Current workspace: {}\".format(workspace.CurrentWorkspace()))\n",
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
-    "\n",
-    "# Switch the workspace. The second argument \"True\" means creating \n",
-    "# the workspace if it is missing.\n",
-    "workspace.SwitchWorkspace(\"gutentag\", True)\n",
-    "\n",
-    "# Let's print the current workspace. Note that there is nothing in the\n",
-    "# workspace yet.\n",
-    "print(\"Current workspace: {}\".format(workspace.CurrentWorkspace()))\n",
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Let's switch back to the default workspace."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "workspace.SwitchWorkspace(\"default\")\n",
-    "print(\"Current workspace: {}\".format(workspace.CurrentWorkspace()))\n",
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Finally, `ResetWorkspace()` clears anything that is in the current workspace."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "workspace.ResetWorkspace()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Operators\n",
-    "\n",
-    "Operators in Caffe2 are kind of like functions. From the C++ side, they all derive from a common interface, and are registered by type, so that we can call different operators during runtime. The interface of operators is defined in `caffe2/proto/caffe2.proto`. Basically, it takes in a bunch of inputs, and produces a bunch of outputs.\n",
-    "\n",
-    "Remember, when we say \"create an operator\" in Caffe2 Python, nothing gets run yet. All it does is to create the protocol buffere that specifies what the operator should be. At a later time it will be sent to the C++ backend for execution. If you are not familiar with protobuf, it is a json-like serialization tool for structured data. Find more about protocol buffers [here](https://developers.google.com/protocol-buffers/).\n",
-    "\n",
-    "Let's see an actual example."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Create an operator.\n",
-    "op = core.CreateOperator(\n",
-    "    \"Relu\", # The type of operator that we want to run\n",
-    "    [\"X\"], # A list of input blobs by their names\n",
-    "    [\"Y\"], # A list of output blobs by their names\n",
-    ")\n",
-    "# and we are done!"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "As we mentioned, the created op is actually a protobuf object. Let's show the content."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "print(\"Type of the created op is: {}\".format(type(op)))\n",
-    "print(\"Content:\\n\")\n",
-    "print(str(op))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "OK, let's run the operator. We first feed in the input X to the workspace. \n",
-    "Then the simplest way to run an operator is to do `workspace.RunOperatorOnce(operator)`"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "workspace.FeedBlob(\"X\", np.random.randn(2, 3).astype(np.float32))\n",
-    "workspace.RunOperatorOnce(op)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "After execution, let's see if the operator is doing the right thing, which is our neural network's activation function ([Relu](https://en.wikipedia.org/wiki/Rectifier_(neural_networks))) in this case."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(\"Current blobs in the workspace: {}\\n\".format(workspace.Blobs()))\n",
-    "print(\"X:\\n{}\\n\".format(workspace.FetchBlob(\"X\")))\n",
-    "print(\"Y:\\n{}\\n\".format(workspace.FetchBlob(\"Y\")))\n",
-    "print(\"Expected:\\n{}\\n\".format(np.maximum(workspace.FetchBlob(\"X\"), 0)))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This is working if your Expected output matches your Y output in this example.\n",
-    "\n",
-    "Operators also take optional arguments if needed. They are specified as key-value pairs. Let's take a look at one simple example, which takes a tensor and fills it with Gaussian random variables."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "op = core.CreateOperator(\n",
-    "    \"GaussianFill\",\n",
-    "    [], # GaussianFill does not need any parameters.\n",
-    "    [\"Z\"],\n",
-    "    shape=[100, 100], # shape argument as a list of ints.\n",
-    "    mean=1.0,  # mean as a single float\n",
-    "    std=1.0, # std as a single float\n",
-    ")\n",
-    "print(\"Content of op:\\n\")\n",
-    "print(str(op))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Let's run it and see if things are as intended."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "workspace.RunOperatorOnce(op)\n",
-    "temp = workspace.FetchBlob(\"Z\")\n",
-    "pyplot.hist(temp.flatten(), bins=50)\n",
-    "pyplot.title(\"Distribution of Z\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "If you see a bell shaped curve then it worked!"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Nets\n",
-    "\n",
-    "Nets are essentially computation graphs. We keep the name `Net` for backward consistency (and also to pay tribute to neural nets). A Net is composed of multiple operators just like a program written as a sequence of commands. Let's take a look.\n",
-    "\n",
-    "When we talk about nets, we will also talk about BlobReference, which is an object that wraps around a string so we can do easy chaining of operators.\n",
-    "\n",
-    "Let's create a network that is essentially the equivalent of the following python math:\n",
-    "```\n",
-    "X = np.random.randn(2, 3)\n",
-    "W = np.random.randn(5, 3)\n",
-    "b = np.ones(5)\n",
-    "Y = X * W^T + b\n",
-    "```\n",
-    "We'll show the progress step by step. Caffe2's `core.Net` is a wrapper class around a NetDef protocol buffer."
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "When creating a network, its underlying protocol buffer is essentially empty other than the network name. Let's create the net and then show the proto content."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "net = core.Net(\"my_first_net\")\n",
-    "print(\"Current network proto:\\n\\n{}\".format(net.Proto()))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Let's create a blob called X, and use GaussianFill to fill it with some random data."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "X = net.GaussianFill([], [\"X\"], mean=0.0, std=1.0, shape=[2, 3], run_once=0)\n",
-    "print(\"New network proto:\\n\\n{}\".format(net.Proto()))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "You might have observed a few differences from the earlier `core.CreateOperator` call. Basically, when we have a net, you can direct create an operator *and* add it to the net at the same time using Python tricks: essentially, if you call `net.SomeOp` where SomeOp is a registered type string of an operator, this essentially gets translated to\n",
-    "```\n",
-    "op = core.CreateOperator(\"SomeOp\", ...)\n",
-    "net.Proto().op.append(op)\n",
-    "```\n",
-    "\n",
-    "Also, you might be wondering what X is. X is a `BlobReference` which basically records two things:\n",
-    "- what its name is. You can access the name by str(X)\n",
-    "- which net it gets created from. It is recorded by an internal variable `_from_net`, but most likely\n",
-    "you won't need that.\n",
-    "\n",
-    "Let's verify it. Also, remember, we are not actually running anything yet, so X contains nothing but a symbol. Don't expect to get any numerical values out of it right now :)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "print(\"Type of X is: {}\".format(type(X)))\n",
-    "print(\"The blob name is: {}\".format(str(X)))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Let's continue to create W and b."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "W = net.GaussianFill([], [\"W\"], mean=0.0, std=1.0, shape=[5, 3], run_once=0)\n",
-    "b = net.ConstantFill([], [\"b\"], shape=[5,], value=1.0, run_once=0)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Now, one simple code sugar: since the BlobReference objects know what net it is generated from, in addition to creating operators from net, you can also create operators from BlobReferences. Let's create the FC operator in this way."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "Y = X.FC([W, b], [\"Y\"])"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Under the hood, `X.FC(...)` simply delegates to `net.FC` by inserting `X` as the first input of the corresponding operator, so what we did above is equivalent to\n",
-    "```\n",
-    "Y = net.FC([X, W, b], [\"Y\"])\n",
-    "```\n",
-    "\n",
-    "Let's take a look at the current network."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(\"Current network proto:\\n\\n{}\".format(net.Proto()))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Too verbose huh? Let's try to visualize it as a graph. Caffe2 ships with a very minimal graph visualization tool for this purpose. Let's show that in ipython."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from caffe2.python import net_drawer\n",
-    "from IPython import display\n",
-    "graph = net_drawer.GetPydotGraph(net, rankdir=\"LR\")\n",
-    "display.Image(graph.create_png(), width=800)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "So we have defined a `Net`, but nothing gets executed yet. Remember that the net above is essentially a protobuf that holds the definition of the network. When we actually want to run the network, what happens under the hood is:\n",
-    "- Instantiate a C++ net object from the protobuf;\n",
-    "- Call the instantiated net's Run() function.\n",
-    "\n",
-    "Before we do anything, we should clear any earlier workspace variables with `ResetWorkspace()`.\n",
-    "\n",
-    "Then there are two ways to run a net from Python. We will do the first option in the example below.\n",
-    "\n",
-    "1. Using `workspace.RunNetOnce()`, which instantiates, runs and immediately destructs the network. \n",
-    "2. A little bit more complex and involves two steps: \n",
-    "    (a) call `workspace.CreateNet()` to create the C++ net object owned by the workspace, and\n",
-    "    (b) use `workspace.RunNet()` by passing the name of the network to it.\n",
-    "    \n"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "workspace.ResetWorkspace()\n",
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
-    "workspace.RunNetOnce(net)\n",
-    "print(\"Blobs in the workspace after execution: {}\".format(workspace.Blobs()))\n",
-    "# Let's dump the contents of the blobs\n",
-    "for name in workspace.Blobs():\n",
-    "    print(\"{}:\\n{}\".format(name, workspace.FetchBlob(name)))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Now let's try the second way to create the net, and run it. First clear the variables with `ResetWorkspace()`, create the net with the workspace's net object you created earlier `CreateNet(net_object)`, and then run the net by name with `RunNet(net_name)`."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "workspace.ResetWorkspace()\n",
-    "print(\"Current blobs in the workspace: {}\".format(workspace.Blobs()))\n",
-    "workspace.CreateNet(net)\n",
-    "workspace.RunNet(net.Proto().name)\n",
-    "print(\"Blobs in the workspace after execution: {}\".format(workspace.Blobs()))\n",
-    "for name in workspace.Blobs():\n",
-    "    print(\"{}:\\n{}\".format(name, workspace.FetchBlob(name)))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "There are a few differences between `RunNetOnce` and `RunNet`, but probably the main difference is the computation time overhead. Since `RunNetOnce` involves serializing the protobuf to pass between Python and C and instantiating the network, it may take longer to run. Let's see in this case what the overhead is."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# It seems that %timeit magic does not work well with\n",
-    "# C++ extensions so we'll basically do for loops\n",
-    "start = time.time()\n",
-    "for i in range(1000):\n",
-    "    workspace.RunNetOnce(net)\n",
-    "end = time.time()\n",
-    "print('Run time per RunNetOnce: {}'.format((end - start) / 1000))\n",
-    "\n",
-    "start = time.time()\n",
-    "for i in range(1000):\n",
-    "    workspace.RunNet(net.Proto().name)\n",
-    "end = time.time()\n",
-    "print('Run time per RunNet: {}'.format((end - start) / 1000))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {
-    "collapsed": true
-   },
-   "source": [
-    "OK, so above are a few key components if you would like to use Caffe2 from the python side. We are going to add more to the tutorial as we find more needs. For now, kindly check out the rest of the tutorials!"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_cntk.ipynb b/integration-tests/examples/test_templates/deeplearning/template_cntk.ipynb
deleted file mode 100644
index 44b7d1b..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_cntk.ipynb
+++ /dev/null
@@ -1,78 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from IPython.display import Image\n",
-    "# Figure 1\n",
-    "Image(url=\"https://www.cntk.ai/jup/cancer_data_plot.jpg\", width=400, height=400)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Figure 2\n",
-    "Image(url= \"https://www.cntk.ai/jup/cancer_classify_plot.jpg\", width=400, height=400)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Figure 3\n",
-    "Image(url= \"https://www.cntk.ai/jup/logistic_neuron.jpg\", width=300, height=200)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# Import the relevant components\n",
-    "from __future__ import print_function\n",
-    "import numpy as np\n",
-    "import sys\n",
-    "import os\n",
-    "from cntk import *\n",
-    "\n",
-    "# Select the right target device when this notebook is being tested:\n",
-    "if 'TEST_DEVICE' in os.environ:\n",
-    "    import cntk\n",
-    "    if os.environ['TEST_DEVICE'] == 'cpu':\n",
-    "        cntk.device.try_set_default_device(cntk.device.cpu())\n",
-    "    else:\n",
-    "        cntk.device.try_set_default_device(cntk.device.gpu(0))"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_keras.ipynb b/integration-tests/examples/test_templates/deeplearning/template_keras.ipynb
deleted file mode 100644
index 683e1af..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_keras.ipynb
+++ /dev/null
@@ -1,64 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from keras.layers import Input, Embedding, LSTM, Dense\n",
-    "from keras.models import Model\n",
-    "\n",
-    "# Headline input: meant to receive sequences of 100 integers, between 1 and 10000.\n",
-    "# Note that we can name any layer by passing it a \"name\" argument.\n",
-    "main_input = Input(shape=(100,), dtype='int32', name='main_input')\n",
-    "\n",
-    "# This embedding layer will encode the input sequence\n",
-    "# into a sequence of dense 512-dimensional vectors.\n",
-    "x = Embedding(output_dim=512, input_dim=10000, input_length=100)(main_input)\n",
-    "\n",
-    "# A LSTM will transform the vector sequence into a single vector,\n",
-    "# containing information about the entire sequence\n",
-    "lstm_out = LSTM(32)(x)\n",
-    "auxiliary_output = Dense(1, activation='sigmoid', name='aux_output')(lstm_out)\n",
-    "auxiliary_input = Input(shape=(5,), name='aux_input')\n",
-    "\n",
-    "# We stack a deep densely-connected network on top\n",
-    "x = Dense(64, activation='relu')(x)\n",
-    "x = Dense(64, activation='relu')(x)\n",
-    "x = Dense(64, activation='relu')(x)\n",
-    "\n",
-    "# And finally we add the main logistic regression layer\n",
-    "main_output = Dense(1, activation='sigmoid', name='main_output')(x)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_mxnet.ipynb b/integration-tests/examples/test_templates/deeplearning/template_mxnet.ipynb
deleted file mode 100644
index 5208ce3..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_mxnet.ipynb
+++ /dev/null
@@ -1,108 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import mxnet as mx\n",
-    "a = mx.sym.Variable('a')\n",
-    "b = mx.sym.Variable('b')\n",
-    "c = a + b\n",
-    "(a, b, c)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# elemental wise times\n",
-    "d = a * b  \n",
-    "# matrix multiplication\n",
-    "e = mx.sym.dot(a, b)   \n",
-    "# reshape\n",
-    "f = mx.sym.Reshape(d+e, shape=(1,4))  \n",
-    "# broadcast\n",
-    "g = mx.sym.broadcast_to(f, shape=(2,4))  \n",
-    "mx.viz.plot_network(symbol=g)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "%matplotlib inline\n",
-    "from __future__ import print_function\n",
-    "import os\n",
-    "import time\n",
-    "# set the number of threads you want to use before importing mxnet\n",
-    "os.environ['MXNET_CPU_WORKER_NTHREADS'] = '4'\n",
-    "import mxnet as mx\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# download example images\n",
-    "proxy = os.popen('cat /etc/profile | grep https_proxy | cut -f2 -d\"=\"').read()[:-1]\n",
-    "os.popen('wget -e use_proxy=yes -e http_proxy={} http://data.mxnet.io/data/test_images.tar.gz'.format(proxy)).read()\n",
-    "os.popen('tar -xf test_images.tar.gz').read()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "# opencv\n",
-    "import cv2\n",
-    "N = 1000\n",
-    "tic = time.time()\n",
-    "for i in range(N):\n",
-    "    img = cv2.imread('test_images/ILSVRC2012_val_00000001.JPEG', flags=1)\n",
-    "    img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)\n",
-    "print(N/(time.time()-tic), 'images decoded per second with opencv')\n",
-    "plt.imshow(img); plt.show()"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_preparation_tensor.ipynb b/integration-tests/examples/test_templates/deeplearning/template_preparation_tensor.ipynb
deleted file mode 100644
index 0396cb3..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_preparation_tensor.ipynb
+++ /dev/null
@@ -1,285 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os, cv2, random\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline \n",
-    "from keras.models import Sequential, load_model\n",
-    "from keras.layers import Dropout, Flatten, Convolution2D, MaxPooling2D, Dense, Activation\n",
-    "from keras.optimizers import Adam\n",
-    "from keras.callbacks import Callback, EarlyStopping\n",
-    "from keras.callbacks import BaseLogger, TensorBoard"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Constants definition"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "TRAIN_DIR = '/home/dlab-user/train/'\n",
-    "TEST_DIR = '/home/dlab-user/test/'\n",
-    "ROWS = 128\n",
-    "COLS = 128\n",
-    "CHANNELS = 3\n",
-    "TRAIN_IMAGES_COUNT = 1000\n",
-    "PATH_TO_LOGS = '/home/dlab-user/logs'"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Reading and adjusting images for training"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "all_images = [TRAIN_DIR+i for i in os.listdir(TRAIN_DIR)[:TRAIN_IMAGES_COUNT]]\n",
-    "test_images =  [TEST_DIR+i for i in os.listdir(TEST_DIR)]\n",
-    "random.shuffle(all_images)\n",
-    "test_coeff = int(len (all_images) * .9)\n",
-    "\n",
-    "train_images, test_images = all_images[:test_coeff], all_images[test_coeff:]\n",
-    "\n",
-    "def read_image(file_path):\n",
-    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
-    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
-    "\n",
-    "def prepare_data(images):\n",
-    "    count = len(images)\n",
-    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
-    "\n",
-    "    for i, image_file in enumerate(images):\n",
-    "        image = read_image(image_file)\n",
-    "        data[i] = image#.T\n",
-    "    return data\n",
-    "\n",
-    "train = prepare_data(train_images)\n",
-    "test = prepare_data(test_images)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Image counts"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(\"Train shape: {}\".format(train.shape))\n",
-    "print(\"Test shape: {}\".format(test.shape))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Assigning labels to training images"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "labels = []\n",
-    "for i in train_images:\n",
-    "    if 'dog' in i.split(\"/\")[-1] :\n",
-    "        labels.append(1)\n",
-    "    else:\n",
-    "        labels.append(0)\n",
-    "        \n",
-    "labels_test = []\n",
-    "for i in test_images:\n",
-    "    if 'dog' in i.split(\"/\")[-1] :\n",
-    "        labels_test.append(1)\n",
-    "    else:\n",
-    "        labels_test.append(0)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Building a convnet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "optimizer = Adam(lr=1e-6)\n",
-    "objective = 'binary_crossentropy'\n",
-    "\n",
-    "def build_model():\n",
-    "    \n",
-    "    model = Sequential()\n",
-    "\n",
-    "    model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(ROWS, COLS, 3), activation='relu'))\n",
-    "    model.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
-    "\n",
-    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
-    "    \n",
-    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
-    "\n",
-    "    model.add(Flatten())\n",
-    "    model.add(Dense(256, activation='relu'))\n",
-    "    model.add(Dropout(0.5))\n",
-    "    \n",
-    "    model.add(Dense(1))\n",
-    "    model.add(Activation('sigmoid'))\n",
-    "    \n",
-    "    model.compile(loss=objective, optimizer=optimizer, metrics=['accuracy'])\n",
-    "    return model\n",
-    "\n",
-    "\n",
-    "model = build_model()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Training the model"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This block takes about 2.5-3 hours to execute if training on whole dataset of 22500 images"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "nb_epoch = 10\n",
-    "batch_size = 16\n",
-    "\n",
-    "class LossHistory(Callback):\n",
-    "    def on_train_begin(self, logs={}):\n",
-    "        self.losses = []\n",
-    "        self.val_losses = []\n",
-    "        \n",
-    "    def on_epoch_end(self, batch, logs={}):\n",
-    "        self.losses.append(logs.get('loss'))\n",
-    "        self.val_losses.append(logs.get('val_loss'))\n",
-    "\n",
-    "early_stopping = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto')        \n",
-    "        \n",
-    "def train_and_test_model():\n",
-    "    history = LossHistory()\n",
-    "    tensorboard = TensorBoard(log_dir=PATH_TO_LOGS)\n",
-    "    model.fit(train, labels, batch_size=batch_size, nb_epoch=nb_epoch,\n",
-    "              validation_split=0.25, verbose=2, shuffle=True, callbacks=[history, early_stopping, tensorboard])\n",
-    "    \n",
-    "\n",
-    "    predictions = model.predict(test, verbose=2)\n",
-    "    return predictions, history\n",
-    "\n",
-    "predictions, history = train_and_test_model()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Saving the model and weights"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "path_to_model = '/home/dlab-user/model_1000.json'\n",
-    "path_to_weights = '/home/dlab-user/weigths_1000.h5'\n",
-    "\n",
-    "model.save(path_to_model)\n",
-    "model.save_weights(path_to_weights)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Plotting learning curves"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "loss = history.losses\n",
-    "val_loss = history.val_losses\n",
-    "\n",
-    "plt.xlabel('Epochs')\n",
-    "plt.ylabel('Loss')\n",
-    "plt.title('VGG-16 Loss Trend')\n",
-    "plt.plot(loss, 'blue', label='Training Loss')\n",
-    "plt.plot(val_loss, 'green', label='Validation Loss')\n",
-    "plt.xticks(range(0,len(loss))[0::2])\n",
-    "plt.legend()\n",
-    "plt.show()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_theano.ipynb b/integration-tests/examples/test_templates/deeplearning/template_theano.ipynb
deleted file mode 100644
index 815e6fe..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_theano.ipynb
+++ /dev/null
@@ -1,45 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import theano\n",
-    "from theano import tensor\n",
-    "# declare two symbolic floating-point scalars\n",
-    "a = tensor.dscalar()\n",
-    "b = tensor.dscalar()\n",
-    "# create a simple expression\n",
-    "c = a + b\n",
-    "# convert the expression into a callable object that takes (a,b)\n",
-    "# values as input and computes a value for c\n",
-    "f = theano.function([a,b], c)\n",
-    "# bind 1.5 to 'a', 2.5 to 'b', and evaluate 'c'\n",
-    "assert 4.0 == f(1.5, 2.5)"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_torch.ipynb b/integration-tests/examples/test_templates/deeplearning/template_torch.ipynb
deleted file mode 100644
index 0c476d5..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_torch.ipynb
+++ /dev/null
@@ -1,73 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "itorch.image({image.lena(), image.lena(), image.lena()})"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "itorch.html('<p><b>Hi there!</b> this is arbitrary HTML</p>')\n",
-    "window_id = itorch.html('<p>This text will be replaced in 2 seconds</p>')\n",
-    "os.execute('sleep 2')\n",
-    "itorch.html('<p>magic!</p>', window_id)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "Plot = require 'itorch.Plot'\n",
-    "x1 = torch.randn(40):mul(100)\n",
-    "y1 = torch.randn(40):mul(100)\n",
-    "x2 = torch.randn(40):mul(100)\n",
-    "y2 = torch.randn(40):mul(100)\n",
-    "x3 = torch.randn(40):mul(200)\n",
-    "y3 = torch.randn(40):mul(200)\n",
-    "\n",
-    "\n",
-    "-- scatter plots\n",
-    "plot = Plot():circle(x1, y1, 'red', 'hi'):circle(x2, y2, 'blue', 'bye'):draw()\n",
-    "plot:circle(x3,y3,'green', 'yolo'):redraw()\n",
-    "plot:title('Scatter Plot Demo'):redraw()\n",
-    "plot:xaxis('length'):yaxis('width'):redraw()\n",
-    "plot:legend(true)\n",
-    "plot:redraw()\n",
-    "-- print(plot:toHTML())\n",
-    "plot:save('out.html')"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "plot = Plot():line(x1, y1,'red','example'):legend(true):title('Line Plot Demo'):draw()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "iTorch",
-   "language": "lua",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "name": "lua",
-   "version": "5.1"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/deeplearning/template_visualization_tensor.ipynb b/integration-tests/examples/test_templates/deeplearning/template_visualization_tensor.ipynb
deleted file mode 100644
index 92eba38..0000000
--- a/integration-tests/examples/test_templates/deeplearning/template_visualization_tensor.ipynb
+++ /dev/null
@@ -1,139 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os, cv2\n",
-    "from keras.models import load_model\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline\n",
-    "import numpy as np\n",
-    "import tensorflow as tf"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Pathes to model and weights"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "path_to_model = '/home/dlab-user/model_1000.json'\n",
-    "path_to_weights = '/home/dlab-user/weigths_1000.h5'"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Loading test images"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "ROWS = 128\n",
-    "COLS = 128\n",
-    "CHANNELS = 3\n",
-    "TEST_DIR = '/home/dlab-user/test/'\n",
-    "all_image_names = os.listdir(TEST_DIR)\n",
-    "all_image_names.sort()\n",
-    "test_images =  [TEST_DIR+i for i in all_image_names[6:11] + all_image_names[19:32] + all_image_names[33:34]]\n",
-    "\n",
-    "def read_image(file_path):\n",
-    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
-    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
-    "\n",
-    "def prep_data(images):\n",
-    "    count = len(images)\n",
-    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
-    "\n",
-    "    for i, image_file in enumerate(images):\n",
-    "        image = read_image(image_file)\n",
-    "        data[i] = image\n",
-    "    return data\n",
-    "test = prep_data(test_images)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Loading the model and making predictions on test data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "with tf.device('/cpu:0'):\n",
-    "    model = load_model(path_to_model)\n",
-    "    model.load_weights(path_to_weights)\n",
-    "    predictions = model.predict(test, verbose=2)     "
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Visualizing results (rendering can take about a minute)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "plt.figure(figsize=(16, 12))\n",
-    "for i in range(0, 12):\n",
-    "    plt.subplot(3, 4, i+1)\n",
-    "    if predictions[i, 0] >= 0.5: \n",
-    "        plt.title('{:.2%} Dog'.format(predictions[i][0]))\n",
-    "    else: \n",
-    "        plt.title('{:.2%} Cat'.format(1-predictions[i][0]))\n",
-    "        \n",
-    "    plt.imshow(cv2.cvtColor(test[i], cv2.COLOR_BGR2RGB))\n",
-    "    plt.axis('off')\n",
-    "plt.show()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/jupyter/template_preparation_pyspark.ipynb b/integration-tests/examples/test_templates/jupyter/template_preparation_pyspark.ipynb
deleted file mode 100644
index d0360a1..0000000
--- a/integration-tests/examples/test_templates/jupyter/template_preparation_pyspark.ipynb
+++ /dev/null
@@ -1,198 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Flights data preparation"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "from pyspark.sql import SQLContext\n",
-    "from pyspark.sql import DataFrame\n",
-    "from pyspark.sql import Row\n",
-    "from pyspark.sql.types import *\n",
-    "import pandas as pd\n",
-    "import StringIO\n",
-    "import matplotlib.pyplot as plt\n",
-    "hc = sc._jsc.hadoopConfiguration()\n",
-    "hc.set(\"hive.execution.engine\", \"mr\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Function to parse CSV"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import csv\n",
-    "\n",
-    "def parseCsv(csvStr):\n",
-    "    f = StringIO.StringIO(csvStr)\n",
-    "    reader = csv.reader(f, delimiter=',')\n",
-    "    row = reader.next()\n",
-    "    return row\n",
-    "\n",
-    "scsv = '\"02Q\",\"Titan Airways\"'\n",
-    "row = parseCsv(scsv)\n",
-    "print row[0]\n",
-    "print row[1]\n",
-    "\n",
-    "working_storage = 'WORKING_STORAGE'\n",
-    "output_directory = 'jupyter/py2'\n",
-    "protocol_name = 'PROTOCOL_NAME://'"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Parse and convert Carrier data to parquet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "carriersHeader = 'Code,Description'\n",
-    "carriersText = sc.textFile(protocol_name + working_storage + \"/jupyter_dataset/carriers.csv\").filter(lambda x: x != carriersHeader)\n",
-    "carriers = carriersText.map(lambda s: parseCsv(s)) \\\n",
-    "    .map(lambda s: Row(code=s[0], description=s[1])).cache().toDF()\n",
-    "carriers.write.mode(\"overwrite\").parquet(protocol_name + working_storage + \"/\" + output_directory + \"/carriers\")    \n",
-    "sqlContext.registerDataFrameAsTable(carriers, \"carriers\")\n",
-    "carriers.limit(20).toPandas()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Parse and convert to parquet Airport data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "airportsHeader= '\"iata\",\"airport\",\"city\",\"state\",\"country\",\"lat\",\"long\"'\n",
-    "airports = sc.textFile(protocol_name + working_storage + \"/jupyter_dataset/airports.csv\") \\\n",
-    "    .filter(lambda x: x != airportsHeader) \\\n",
-    "    .map(lambda s: parseCsv(s)) \\\n",
-    "    .map(lambda p: Row(iata=p[0], \\\n",
-    "                       airport=p[1], \\\n",
-    "                       city=p[2], \\\n",
-    "                       state=p[3], \\\n",
-    "                       country=p[4], \\\n",
-    "                       lat=float(p[5]), \\\n",
-    "                       longt=float(p[6])) \\\n",
-    "        ).cache().toDF()\n",
-    "airports.write.mode(\"overwrite\").parquet(protocol_name + working_storage + \"/\" + output_directory + \"/airports\")    \n",
-    "sqlContext.registerDataFrameAsTable(airports, \"airports\")\n",
-    "airports.limit(20).toPandas()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Parse and convert Flights data to parquet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "flightsHeader = 'Year,Month,DayofMonth,DayOfWeek,DepTime,CRSDepTime,ArrTime,CRSArrTime,UniqueCarrier,FlightNum,TailNum,ActualElapsedTime,CRSElapsedTime,AirTime,ArrDelay,DepDelay,Origin,Dest,Distance,TaxiIn,TaxiOut,Cancelled,CancellationCode,Diverted,CarrierDelay,WeatherDelay,NASDelay,SecurityDelay,LateAircraftDelay'\n",
-    "flights = sc.textFile(protocol_name + working_storage + \"/jupyter_dataset/2008.csv.bz2\") \\\n",
-    "    .filter(lambda x: x!= flightsHeader) \\\n",
-    "    .map(lambda s: parseCsv(s)) \\\n",
-    "    .map(lambda p: Row(Year=int(p[0]), \\\n",
-    "                       Month=int(p[1]), \\\n",
-    "                       DayofMonth=int(p[2]), \\\n",
-    "                       DayOfWeek=int(p[3]), \\\n",
-    "                       DepTime=p[4], \\\n",
-    "                       CRSDepTime=p[5], \\\n",
-    "                       ArrTime=p[6], \\\n",
-    "                       CRSArrTime=p[7], \\\n",
-    "                       UniqueCarrier=p[8], \\\n",
-    "                       FlightNum=p[9], \\\n",
-    "                       TailNum=p[10], \\\n",
-    "                       ActualElapsedTime=p[11], \\\n",
-    "                       CRSElapsedTime=p[12], \\\n",
-    "                       AirTime=p[13], \\\n",
-    "                       ArrDelay=int(p[14].replace(\"NA\", \"0\")), \\\n",
-    "                       DepDelay=int(p[15].replace(\"NA\", \"0\")), \\\n",
-    "                       Origin=p[16], \\\n",
-    "                       Dest=p[17], \\\n",
-    "                       Distance=long(p[18]), \\\n",
-    "                       TaxiIn=p[19], \\\n",
-    "                       TaxiOut=p[20], \\\n",
-    "                       Cancelled=p[21], \\\n",
-    "                       CancellationCode=p[22], \\\n",
-    "                       Diverted=p[23], \\\n",
-    "                       CarrierDelay=int(p[24].replace(\"NA\", \"0\")), \\\n",
-    "                                              CarrierDelayStr=p[24], \\\n",
-    "                       WeatherDelay=int(p[25].replace(\"NA\", \"0\")), \\\n",
-    "                                              WeatherDelayStr=p[25], \\\n",
-    "                       NASDelay=int(p[26].replace(\"NA\", \"0\")), \\\n",
-    "                       SecurityDelay=int(p[27].replace(\"NA\", \"0\")), \\\n",
-    "                       LateAircraftDelay=int(p[28].replace(\"NA\", \"0\")))) \\\n",
-    "         .toDF()\n",
-    "\n",
-    "flights.write.mode(\"ignore\").parquet(protocol_name + working_storage + \"/\" + output_directory + \"/flights\")\n",
-    "sqlContext.registerDataFrameAsTable(flights, \"flights\")\n",
-    "flights.limit(10).toPandas()[[\"ArrDelay\",\"CarrierDelay\",\"CarrierDelayStr\",\"WeatherDelay\",\"WeatherDelayStr\",\"Distance\"]]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/jupyter/template_preparation_spark.ipynb b/integration-tests/examples/test_templates/jupyter/template_preparation_spark.ipynb
deleted file mode 100644
index 7b45b83..0000000
--- a/integration-tests/examples/test_templates/jupyter/template_preparation_spark.ipynb
+++ /dev/null
@@ -1,103 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "import org.apache.spark.sql._\n",
-    "\n",
-    "val working_storage = \"WORKING_STORAGE\"\n",
-    "val output_directory = \"jupyter/scala\"\n",
-    "val protocol_name = \"PROTOCOL_NAME\"\n",
-    "val sqlCtx = new SQLContext(sc)\n",
-    "val hc = sc.hadoopConfiguration\n",
-    "hc.set(\"hive.execution.engine\", \"mr\")\n",
-    "\n",
-    "def bucketPath(path: String) = {\n",
-    "    s\"$protocol_name://$working_storage/jupyter_dataset/$path\"\n",
-    "}\n",
-    "def fullPath(path: String) = {\n",
-    "    s\"$protocol_name://$working_storage/$output_directory/$path\"\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "val carriers = sqlCtx.read.\n",
-    "                        format(\"com.databricks.spark.csv\").\n",
-    "                        option(\"inferSchema\", \"true\").\n",
-    "                        option(\"header\", \"true\").\n",
-    "                        load(bucketPath(\"carriers.csv\"))\n",
-    "carriers.write.mode(SaveMode.Overwrite).parquet(fullPath(\"carriers/\"))\n",
-    "carriers.createOrReplaceTempView(\"carriers\")\n",
-    "carriers.show(20)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "val airports = sqlCtx.read.\n",
-    "                        format(\"com.databricks.spark.csv\").\n",
-    "                        option(\"inferSchema\", \"true\").\n",
-    "                        option(\"header\", \"true\").\n",
-    "                        load(bucketPath(\"airports.csv\"))\n",
-    "airports.write.mode(SaveMode.Overwrite).parquet(fullPath(\"airports/\"))\n",
-    "airports.createOrReplaceTempView(\"airports\")\n",
-    "airports.show(20)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import sqlCtx.implicits._\n",
-    "\n",
-    "val flights_w_na = sqlCtx.read.\n",
-    "                        format(\"com.databricks.spark.csv\").\n",
-    "                        option(\"inferSchema\", \"true\").\n",
-    "                        option(\"header\", \"true\").\n",
-    "                        option(\"nullValue\", \"NA\").\n",
-    "                        load(bucketPath(\"2008.csv.bz2\"))\n",
-    "val flights = flights_w_na.na.fill(0)\n",
-    "flights.write.mode(SaveMode.Overwrite).parquet(fullPath(\"flights/\"))\n",
-    "flights.createOrReplaceTempView(\"flights\")\n",
-    "flights.select($\"ArrDelay\",$\"CarrierDelay\",$\"WeatherDelay\",$\"Distance\").show(20)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Local Apache Toree - Scala (Scala-2.11.8, Spark-2.1.0)",
-   "language": "scala",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "name": "scala",
-   "version": "2.11.8"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/jupyter/template_preparation_sparkr.ipynb b/integration-tests/examples/test_templates/jupyter/template_preparation_sparkr.ipynb
deleted file mode 100644
index 9e23a1e..0000000
--- a/integration-tests/examples/test_templates/jupyter/template_preparation_sparkr.ipynb
+++ /dev/null
@@ -1,111 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "sparkR.session()\n",
-    "\n",
-    "working_storage <- 'WORKING_STORAGE'\n",
-    "output_directory <- 'jupyter/r'\n",
-    "protocol_name <- 'PROTOCOL_NAME'\n",
-    "\n",
-    "storage_path <- function(file_path) {\n",
-    "   sprintf('%s://%s/jupyter_dataset/%s', protocol_name, working_storage, file_path)\n",
-    "}\n",
-    "\n",
-    "full_path <- function(file_path) {\n",
-    "    sprintf('%s://%s/%s/%s', protocol_name, working_storage, output_directory, file_path)\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Parse and convert Carrier data to parquet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "carriers <- read.df(storage_path(\"carriers.csv\"), \"csv\", header=\"true\", inferSchema=\"true\")\n",
-    "write.df(carriers, path=full_path(\"carriers\"), source=\"parquet\", mode=\"overwrite\")\n",
-    "createOrReplaceTempView(carriers, \"carriers\")\n",
-    "head(carriers, 20)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Parse and convert to parquet Airport data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "airports <- read.df(storage_path(\"airports.csv\"), \"csv\", header=\"true\", inferSchema=\"true\")\n",
-    "write.df(airports, path=full_path(\"airports\"), source=\"parquet\", mode=\"overwrite\")\n",
-    "createOrReplaceTempView(airports, \"airports\")\n",
-    "head(airports, 20)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Parse and convert Flights data to parquet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "flights_w_na <- read.df(storage_path(\"2008.csv.bz2\"), \"csv\", header=\"true\", inferSchema=\"true\")\n",
-    "flights <- fillna(flights_w_na, 0, cols=colnames(flights_w_na)[c(15, 16, 25:29)])\n",
-    "write.df(flights, path=full_path(\"flights\"), source=\"parquet\", mode=\"overwrite\")\n",
-    "createOrReplaceTempView(flights, \"flights\")\n",
-    "colnames(flights)\n",
-    "head(flights_w_na, 5)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]\n",
-    "head(flights, 5)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Local SparkR (R-3.4.2, Spark-2.1.0)",
-   "language": "R",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": "r",
-   "file_extension": ".r",
-   "mimetype": "text/x-r-source",
-   "name": "R",
-   "pygments_lexer": "r",
-   "version": "3.4.2"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/jupyter/template_visualization_pyspark.ipynb b/integration-tests/examples/test_templates/jupyter/template_visualization_pyspark.ipynb
deleted file mode 100644
index 278c6c6..0000000
--- a/integration-tests/examples/test_templates/jupyter/template_visualization_pyspark.ipynb
+++ /dev/null
@@ -1,243 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": [
-    "from pyspark.sql import SQLContext\n",
-    "from pyspark.sql import DataFrame\n",
-    "from pyspark.sql import Row\n",
-    "from pyspark.sql.types import *\n",
-    "import pandas as pd\n",
-    "import StringIO\n",
-    "import matplotlib.pyplot as plt\n",
-    "hc = sc._jsc.hadoopConfiguration()\n",
-    "hc.set(\"hive.execution.engine\", \"mr\")\n",
-    "\n",
-    "working_storage = 'WORKING_STORAGE'\n",
-    "output_directory = 'jupyter/py2'\n",
-    "protocol_name = 'PROTOCOL_NAME://'"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load Carriers data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "carriers = sqlContext.read.parquet(protocol_name + working_storage + \"/\" + output_directory + \"/carriers\").cache()   \n",
-    "sqlContext.registerDataFrameAsTable(carriers, \"carriers\")\n",
-    "carriers.printSchema()\n",
-    "carriers.limit(20).toPandas()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load Airports data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "airports = sqlContext.read.parquet(protocol_name + working_storage + \"/\" + output_directory + \"/airports\").cache()\n",
-    "sqlContext.registerDataFrameAsTable(airports, \"airports\")\n",
-    "airports.printSchema()\n",
-    "airports.limit(20).toPandas()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load Flights data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "flights = sqlContext.read.parquet(protocol_name + working_storage + \"/\" + output_directory + \"/flights\").cache()\n",
-    "flights.printSchema()\n",
-    "sqlContext.registerDataFrameAsTable(flights, \"flights\")\n",
-    "flights.limit(10).toPandas()[[\"ArrDelay\",\"CarrierDelay\",\"CarrierDelayStr\",\"WeatherDelay\",\"WeatherDelayStr\",\"Distance\"]]"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Taxonomy for ArrDelay, CarrierDelay, and Distance colums"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "flights.describe(\"ArrDelay\",\"CarrierDelay\",\"Distance\").toPandas()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Let's find the top 10 of the most unpunctual airlines"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "%matplotlib inline\n",
-    "import matplotlib \n",
-    "import matplotlib.pyplot as plt\n",
-    "matplotlib.style.use('ggplot')\n",
-    "\n",
-    "\n",
-    "delay = sqlContext.sql(\"select SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay from \\\n",
-    "   (select ceil( avg(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, \\\n",
-    "   f.UniqueCarrier \\\n",
-    "   FROM flights f \\\n",
-    "         WHERE f.DayOfWeek < 6 \\\n",
-    "         GROUP BY f.UniqueCarrier ORDER BY WorkDayDelay desc limit 10) t \\\n",
-    "JOIN \\\n",
-    "  (select ceil( avg(f.ArrDelay + f.DepDelay) ) as WeekendDelay, \\\n",
-    "   f.UniqueCarrier \\\n",
-    "   FROM flights f \\\n",
-    "         WHERE f.DayOfWeek > 5 \\\n",
-    "         GROUP BY f.UniqueCarrier) t1 \\\n",
-    "ON t.UniqueCarrier = t1.UniqueCarrier \\\n",
-    "JOIN carriers c on t.UniqueCarrier = c.code order by WeekendDelay desc, WorkDayDelay desc \\\n",
-    "\").toPandas()\n",
-    "\n",
-    "color_range_days = [\"#2966FF\", \"#61F2FF\"]\n",
-    "delay[\"Average\"] = (delay.WorkDayDelay + delay.WeekendDelay) / 2\n",
-    "ax = delay.Average.plot(x='Carrier', linestyle='-', marker='o')\n",
-    "delay.plot(x='Carrier', y=['WorkDayDelay','WeekendDelay'], kind='bar', legend = True,  figsize=(12, 4), color=color_range_days, ax=ax);"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Number of flight performed by top companies"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "%matplotlib inline\n",
-    "import matplotlib \n",
-    "import matplotlib.pyplot as plt\n",
-    "matplotlib.style.use('ggplot')\n",
-    "\n",
-    "q = \"\"\"SELECT t.cnt as FlightsAmt, carriers.description as Carrier FROM (\n",
-    "            SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code \n",
-    "                FROM flights GROUP BY flights.UniqueCarrier LIMIT 6) t \n",
-    "            LEFT JOIN carriers ON t.carrier_code = carriers.code\"\"\"\n",
-    "\n",
-    "topFlights = sqlContext.sql(q).toPandas()\n",
-    "topFlights.plot.pie(labels=topFlights[\"Carrier\"], autopct='%.2f', legend=False, y=\"FlightsAmt\", figsize=(12,12));"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## The average Flight Distance per Company"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "%matplotlib inline\n",
-    "import matplotlib \n",
-    "import matplotlib.pyplot as plt\n",
-    "matplotlib.style.use('ggplot')\n",
-    "\n",
-    "resultDistance = sqlContext.sql(\"SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance FROM flights f JOIN carriers c ON f.UniqueCarrier = c.code GROUP BY c.description ORDER BY distance DESC LIMIT 10\").toPandas()\n",
-    "\n",
-    "color_range =  [\"#2966FF\",\n",
-    " \t  \"#2E73FF\",\n",
-    " \t  \"#3380FF\",\n",
-    " \t  \"#388CFF\",\n",
-    " \t  \"#3D99FF\",\n",
-    " \t  \"#42A6FF\",\n",
-    " \t  \"#47B2FF\",\n",
-    " \t  \"#4CBFFF\",\n",
-    " \t  \"#52CCFF\",\n",
-    " \t  \"#57D9FF\",\n",
-    " \t  \"#5CE6FF\",\n",
-    " \t  \"#61F2FF\",\n",
-    "      \"#66FFFF\"]\n",
-    "\n",
-    "resultDistance.plot(x='Carrier',  y='Distance', kind='bar', color=color_range, legend = False);"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/jupyter/template_visualization_sparkr.ipynb b/integration-tests/examples/test_templates/jupyter/template_visualization_sparkr.ipynb
deleted file mode 100644
index 10f09a7..0000000
--- a/integration-tests/examples/test_templates/jupyter/template_visualization_sparkr.ipynb
+++ /dev/null
@@ -1,249 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "sparkR.session()\n",
-    "\n",
-    "full_path <- function(file_path) {\n",
-    "    working_storage <- \"WORKING_STORAGE\"\n",
-    "    output_directory <- \"jupyter/r\"\n",
-    "    protocol_name <- \"PROTOCOL_NAME://%s/%s/%s\"   \n",
-    "    sprintf(protocol_name, working_storage, output_directory, file_path)\n",
-    "}"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load Carriers data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "carriers <- read.df(full_path(\"carriers\"), \"parquet\")\n",
-    "createOrReplaceTempView(carriers, \"carriers\")\n",
-    "printSchema(carriers)\n",
-    "head(carriers, 20)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load Airports data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "airports <- read.df(full_path(\"airports\"), \"parquet\")\n",
-    "createOrReplaceTempView(airports, \"airports\")\n",
-    "printSchema(airports)\n",
-    "head(airports, 20)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Load Flights data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "flights <- read.df(full_path(\"flights\"), \"parquet\")\n",
-    "createOrReplaceTempView(flights, \"flights\")\n",
-    "printSchema(flights)\n",
-    "head(flights, 10)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Taxonomy for ArrDelay, CarrierDelay, and Distance colums"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "head(summary(limit(flights,1000)))[c(\"summary\", \"ArrDelay\",\"CarrierDelay\",\"Distance\")]"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Let's find the top 10 of the most unpunctual airlines"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "library(ggplot2)\n",
-    "library(reshape2)\n",
-    "\n",
-    "delay_sql <- sql(\"\n",
-    "SELECT SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay \n",
-    "FROM\n",
-    "       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, f.UniqueCarrier\n",
-    "        FROM flights f\n",
-    "        WHERE f.DayOfWeek < 6\n",
-    "        GROUP BY f.UniqueCarrier \n",
-    "        ORDER BY WorkDayDelay desc \n",
-    "        LIMIT 10) t\n",
-    "    JOIN\n",
-    "       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WeekendDelay, f.UniqueCarrier\n",
-    "        FROM flights f\n",
-    "        WHERE f.DayOfWeek > 5\n",
-    "        GROUP BY f.UniqueCarrier) t1\n",
-    "      ON t.UniqueCarrier = t1.UniqueCarrier\n",
-    "    JOIN carriers c \n",
-    "      ON t.UniqueCarrier = c.code \n",
-    "ORDER BY WeekendDelay DESC, WorkDayDelay DESC\n",
-    "\")\n",
-    "\n",
-    "delay <- collect(delay_sql)\n",
-    "delay_melt <- melt(delay[c('Carrier', 'WorkDayDelay', 'WeekendDelay')])\n",
-    "\n",
-    "color_range_days <- c(\"#2966FF\", \"#61F2FF\")\n",
-    "\n",
-    "ggplot(data=delay_melt, aes(x=Carrier, y=value, fill=variable)) +\n",
-    "    geom_bar(stat=\"identity\", width=.7, position=\"dodge\") +\n",
-    "    stat_summary(fun.y=mean, geom = \"line\", mapping = aes(group = 1), color=\"red\") +\n",
-    "    stat_summary(fun.y=mean, geom = \"point\", mapping = aes(group = 1), color=\"red\") +\n",
-    "    theme(legend.position=\"right\", axis.text.x=element_text(angle=90)) +\n",
-    "    labs(x=\"Carrier\", y=\"Minutes\", fill=\"Day Type\") +\n",
-    "    coord_fixed(ratio = .2) +\n",
-    "    scale_fill_manual(values=color_range_days) +\n",
-    "    scale_y_continuous(breaks=seq(0, 30, 5))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## Number of flight performed by top companies"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "top_flights_sql <- sql(\"\n",
-    "SELECT t.cnt as FlightsAmt, carriers.description as Carrier \n",
-    "FROM (\n",
-    "    SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code \n",
-    "    FROM flights \n",
-    "    GROUP BY flights.UniqueCarrier LIMIT 6) t \n",
-    "LEFT JOIN carriers \n",
-    "  ON t.carrier_code = carriers.code\n",
-    "\")\n",
-    "\n",
-    "top_flights <- collect(top_flights_sql)\n",
-    "\n",
-    "ggplot(transform(transform(top_flights, value=FlightsAmt/sum(FlightsAmt)), labPos=cumsum(FlightsAmt)-FlightsAmt/2), \n",
-    "       aes(x=\"\", y = FlightsAmt, fill = Carrier)) +\n",
-    "    geom_bar(width = 1, stat = \"identity\") +\n",
-    "    coord_polar(\"y\", start=0) +\n",
-    "    scale_fill_brewer(palette=\"Dark2\") +\n",
-    "    theme_bw() +\n",
-    "    theme(axis.text.x=element_blank() ,panel.grid.major=element_blank(),panel.grid.minor = element_blank(),panel.border = element_blank()) +\n",
-    "    geom_text(size=4, aes(y=labPos, label=scales::percent(value))) + \n",
-    "    geom_text(size=3, aes(x=1.8, y=labPos, label=top_flights$Carrier)) + \n",
-    "    theme(legend.position=\"none\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "## The average Flight Distance per Company"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": true
-   },
-   "outputs": [],
-   "source": [
-    "distance_sql = sql(\"\n",
-    "SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance \n",
-    "FROM flights f \n",
-    "JOIN carriers c \n",
-    "  ON f.UniqueCarrier = c.code \n",
-    "GROUP BY c.description \n",
-    "ORDER BY distance DESC \n",
-    "LIMIT 10\n",
-    "\")\n",
-    "\n",
-    "distance <- collect(distance_sql)\n",
-    "\n",
-    "distance$Carrier <- factor(distance$Carrier, levels = distance$Carrier[order(-distance$Distance)])\n",
-    "\n",
-    "color_range <-  c(\"#2966FF\", \"#2E73FF\",\"#3380FF\", \"#388CFF\", \"#3D99FF\", \"#42A6FF\", \"#47B2FF\", \"#4CBFFF\", \"#52CCFF\", \n",
-    "                  \"#57D9FF\", \"#5CE6FF\", \"#61F2FF\", \"#66FFFF\")\n",
-    "\n",
-    "ggplot(data=distance, aes(x=Carrier, y=Distance, fill=Carrier)) +\n",
-    "    geom_bar(stat=\"identity\", width=.7, position=\"dodge\") +\n",
-    "    theme(axis.text.x=element_text(angle=90)) +\n",
-    "    scale_fill_manual(values=color_range) +\n",
-    "    theme(legend.position=\"none\")"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "collapsed": true
-   },
-   "outputs": [],
-   "source": []
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Local SparkR (R-3.4.2, Spark-2.1.0)",
-   "language": "R",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": "r",
-   "file_extension": ".r",
-   "mimetype": "text/x-r-source",
-   "name": "R",
-   "pygments_lexer": "r",
-   "version": "3.4.2"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 1
-}
diff --git a/integration-tests/examples/test_templates/rstudio/template_preparation.r b/integration-tests/examples/test_templates/rstudio/template_preparation.r
deleted file mode 100644
index 773a049..0000000
--- a/integration-tests/examples/test_templates/rstudio/template_preparation.r
+++ /dev/null
@@ -1,27 +0,0 @@
-sc <- sparkR.session(MASTER)
-
-working_storage <- "WORKING_STORAGE"
-output_directory <- "rstudio"
-protocol_name <- "PROTOCOL_NAME"
-
-bucket_path <- function(file_path) {
-    sprintf("%s://%s/rstudio_dataset/%s", protocol_name, working_storage, file_path)
-}
-
-full_path <- function(file_path) {
-    sprintf("%s://%s/%s/%s", protocol_name, working_storage, output_directory, file_path)
-}
-
-carriers <- read.df(bucket_path("carriers.csv"), "csv", header="true", inferSchema="true")
-write.df(carriers, path=full_path("carriers"), source="parquet", mode="overwrite")
-createOrReplaceTempView(carriers, "carriers")
-
-airports <- read.df(bucket_path("airports.csv"), "csv", header="true", inferSchema="true")
-write.df(airports, path=full_path("airports"), source="parquet", mode="overwrite")
-createOrReplaceTempView(airports, "airports")
-
-flights_w_na <- read.df(bucket_path("2008.csv.bz2"), "csv", header="true", inferSchema="true")
-flights <- fillna(flights_w_na, 0, cols=colnames(flights_w_na)[c(15, 16, 25:29)])
-write.df(flights, path=full_path("flights"), source="parquet", mode="overwrite")
-createOrReplaceTempView(flights, "flights")
-colnames(flights)
diff --git a/integration-tests/examples/test_templates/rstudio/template_visualization.r b/integration-tests/examples/test_templates/rstudio/template_visualization.r
deleted file mode 100644
index 6b38ec8..0000000
--- a/integration-tests/examples/test_templates/rstudio/template_visualization.r
+++ /dev/null
@@ -1,105 +0,0 @@
-sc <- sparkR.session(MASTER)
-
-full_path <- function(file_path) {
-    working_storage <- "WORKING_STORAGE"
-    output_directory <- "rstudio"
-    protocol_name <- "PROTOCOL_NAME"
-    sprintf("%s://%s/%s/%s", protocol_name, working_storage, output_directory, file_path)
-}
-
-carriers <- read.df(full_path("carriers"), "parquet")
-createOrReplaceTempView(carriers, "carriers")
-printSchema(carriers)
-
-airports <- read.df(full_path("airports"), "parquet")
-createOrReplaceTempView(airports, "airports")
-printSchema(airports)
-
-flights <- read.df(full_path("flights"), "parquet")
-createOrReplaceTempView(flights, "flights")
-printSchema(flights)
-
-library(ggplot2)
-library(reshape2)
-
-delay_sql <- sql("
-SELECT SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay
-FROM
-       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, f.UniqueCarrier
-        FROM flights f
-        WHERE f.DayOfWeek < 6
-        GROUP BY f.UniqueCarrier
-        ORDER BY WorkDayDelay desc
-        LIMIT 10) t
-    JOIN
-       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WeekendDelay, f.UniqueCarrier
-        FROM flights f
-        WHERE f.DayOfWeek > 5
-        GROUP BY f.UniqueCarrier) t1
-      ON t.UniqueCarrier = t1.UniqueCarrier
-    JOIN carriers c
-      ON t.UniqueCarrier = c.code
-ORDER BY WeekendDelay DESC, WorkDayDelay DESC
-")
-
-delay <- collect(delay_sql)
-delay_melt <- melt(delay[c('Carrier', 'WorkDayDelay', 'WeekendDelay')])
-
-color_range_days <- c("#2966FF", "#61F2FF")
-
-ggplot(data=delay_melt, aes(x=Carrier, y=value, fill=variable)) +
-    geom_bar(stat="identity", width=.7, position="dodge") +
-    stat_summary(fun.y=mean, geom = "line", mapping = aes(group = 1), color="red") +
-    stat_summary(fun.y=mean, geom = "point", mapping = aes(group = 1), color="red") +
-    theme(legend.position="right", axis.text.x=element_text(angle=90)) +
-    labs(x="Carrier", y="Minutes", fill="Day Type") +
-    coord_fixed(ratio = .2) +
-    scale_fill_manual(values=color_range_days) +
-    scale_y_continuous(breaks=seq(0, 30, 5))
-
-top_flights_sql <- sql("
-SELECT t.cnt as FlightsAmt, carriers.description as Carrier
-FROM (
-    SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code
-    FROM flights
-    GROUP BY flights.UniqueCarrier LIMIT 6) t
-LEFT JOIN carriers
-  ON t.carrier_code = carriers.code
-")
-
-top_flights <- collect(top_flights_sql)
-
-ggplot(transform(transform(top_flights, value=FlightsAmt/sum(FlightsAmt)), labPos=cumsum(FlightsAmt)-FlightsAmt/2),
-       aes(x="", y = FlightsAmt, fill = Carrier)) +
-    geom_bar(width = 1, stat = "identity") +
-    coord_polar("y", start=0) +
-    scale_fill_brewer(palette="Dark2") +
-    theme_bw() +
-    theme(axis.text.x=element_blank() ,panel.grid.major=element_blank(),panel.grid.minor = element_blank(),panel.border = element_blank()) +
-    geom_text(size=4, aes(y=labPos, label=scales::percent(value))) +
-    geom_text(size=3, aes(x=1.8, y=labPos, label=top_flights$Carrier)) +
-    theme(legend.position="none")
-
-distance_sql = sql("
-SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance
-FROM flights f
-JOIN carriers c
-  ON f.UniqueCarrier = c.code
-GROUP BY c.description
-ORDER BY distance DESC
-LIMIT 10
-")
-
-distance <- collect(distance_sql)
-
-distance$Carrier <- factor(distance$Carrier, levels = distance$Carrier[order(-distance$Distance)])
-
-color_range <-  c("#2966FF", "#2E73FF","#3380FF", "#388CFF", "#3D99FF", "#42A6FF", "#47B2FF", "#4CBFFF", "#52CCFF",
-                  "#57D9FF", "#5CE6FF", "#61F2FF", "#66FFFF")
-
-ggplot(data=distance, aes(x=Carrier, y=Distance, fill=Carrier)) +
-    geom_bar(stat="identity", width=.7, position="dodge") +
-    theme(axis.text.x=element_text(angle=90)) +
-    scale_fill_manual(values=color_range) +
-    theme(legend.position="none")
-
diff --git a/integration-tests/examples/test_templates/tensor/template_preparation_tensor.ipynb b/integration-tests/examples/test_templates/tensor/template_preparation_tensor.ipynb
deleted file mode 100644
index 0396cb3..0000000
--- a/integration-tests/examples/test_templates/tensor/template_preparation_tensor.ipynb
+++ /dev/null
@@ -1,285 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os, cv2, random\n",
-    "import numpy as np\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline \n",
-    "from keras.models import Sequential, load_model\n",
-    "from keras.layers import Dropout, Flatten, Convolution2D, MaxPooling2D, Dense, Activation\n",
-    "from keras.optimizers import Adam\n",
-    "from keras.callbacks import Callback, EarlyStopping\n",
-    "from keras.callbacks import BaseLogger, TensorBoard"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Constants definition"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "TRAIN_DIR = '/home/dlab-user/train/'\n",
-    "TEST_DIR = '/home/dlab-user/test/'\n",
-    "ROWS = 128\n",
-    "COLS = 128\n",
-    "CHANNELS = 3\n",
-    "TRAIN_IMAGES_COUNT = 1000\n",
-    "PATH_TO_LOGS = '/home/dlab-user/logs'"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Reading and adjusting images for training"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "all_images = [TRAIN_DIR+i for i in os.listdir(TRAIN_DIR)[:TRAIN_IMAGES_COUNT]]\n",
-    "test_images =  [TEST_DIR+i for i in os.listdir(TEST_DIR)]\n",
-    "random.shuffle(all_images)\n",
-    "test_coeff = int(len (all_images) * .9)\n",
-    "\n",
-    "train_images, test_images = all_images[:test_coeff], all_images[test_coeff:]\n",
-    "\n",
-    "def read_image(file_path):\n",
-    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
-    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
-    "\n",
-    "def prepare_data(images):\n",
-    "    count = len(images)\n",
-    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
-    "\n",
-    "    for i, image_file in enumerate(images):\n",
-    "        image = read_image(image_file)\n",
-    "        data[i] = image#.T\n",
-    "    return data\n",
-    "\n",
-    "train = prepare_data(train_images)\n",
-    "test = prepare_data(test_images)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Image counts"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "print(\"Train shape: {}\".format(train.shape))\n",
-    "print(\"Test shape: {}\".format(test.shape))"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Assigning labels to training images"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "labels = []\n",
-    "for i in train_images:\n",
-    "    if 'dog' in i.split(\"/\")[-1] :\n",
-    "        labels.append(1)\n",
-    "    else:\n",
-    "        labels.append(0)\n",
-    "        \n",
-    "labels_test = []\n",
-    "for i in test_images:\n",
-    "    if 'dog' in i.split(\"/\")[-1] :\n",
-    "        labels_test.append(1)\n",
-    "    else:\n",
-    "        labels_test.append(0)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Building a convnet"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "optimizer = Adam(lr=1e-6)\n",
-    "objective = 'binary_crossentropy'\n",
-    "\n",
-    "def build_model():\n",
-    "    \n",
-    "    model = Sequential()\n",
-    "\n",
-    "    model.add(Convolution2D(32, 3, 3, border_mode='same', input_shape=(ROWS, COLS, 3), activation='relu'))\n",
-    "    model.add(Convolution2D(32, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
-    "\n",
-    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(Convolution2D(64, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
-    "    \n",
-    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(Convolution2D(128, 3, 3, border_mode='same', activation='relu'))\n",
-    "    model.add(MaxPooling2D(pool_size=(2, 2)))\n",
-    "\n",
-    "    model.add(Flatten())\n",
-    "    model.add(Dense(256, activation='relu'))\n",
-    "    model.add(Dropout(0.5))\n",
-    "    \n",
-    "    model.add(Dense(1))\n",
-    "    model.add(Activation('sigmoid'))\n",
-    "    \n",
-    "    model.compile(loss=objective, optimizer=optimizer, metrics=['accuracy'])\n",
-    "    return model\n",
-    "\n",
-    "\n",
-    "model = build_model()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Training the model"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This block takes about 2.5-3 hours to execute if training on whole dataset of 22500 images"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "nb_epoch = 10\n",
-    "batch_size = 16\n",
-    "\n",
-    "class LossHistory(Callback):\n",
-    "    def on_train_begin(self, logs={}):\n",
-    "        self.losses = []\n",
-    "        self.val_losses = []\n",
-    "        \n",
-    "    def on_epoch_end(self, batch, logs={}):\n",
-    "        self.losses.append(logs.get('loss'))\n",
-    "        self.val_losses.append(logs.get('val_loss'))\n",
-    "\n",
-    "early_stopping = EarlyStopping(monitor='val_loss', patience=5, verbose=1, mode='auto')        \n",
-    "        \n",
-    "def train_and_test_model():\n",
-    "    history = LossHistory()\n",
-    "    tensorboard = TensorBoard(log_dir=PATH_TO_LOGS)\n",
-    "    model.fit(train, labels, batch_size=batch_size, nb_epoch=nb_epoch,\n",
-    "              validation_split=0.25, verbose=2, shuffle=True, callbacks=[history, early_stopping, tensorboard])\n",
-    "    \n",
-    "\n",
-    "    predictions = model.predict(test, verbose=2)\n",
-    "    return predictions, history\n",
-    "\n",
-    "predictions, history = train_and_test_model()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Saving the model and weights"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "path_to_model = '/home/dlab-user/model_1000.json'\n",
-    "path_to_weights = '/home/dlab-user/weigths_1000.h5'\n",
-    "\n",
-    "model.save(path_to_model)\n",
-    "model.save_weights(path_to_weights)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Plotting learning curves"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "loss = history.losses\n",
-    "val_loss = history.val_losses\n",
-    "\n",
-    "plt.xlabel('Epochs')\n",
-    "plt.ylabel('Loss')\n",
-    "plt.title('VGG-16 Loss Trend')\n",
-    "plt.plot(loss, 'blue', label='Training Loss')\n",
-    "plt.plot(val_loss, 'green', label='Validation Loss')\n",
-    "plt.xticks(range(0,len(loss))[0::2])\n",
-    "plt.legend()\n",
-    "plt.show()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/tensor/template_visualization_tensor.ipynb b/integration-tests/examples/test_templates/tensor/template_visualization_tensor.ipynb
deleted file mode 100644
index 92eba38..0000000
--- a/integration-tests/examples/test_templates/tensor/template_visualization_tensor.ipynb
+++ /dev/null
@@ -1,139 +0,0 @@
-{
- "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import os, cv2\n",
-    "from keras.models import load_model\n",
-    "import matplotlib.pyplot as plt\n",
-    "%matplotlib inline\n",
-    "import numpy as np\n",
-    "import tensorflow as tf"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Pathes to model and weights"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "path_to_model = '/home/dlab-user/model_1000.json'\n",
-    "path_to_weights = '/home/dlab-user/weigths_1000.h5'"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Loading test images"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "ROWS = 128\n",
-    "COLS = 128\n",
-    "CHANNELS = 3\n",
-    "TEST_DIR = '/home/dlab-user/test/'\n",
-    "all_image_names = os.listdir(TEST_DIR)\n",
-    "all_image_names.sort()\n",
-    "test_images =  [TEST_DIR+i for i in all_image_names[6:11] + all_image_names[19:32] + all_image_names[33:34]]\n",
-    "\n",
-    "def read_image(file_path):\n",
-    "    img = cv2.imread(file_path, cv2.IMREAD_COLOR)\n",
-    "    return cv2.resize(img, (ROWS, COLS), interpolation=cv2.INTER_CUBIC).reshape(ROWS, COLS, CHANNELS)\n",
-    "\n",
-    "def prep_data(images):\n",
-    "    count = len(images)\n",
-    "    data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.uint8)\n",
-    "\n",
-    "    for i, image_file in enumerate(images):\n",
-    "        image = read_image(image_file)\n",
-    "        data[i] = image\n",
-    "    return data\n",
-    "test = prep_data(test_images)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Loading the model and making predictions on test data"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "with tf.device('/cpu:0'):\n",
-    "    model = load_model(path_to_model)\n",
-    "    model.load_weights(path_to_weights)\n",
-    "    predictions = model.predict(test, verbose=2)     "
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "# Visualizing results (rendering can take about a minute)"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {
-    "scrolled": false
-   },
-   "outputs": [],
-   "source": [
-    "plt.figure(figsize=(16, 12))\n",
-    "for i in range(0, 12):\n",
-    "    plt.subplot(3, 4, i+1)\n",
-    "    if predictions[i, 0] >= 0.5: \n",
-    "        plt.title('{:.2%} Dog'.format(predictions[i][0]))\n",
-    "    else: \n",
-    "        plt.title('{:.2%} Cat'.format(1-predictions[i][0]))\n",
-    "        \n",
-    "    plt.imshow(cv2.cvtColor(test[i], cv2.COLOR_BGR2RGB))\n",
-    "    plt.axis('off')\n",
-    "plt.show()"
-   ]
-  }
- ],
- "metadata": {
-  "kernelspec": {
-   "display_name": "Python 2",
-   "language": "python",
-   "name": "KERNEL_NAME"
-  },
-  "language_info": {
-   "codemirror_mode": {
-    "name": "ipython",
-    "version": 2
-   },
-   "file_extension": ".py",
-   "mimetype": "text/x-python",
-   "name": "python",
-   "nbconvert_exporter": "python",
-   "pygments_lexer": "ipython2",
-   "version": "2.7.13"
-  }
- },
- "nbformat": 4,
- "nbformat_minor": 2
-}
diff --git a/integration-tests/examples/test_templates/zeppelin/template_preparation_pyspark.json b/integration-tests/examples/test_templates/zeppelin/template_preparation_pyspark.json
deleted file mode 100644
index abf99f9..0000000
--- a/integration-tests/examples/test_templates/zeppelin/template_preparation_pyspark.json
+++ /dev/null
@@ -1 +0,0 @@
-{"paragraphs":[{"title":"Init Spark","text":"%INTERPRETER_NAME\nfrom pyspark.sql import SQLContext\nfrom pyspark.sql import DataFrame\nfrom pyspark.sql import Row\nfrom pyspark.sql.types import *\nimport pandas as pd\nimport StringIO\nimport matplotlib.pyplot as plt\nhc = sc._jsc.hadoopConfiguration()\nhc.set(\"hive.execution.engine\", \"mr\")","dateUpdated":"2018-01-03T14:13:24+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514975882432_-581784801","id":"20170116-185159_818793480","dateCreated":"2018-01-03T10:38:02+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:2275"},{"title":"Define functions","text":"%INTERPRETER_NAME\r\nimport csv\r\n\r\nworking_storage = \"WORKING_STORAGE\"\r\noutput_directory = 'zeppelin/py2'\r\nprotocol_name = 'PROTOCOL_NAME'\r\n\r\ndef parseCsv(csvStr):\r\n    f = StringIO.StringIO(csvStr)\r\n    reader = csv.reader(f, delimiter=',')\r\n    row = reader.next()\r\n    return row\r\n\r\ndef bucket_path(part_path):\r\n    return '{}://{}/zeppelin_dataset/{}'.format(protocol_name, working_storage, part_path)\r\n\r\ndef full_path(part_path):\r\n    return '{}://{}/{}/{}'.format(protocol_name, working_storage, output_directory, part_path)\r\n\r\nscsv = '\"02Q\",\"Titan Airways\"'\r\nrow = parseCsv(scsv)\r\nprint row[0]\r\nprint 
row[1]","dateUpdated":"2018-01-03T14:14:20+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514975882433_-582169550","id":"20170116-193003_477574066","dateCreated":"2018-01-03T10:38:02+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:2276"},{"title":"Parse and convert Carrier data to parquet","text":"%INTERPRETER_NAME\r\n\r\ncarriersHeader = 'Code,Description'\r\ncarriersText = sc.textFile(bucket_path(\"carriers.csv\")).filter(lambda x: x != carriersHeader)\r\ncarriers = carriersText.map(lambda s: parseCsv(s)) \\\r\n    .map(lambda s: Row(code=s[0], description=s[1])).cache().toDF()\r\ncarriers.write.mode(\"overwrite\").parquet(full_path(\"carriers\"))    \r\nsqlContext.registerDataFrameAsTable(carriers, \"carriers\")\r\ncarriers.limit(20).toPandas()","dateUpdated":"2018-01-03T14:13:25+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514975882433_-582169550","id":"20170116-193845_1563104751","dateCreated":"2018-01-03T10:38:02+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:2277"},{"title":"Parse and convert to parquet Airport data","text":"%INTERPRETER_NAME\n\nairportsHeader= '\"iata\",\"airport\",\"city\",\"state\",\"country\",\"lat\",\"long\"'\nairports = sc.textFile(bucket_path(\"airports.csv\")) \\\n    .filter(lambda x: x != airportsHeader) \\\n    .map(lambda s: parseCsv(s)) \\\n    .map(lambda p: Row(iata=p[0], \\\n    
                   airport=p[1], \\\n                       city=p[2], \\\n                       state=p[3], \\\n                       country=p[4], \\\n                       lat=float(p[5]), \\\n                       longt=float(p[6])) \\\n        ).cache().toDF()\nairports.write.mode(\"overwrite\").parquet(full_path(\"airports\"))    \nsqlContext.registerDataFrameAsTable(airports, \"airports\")\nairports.limit(20).toPandas()","dateUpdated":"2018-01-03T14:13:26+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514975882434_-581015303","id":"20170116-194608_52076348","dateCreated":"2018-01-03T10:38:02+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:2278"},{"title":"Parse and convert Flights data to parquet","text":"%INTERPRETER_NAME\n\nflightsHeader = 'Year,Month,DayofMonth,DayOfWeek,DepTime,CRSDepTime,ArrTime,CRSArrTime,UniqueCarrier,FlightNum,TailNum,ActualElapsedTime,CRSElapsedTime,AirTime,ArrDelay,DepDelay,Origin,Dest,Distance,TaxiIn,TaxiOut,Cancelled,CancellationCode,Diverted,CarrierDelay,WeatherDelay,NASDelay,SecurityDelay,LateAircraftDelay'\nflights = sc.textFile(bucket_path(\"2008.csv.bz2\")) \\\n    .filter(lambda x: x!= flightsHeader) \\\n    .map(lambda s: parseCsv(s)) \\\n    .map(lambda p: Row(Year=int(p[0]), \\\n                       Month=int(p[1]), \\\n                       DayofMonth=int(p[2]), \\\n                       DayOfWeek=int(p[3]), \\\n                       DepTime=p[4], \\\n                       CRSDepTime=p[5], \\\n                       ArrTime=p[6], \\\n                       CRSArrTime=p[7], \\\n                       UniqueCarrier=p[8], \\\n                       FlightNum=p[9], \\\n                       
TailNum=p[10], \\\n                       ActualElapsedTime=p[11], \\\n                       CRSElapsedTime=p[12], \\\n                       AirTime=p[13], \\\n                       ArrDelay=int(p[14].replace(\"NA\", \"0\")), \\\n                       DepDelay=int(p[15].replace(\"NA\", \"0\")), \\\n                       Origin=p[16], \\\n                       Dest=p[17], \\\n                       Distance=long(p[18]), \\\n                       TaxiIn=p[19], \\\n                       TaxiOut=p[20], \\\n                       Cancelled=p[21], \\\n                       CancellationCode=p[22], \\\n                       Diverted=p[23], \\\n                       CarrierDelay=int(p[24].replace(\"NA\", \"0\")), \\\n                                              CarrierDelayStr=p[24], \\\n                       WeatherDelay=int(p[25].replace(\"NA\", \"0\")), \\\n                                              WeatherDelayStr=p[25], \\\n                       NASDelay=int(p[26].replace(\"NA\", \"0\")), \\\n                       SecurityDelay=int(p[27].replace(\"NA\", \"0\")), \\\n                       LateAircraftDelay=int(p[28].replace(\"NA\", \"0\")))) \\\n         .toDF()\n\nflights.write.mode(\"ignore\").parquet(full_path(\"flights\"))\nsqlContext.registerDataFrameAsTable(flights, 
\"flights\")\nflights.limit(10).toPandas()[[\"ArrDelay\",\"CarrierDelay\",\"CarrierDelayStr\",\"WeatherDelay\",\"WeatherDelayStr\",\"Distance\"]]","dateUpdated":"2018-01-03T14:13:31+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514975882434_-581015303","id":"20170116-194514_1558643741","dateCreated":"2018-01-03T10:38:02+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:2279"},{"text":"","dateUpdated":"2018-01-03T10:38:02+0000","config":{"editorSetting":{"language":"scala"},"colWidth":12,"editorMode":"ace/mode/scala","results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514975882435_-581400052","id":"20170116-200314_1592643376","dateCreated":"2018-01-03T10:38:02+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:2280"}],"name":"Python 2 data preparation","id":"2D3UHPA39","angularObjects":{"2C6RJRBD2:shared_process":[],"2C6RJRBD1:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}}
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_preparation_spark.json b/integration-tests/examples/test_templates/zeppelin/template_preparation_spark.json
deleted file mode 100644
index 7d8e7ec..0000000
--- a/integration-tests/examples/test_templates/zeppelin/template_preparation_spark.json
+++ /dev/null
@@ -1 +0,0 @@
-{"paragraphs":[{"title":"Initialize. Connect to bucket","text":"%INTERPRETER_NAME\r\nimport org.apache.spark.sql._\r\n\r\nval workingStorage = \"WORKING_STORAGE\"\r\nval output_directory = \"zeppelin/scala\"\r\nval protocolName = \"PROTOCOL_NAME\"\r\nval sqlCtx = new SQLContext(sc)\r\nval hc = sc.hadoopConfiguration\r\nhc.set(\"hive.execution.engine\", \"mr\")\r\n","dateUpdated":"2018-01-03T14:29:14+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989613681_742800119","id":"20170120-150939_1549034406","dateCreated":"2018-01-03T14:26:53+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:1012"},{"title":"process CARRIERS data","text":"%INTERPRETER_NAME\ndef bucketPath(path: String) = {\n    s\"$protocolName://$workingStorage/zeppelin_dataset/$path\"\n}\ndef fullPath(path: String) = {\n    s\"$protocolName://$workingStorage/$output_directory/$path\"\n}\n\nval carriers = sqlCtx.read.\n                        format(\"com.databricks.spark.csv\").\n                        //option(\"inferSchema\", \"true\").\n                        option(\"header\", \"true\").\n                        
load(bucketPath(\"carriers.csv\"))\ncarriers.write.mode(SaveMode.Overwrite).parquet(fullPath(\"carriers/\"))\ncarriers.createOrReplaceTempView(\"carriers\")\ncarriers.show(20)","dateUpdated":"2018-01-03T15:19:04+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989613682_743954366","id":"20170120-151133_1703773120","dateCreated":"2018-01-03T14:26:53+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:1013"},{"title":"process AIRPORTS data","text":"%INTERPRETER_NAME\nval airports = sqlCtx.read.\n                        format(\"com.databricks.spark.csv\").\n                        //option(\"inferSchema\", \"true\").\n                        option(\"header\", \"true\").\n                        load(bucketPath(\"airports.csv\"))\nairports.write.mode(SaveMode.Overwrite).parquet(fullPath(\"airports/\"))\nairports.createOrReplaceTempView(\"airports\")\nairports.show(20)","dateUpdated":"2018-01-03T14:27:54+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989613682_743954366","id":"20170124-212934_103706299","dateCreated":"2018-01-03T14:26:53+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:1014"},{"title":"process FLIGHTS data (~100Mb)","text":"%INTERPRETER_NAME\nimport sqlCtx.implicits._\n\nval flights_w_na = sqlCtx.read.\n                        format(\"com.databricks.spark.csv\").\n                        
//option(\"inferSchema\", \"true\").\n                        option(\"header\", \"true\").\n                        option(\"nullValue\", \"NA\").\n                        load(bucketPath(\"2008.csv.bz2\"))\nval flights = flights_w_na.na.fill(0)\nflights.write.mode(SaveMode.Overwrite).parquet(fullPath(\"flights/\"))\nflights.createOrReplaceTempView(\"flights\")\nflights.select($\"ArrDelay\",$\"CarrierDelay\",$\"WeatherDelay\",$\"Distance\").show(20)","dateUpdated":"2018-01-03T14:28:00+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989613682_743954366","id":"20170124-214513_801806968","dateCreated":"2018-01-03T14:26:53+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:1015"}],"name":"Scala_data_preparation","id":"2D4V61673","angularObjects":{"2C6RJRBD2:shared_process":[],"2C6RJRBD1:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}}
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_preparation_sparkr.json b/integration-tests/examples/test_templates/zeppelin/template_preparation_sparkr.json
deleted file mode 100644
index 367bd74..0000000
--- a/integration-tests/examples/test_templates/zeppelin/template_preparation_sparkr.json
+++ /dev/null
@@ -1 +0,0 @@
-{"paragraphs":[{"text":"%INTERPRETER_NAME\nsparkR.session()\n\nworking_storage <- \"WORKING_STORAGE\"\noutput_directory <- \"zeppelin/r\"\nprotocol_name <- 'PROTOCOL_NAME'\nfull_path <- function(file_path) {\n    sprintf('%s://%s/%s/%s', protocol_name, working_storage, output_directory, file_path)\n}\nbucket_path <- function(file_path) {\n    sprintf('%s://%s/zeppelin_dataset/%s', protocol_name, working_storage, file_path)\n}\ncarriers <- read.df(bucket_path(\"carriers.csv\"), \"csv\", header=\"true\", inferSchema=\"true\")\nwrite.df(carriers, path=full_path(\"carriers\"), source=\"parquet\", mode=\"overwrite\")\ncreateOrReplaceTempView(carriers, \"carriers\")\nhead(carriers, 20)","dateUpdated":"2018-01-03T14:24:46+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989446718_-373603271","id":"20170224-132230_714744516","dateCreated":"2018-01-03T14:24:06+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:334"},{"text":"%INTERPRETER_NAME\nairports <- read.df(bucket_path(\"airports.csv\"), \"csv\", header=\"true\", inferSchema=\"true\")\nwrite.df(airports, path=full_path(\"airports\"), source=\"parquet\", mode=\"overwrite\")\ncreateOrReplaceTempView(airports, \"airports\")\nhead(airports, 20)","dateUpdated":"2018-01-03T14:24:48+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989446719_-373988020","id":"20170224-132306_851966551","dateCreated":"2018-01-03T14:24:06+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:335"},{"text":"%INTERPRETER_NAME\nflights_w_na <- read.df(bucket_path(\"2008.csv.bz2\"), \"csv\", header=\"true\", 
inferSchema=\"true\")\nflights <- fillna(flights_w_na, 0, cols=colnames(flights_w_na)[c(15, 16, 25:29)])\nwrite.df(flights, path=full_path(\"flights\"), source=\"parquet\", mode=\"overwrite\")\ncreateOrReplaceTempView(flights, \"flights\")\ncolnames(flights)\nhead(flights_w_na, 5)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]\nhead(flights, 5)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]","dateUpdated":"2018-01-03T14:24:49+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989446719_-373988020","id":"20170224-132326_1724524583","dateCreated":"2018-01-03T14:24:06+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:336"}],"name":"R_data_preparation","id":"2D57NEGDB","angularObjects":{"2C6RJRBD2:shared_process":[],"2C6RJRBD1:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}}
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_visualization_pyspark.json b/integration-tests/examples/test_templates/zeppelin/template_visualization_pyspark.json
deleted file mode 100644
index b363748..0000000
--- a/integration-tests/examples/test_templates/zeppelin/template_visualization_pyspark.json
+++ /dev/null
@@ -1 +0,0 @@
-{"paragraphs":[{"text":"%INTERPRETER_NAME\r\n\r\nfrom pyspark.sql import SQLContext\r\nfrom pyspark.sql import DataFrame\r\nfrom pyspark.sql import Row\r\nfrom pyspark.sql.types import *\r\nimport pandas as pd\r\nimport StringIO\r\nimport matplotlib\r\nmatplotlib.style.use('ggplot')\r\nimport matplotlib.pyplot as plt\r\nplt.switch_backend('WebAgg')\r\nimport os\r\nos.system(\"export DISPLAY=:0\")\r\nhc = sc._jsc.hadoopConfiguration()\r\nhc.set(\"hive.execution.engine\", \"mr\")\r\n\r\n\r\nworking_storage = \"WORKING_STORAGE\"\r\noutput_directory = \"zeppelin/py2\"\r\nprotocol_name = \"PROTOCOL_NAME\"\r\n\r\ndef full_path(part_path):\r\n    return '{}://{}/{}/{}'.format(protocol_name, working_storage, output_directory, part_path)\r\n    \r\ndef show(p, width):\r\n    img = StringIO.StringIO()\r\n    p.savefig(img, format='svg')\r\n    img.seek(0)\r\n    print \"%html <div style='display:inline-block;width:{}px'>{}</div>\".format(width, img.buf)\r\n","dateUpdated":"2018-01-03T14:22:49+0000","config":{"colWidth":6,"editorMode":"ace/mode/text","results":[],"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275351_1699779548","id":"20170116-204611_369809929","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:4139"},{"title":"Load Carriers data","text":"%INTERPRETER_NAME\r\n\r\ncarriers = sqlContext.read.parquet(full_path(\"carriers\")).cache()   \r\nsqlContext.registerDataFrameAsTable(carriers, 
\"carriers\")\r\ncarriers.printSchema()\r\ncarriers.limit(20).toPandas()","dateUpdated":"2018-01-03T14:21:42+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":354,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275351_1699779548","id":"20170116-204422_966931320","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4140"},{"title":"Load Airports data","text":"%INTERPRETER_NAME\r\n\r\nairports = sqlContext.read.parquet(full_path(\"airports\")).cache()\r\nsqlContext.registerDataFrameAsTable(airports, \"airports\")\r\nairports.printSchema()\r\nairports.limit(20).toPandas()","dateUpdated":"2018-01-03T14:21:43+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275351_1699779548","id":"20170116-210059_125873577","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4141"},{"title":"Load Flights data","text":"%INTERPRETER_NAME\r\n\r\nflights = sqlContext.read.parquet(full_path(\"flights\"))\r\nflights.printSchema()\r\nsqlContext.registerDataFrameAsTable(flights, 
\"flights\")\r\nflights.limit(10).toPandas()[[\"ArrDelay\",\"CarrierDelay\",\"CarrierDelayStr\",\"WeatherDelay\",\"WeatherDelayStr\",\"Distance\"]]","dateUpdated":"2018-01-03T14:21:45+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275352_1697855803","id":"20170116-210120_275435368","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4142"},{"title":"Taxonomy for ArrDelay, CarrierDelay, and Distance colums","text":"%INTERPRETER_NAME\nsummary = flights.describe(\"ArrDelay\",\"CarrierDelay\",\"Distance\")\nz.show(summary)","dateUpdated":"2018-01-03T14:21:46+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[{"name":"summary","index":0,"aggr":"sum"}],"values":[{"name":"ArrDelay","index":1,"aggr":"sum"}],"groups":[],"scatter":{"xAxis":{"name":"summary","index":0,"aggr":"sum"},"yAxis":{"name":"ArrDelay","index":1,"aggr":"sum"}}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275352_1697855803","id":"20170124-005707_1932994206","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4143"},{"title":"Let's find the top 10 of the most unpunctual airlines","text":"%INTERPRETER_NAME\r\nimport matplotlib.pyplot as plt\r\nplt.switch_backend('WebAgg')\r\n\r\ndelay_sql = sqlContext.sql(\"\"\"select SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay from\r\n   (select ceil( avg(f.ArrDelay + f.DepDelay) ) as WorkDayDelay,\r\n   
f.UniqueCarrier\r\n   FROM flights f\r\n         WHERE f.DayOfWeek < 6\r\n         GROUP BY f.UniqueCarrier ORDER BY WorkDayDelay desc limit 10) t\r\nJOIN \r\n  (select ceil( avg(f.ArrDelay + f.DepDelay) ) as WeekendDelay, \r\n   f.UniqueCarrier \r\n   FROM flights f \r\n         WHERE f.DayOfWeek > 5 \r\n         GROUP BY f.UniqueCarrier) t1 \r\nON t.UniqueCarrier = t1.UniqueCarrier \r\nJOIN carriers c on t.UniqueCarrier = c.code order by WeekendDelay desc, WorkDayDelay desc \r\n\"\"\")\r\n\r\ndelay = delay_sql.toPandas()\r\n\r\nfig = plt.figure()\r\ncolor_range_days = [\"#2966FF\", \"#61F2FF\"]\r\ndelay[\"Average\"] = (delay.WorkDayDelay + delay.WeekendDelay) / 2\r\nax = delay.Average.plot(x='Carrier', linestyle='-', marker='o')\r\nplot = delay.plot(x='Carrier', y=['WorkDayDelay','WeekendDelay'], kind='bar', legend = True,  figsize=(12, 4), color=color_range_days, ax=ax)\r\nfig.add_axes(plot)\r\nshow(plt, 1400)\r\n","dateUpdated":"2018-01-03T14:21:48+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275353_1697471054","id":"20170116-204836_1503115757","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4144"},{"title":"Same top 10 least punctual airlines using built-in visualization 
tools","text":"%INTERPRETER_NAME\n\nz.show(delay_sql)","dateUpdated":"2018-01-03T14:21:49+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":12,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"multiBarChart","height":324,"optionOpen":true,"keys":[{"name":"Carrier","index":0,"aggr":"sum","$$hashKey":"object:26353"}],"values":[{"name":"WorkDayDelay","index":1,"aggr":"sum","$$hashKey":"object:26356"},{"name":"WeekendDelay","index":2,"aggr":"sum","$$hashKey":"object:26357"}],"groups":[],"scatter":{"xAxis":{"name":"Carrier","index":0,"aggr":"sum"},"yAxis":{"name":"WorkDayDelay","index":1,"aggr":"sum"}}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275353_1697471054","id":"20170116-205529_682194031","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4145"},{"title":"Number of flight performed by top companies","text":"%INTERPRETER_NAME\r\n\r\nimport matplotlib.pyplot as plt\r\nplt.switch_backend('WebAgg')\r\n\r\nq = \"\"\"SELECT t.cnt as FlightsAmt, carriers.description as Carrier FROM (\r\n            SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code \r\n                FROM flights GROUP BY flights.UniqueCarrier LIMIT 6) t \r\n            LEFT JOIN carriers ON t.carrier_code = carriers.code\"\"\"\r\n\r\n\r\ntopFlights = sqlContext.sql(q).toPandas()\r\n\r\nfig1, ax1 = plt.subplots()\r\nax1.pie(topFlights[\"FlightsAmt\"], labels=topFlights[\"Carrier\"], autopct='%1.1f%%')\r\nax1.axis('equal')\r\nshow(plt, 
900)","dateUpdated":"2018-01-03T14:21:50+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275353_1697471054","id":"20170116-212002_1259500001","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4146"},{"title":"The average Flight Distance per Company","text":"%INTERPRETER_NAME\n\nN = 10\n\nresultDistance = sqlContext.sql(\"SELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance FROM flights f JOIN carriers c ON f.UniqueCarrier = c.code GROUP BY c.description ORDER BY distance DESC LIMIT {}\".format(N)).toPandas()\n\ncolor_range =  [\"#2966FF\",\n \t  \"#2E73FF\",\n \t  \"#3380FF\",\n \t  \"#388CFF\",\n \t  \"#3D99FF\",\n \t  \"#42A6FF\",\n \t  \"#47B2FF\",\n \t  \"#4CBFFF\",\n \t  \"#52CCFF\",\n \t  \"#57D9FF\",\n \t  \"#5CE6FF\",\n \t  \"#61F2FF\",\n      \"#66FFFF\"]\n\nx = range(N)\n_, ax = plt.subplots()\nax.bar(x, resultDistance['Distance'], color=color_range, tick_label=resultDistance['Carrier'])\nax.set_xlabel('Carrier')\nplt.xticks(rotation=70)\nshow(plt, 
800)","dateUpdated":"2018-01-03T14:21:55+0000","config":{"editorSetting":{"language":"text","editOnDblClick":false},"colWidth":6,"editorMode":"ace/mode/text","title":true,"results":[{"graph":{"mode":"table","height":589,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}}}],"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275354_1698625301","id":"20170116-213403_614421941","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4147"},{"text":"","dateUpdated":"2018-01-03T14:21:15+0000","config":{"editorSetting":{"language":"scala"},"colWidth":12,"editorMode":"ace/mode/scala","results":{},"graph":{"mode":"table","height":300,"optionOpen":false,"keys":[],"values":[],"groups":[],"scatter":{}},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1514989275354_1698625301","id":"20170123-231559_1974437472","dateCreated":"2018-01-03T14:21:15+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:4148"}],"name":"Python 2 data visualization","id":"2D5MV4UFW","angularObjects":{"2C6RJRBD2:shared_process":[],"2C6RJRBD1:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}}
\ No newline at end of file
diff --git a/integration-tests/examples/test_templates/zeppelin/template_visualization_sparkr.json b/integration-tests/examples/test_templates/zeppelin/template_visualization_sparkr.json
deleted file mode 100644
index 62c1400..0000000
--- a/integration-tests/examples/test_templates/zeppelin/template_visualization_sparkr.json
+++ /dev/null
@@ -1 +0,0 @@
-{"paragraphs":[{"text":"%INTERPRETER_NAME\nsparkR.session()\n\nfull_path <- function(file_path) {\n    working_storage <- \"WORKING_STORAGE\"\n    output_directory <- \"zeppelin/r\"\n    protocol_name <- 'PROTOCOL_NAME'\n    sprintf('%s://%s/%s/%s', protocol_name, working_storage, output_directory, file_path)\n}","dateUpdated":"2018-01-04T09:24:59+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823463_-1635169484","id":"20170329-112414_1472595813","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:372"},{"text":"%INTERPRETER_NAME\ncarriers <- read.df(full_path(\"carriers\"), \"parquet\")\ncreateOrReplaceTempView(carriers, \"carriers\")\nprintSchema(carriers)\nhead(carriers, 20)","dateUpdated":"2018-01-04T09:24:07+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823469_-1639016973","id":"20170329-112449_1638412317","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:373"},{"text":"%INTERPRETER_NAME\nairports <- read.df(full_path(\"airports\"), \"parquet\")\ncreateOrReplaceTempView(airports, \"airports\")\nprintSchema(airports)\nhead(airports, 
20)","dateUpdated":"2018-01-04T09:24:08+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823470_-1637862726","id":"20170329-112510_202152993","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:374"},{"text":"%INTERPRETER_NAME\nflights <- read.df(full_path(\"flights\"), \"parquet\")\ncreateOrReplaceTempView(flights, \"flights\")\nprintSchema(flights)\nhead(flights, 10)[c(\"ArrDelay\",\"CarrierDelay\",\"WeatherDelay\",\"Distance\")]","dateUpdated":"2018-01-04T09:24:10+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823471_-1638247475","id":"20170329-112523_1571758659","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:375"},{"text":"%INTERPRETER_NAME\nhead(summary(limit(flights,10)))[c(\"summary\", \"ArrDelay\",\"CarrierDelay\",\"Distance\")]","dateUpdated":"2018-01-04T09:24:10+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823472_-1627859255","id":"20170329-112535_1375397859","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:376"},{"text":"%INTERPRETER_NAME\nlibrary(ggplot2)\nlibrary(reshape2)\n\ndelay_sql <- sql(\"\nSELECT SUBSTR(c.description, 0, 15) as Carrier, WorkDayDelay, WeekendDelay \nFROM\n       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WorkDayDelay, f.UniqueCarrier\n        FROM flights f\n        WHERE f.DayOfWeek 
< 6\n        GROUP BY f.UniqueCarrier \n        ORDER BY WorkDayDelay desc \n        LIMIT 10) t\n    JOIN\n       (SELECT CEIL( AVG(f.ArrDelay + f.DepDelay) ) as WeekendDelay, f.UniqueCarrier\n        FROM flights f\n        WHERE f.DayOfWeek > 5\n        GROUP BY f.UniqueCarrier) t1\n      ON t.UniqueCarrier = t1.UniqueCarrier\n    JOIN carriers c \n      ON t.UniqueCarrier = c.code \nORDER BY WeekendDelay DESC, WorkDayDelay DESC\n\")\n\ndelay <- collect(delay_sql)\ndelay_melt <- melt(delay[c('Carrier', 'WorkDayDelay', 'WeekendDelay')])\n\ncolor_range_days <- c(\"#2966FF\", \"#61F2FF\")\n\nggplot(data=delay_melt, aes(x=Carrier, y=value, fill=variable)) +\n    geom_bar(stat=\"identity\", width=.7, position=\"dodge\") +\n    stat_summary(fun.y=mean, geom = \"line\", mapping = aes(group = 1), color=\"red\") +\n    stat_summary(fun.y=mean, geom = \"point\", mapping = aes(group = 1), color=\"red\") +\n    theme(legend.position=\"right\", axis.text.x=element_text(angle=90)) +\n    labs(x=\"Carrier\", y=\"Minutes\", fill=\"Day Type\") +\n    coord_fixed(ratio = .2) +\n    scale_fill_manual(values=color_range_days) +\n    scale_y_continuous(breaks=seq(0, 30, 5))","dateUpdated":"2018-01-04T09:24:12+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823472_-1627859255","id":"20170329-112549_2110062261","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:377"},{"text":"%INTERPRETER_NAME\ntop_flights_sql <- sql(\"\nSELECT t.cnt as FlightsAmt, carriers.description as Carrier \nFROM (\n    SELECT count(*) as cnt, flights.UniqueCarrier as carrier_code \n    FROM flights \n    GROUP BY flights.UniqueCarrier LIMIT 6) t \nLEFT JOIN carriers \n  ON t.carrier_code = carriers.code\n\")\n\ntop_flights <- 
collect(top_flights_sql)\n\nggplot(transform(transform(top_flights, value=FlightsAmt/sum(FlightsAmt)), labPos=cumsum(FlightsAmt)-FlightsAmt/2), \n       aes(x=\"\", y = FlightsAmt, fill = Carrier)) +\n    geom_bar(width = 1, stat = \"identity\") +\n    coord_polar(\"y\", start=0) +\n    scale_fill_brewer(palette=\"Dark2\") +\n    theme_bw() +\n    theme(axis.text.x=element_blank() ,panel.grid.major=element_blank(),panel.grid.minor = element_blank(),panel.border = element_blank()) +\n    geom_text(size=4, aes(y=labPos, label=scales::percent(value))) + \n    geom_text(size=3, aes(x=1.8, y=labPos, label=top_flights$Carrier)) + \n    theme(legend.position=\"none\")","dateUpdated":"2018-01-04T09:24:14+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823473_-1628244004","id":"20170329-112607_812774791","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:378"},{"text":"%INTERPRETER_NAME\ndistance_sql = sql(\"\nSELECT SUBSTR(c.description, 0, 15) as Carrier, COUNT(Distance) AS Distance \nFROM flights f \nJOIN carriers c \n  ON f.UniqueCarrier = c.code \nGROUP BY c.description \nORDER BY distance DESC \nLIMIT 10\n\")\n\ndistance <- collect(distance_sql)\n\ndistance$Carrier <- factor(distance$Carrier, levels = distance$Carrier[order(-distance$Distance)])\n\ncolor_range <-  c(\"#2966FF\", \"#2E73FF\",\"#3380FF\", \"#388CFF\", \"#3D99FF\", \"#42A6FF\", \"#47B2FF\", \"#4CBFFF\", \"#52CCFF\", \n                  \"#57D9FF\", \"#5CE6FF\", \"#61F2FF\", \"#66FFFF\")\n\nggplot(data=distance, aes(x=Carrier, y=Distance, fill=Carrier)) +\n    geom_bar(stat=\"identity\", width=.7, position=\"dodge\") +\n    theme(axis.text.x=element_text(angle=90)) +\n    scale_fill_manual(values=color_range) +\n    
theme(legend.position=\"none\")","dateUpdated":"2018-01-04T09:24:16+0000","config":{"colWidth":12,"editorMode":"ace/mode/text","results":{},"enabled":true,"editorSetting":{"language":"text","editOnDblClick":false}},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1515057823474_-1627089757","id":"20170329-112623_1822577399","dateCreated":"2018-01-04T09:23:43+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"$$hashKey":"object:379"}],"name":"Flights_Visualization_SparkR","id":"2D3B7XFMR","angularObjects":{"2C6RJRBD2:shared_process":[],"2C6RJRBD1:shared_process":[]},"config":{"looknfeel":"default","personalizedMode":"false"},"info":{}}
\ No newline at end of file
diff --git a/integration-tests/pom.xml b/integration-tests/pom.xml
deleted file mode 100644
index 9ac5284..0000000
--- a/integration-tests/pom.xml
+++ /dev/null
@@ -1,315 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
-    <modelVersion>4.0.0</modelVersion>
-    <groupId>com.epam.dlab</groupId>
-    <artifactId>integration-tests</artifactId>
-    <version>1.0</version>
-    <packaging>jar</packaging>
-
-	<scm>
-    	<connection>scm:git:file://.</connection>
-    	<url>scm:git:file://.</url>
-    	<tag>HEAD</tag>
-	</scm>
-
-    <properties>
-        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
-        <java.version>1.8</java.version>
-        <maven-compiler-plugin.version>3.5.1</maven-compiler-plugin.version>
-        
-        <properties-maven-plugin.version>1.0.0</properties-maven-plugin.version>
-        <buildnumber-maven-plugin.version>1.4</buildnumber-maven-plugin.version>
-        <maven-shade-plugin.version>2.4.3</maven-shade-plugin.version>
-        <maven.build.timestamp.format>yyyy-MM-dd HH:mm:ss</maven.build.timestamp.format>
-    </properties>
-
-    <dependencies>
-        <dependency>
-            <groupId>org.testng</groupId>
-            <artifactId>testng</artifactId>
-            <version>6.10</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.jayway.restassured</groupId>
-            <artifactId>rest-assured</artifactId>
-            <version>2.9.0</version>
-        </dependency>
-        
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <version>2.9.7</version>
-        </dependency>
-        
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-core</artifactId>
-            <version>2.9.7</version>
-        </dependency>
-        
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-databind</artifactId>
-            <version>2.9.7</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.amazonaws</groupId>
-            <artifactId>aws-java-sdk-ec2</artifactId>
-            <version>1.9.13</version>
-        </dependency>
-        
-		<dependency>
-    		<groupId>com.amazonaws</groupId>
-    		<artifactId>aws-java-sdk-s3</artifactId>
-    		<version>1.9.13</version>
-		</dependency>
-
-		<dependency>
-			<groupId>org.apache.logging.log4j</groupId>
-			<artifactId>log4j-api</artifactId>
-			<version>2.8.2</version>
-		</dependency>
-		<dependency>
-			<groupId>org.apache.logging.log4j</groupId>
-			<artifactId>log4j-core</artifactId>
-			<version>2.8.2</version>
-		</dependency>
-		<dependency>
-			<groupId>org.slf4j</groupId>
-			<artifactId>slf4j-simple</artifactId>
-			<version>1.7.25</version>
-		</dependency>
-
-        <dependency>
-            <groupId>com.github.docker-java</groupId>
-            <artifactId>docker-java</artifactId>
-            <version>3.0.6</version>
-        </dependency>
-        
-        <dependency>
-            <groupId>org.scijava</groupId>
-            <artifactId>native-lib-loader</artifactId>
-            <version>2.0.2</version>
-        </dependency>
-        
-        <dependency>
-            <groupId>com.jcraft</groupId>
-            <artifactId>jsch</artifactId>
-            <version>0.1.54</version>
-        </dependency>
-
-        <dependency>
-            <groupId>org.mockito</groupId>
-            <artifactId>mockito-core</artifactId>
-            <version>2.13.0</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.microsoft.azure</groupId>
-            <artifactId>azure</artifactId>
-            <version>1.5.1</version>
-        </dependency>
-        <dependency>
-            <groupId>com.microsoft.azure</groupId>
-            <artifactId>azure-mgmt-compute</artifactId>
-            <version>1.5.1</version>
-        </dependency>
-        <dependency>
-            <groupId>com.microsoft.azure</groupId>
-            <artifactId>azure-mgmt-resources</artifactId>
-            <version>1.5.1</version>
-        </dependency>
-        <dependency>
-            <groupId>com.microsoft.azure</groupId>
-            <artifactId>azure-mgmt-network</artifactId>
-            <version>1.5.1</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.google.cloud</groupId>
-            <artifactId>google-cloud-compute</artifactId>
-            <version>0.34.0-alpha</version>
-        </dependency>
-
-        <dependency>
-            <groupId>com.squareup.okio</groupId>
-            <artifactId>okio</artifactId>
-            <version>1.13.0</version>
-        </dependency>
-        <dependency>
-            <groupId>com.nimbusds</groupId>
-            <artifactId>nimbus-jose-jwt</artifactId>
-            <version>5.2</version>
-        </dependency>
-        <dependency>
-            <groupId>net.minidev</groupId>
-            <artifactId>json-smart</artifactId>
-            <version>2.3</version>
-        </dependency>
-        <dependency>
-            <groupId>javax.mail</groupId>
-            <artifactId>javax.mail-api</artifactId>
-            <version>1.6.0</version>
-        </dependency>
-        <dependency>
-            <groupId>org.projectlombok</groupId>
-            <artifactId>lombok</artifactId>
-            <version>1.16.18</version>
-        </dependency>
-    </dependencies>
-
-    <build>
-    	<finalName>${project.artifactId}-${dlab.version}</finalName>
-        <plugins>
-            <plugin>
-                <artifactId>maven-compiler-plugin</artifactId>
-                <version>${maven-compiler-plugin.version}</version>
-                <configuration>
-                    <source>${java.version}</source>
-                    <target>${java.version}</target>
-                    
-                </configuration>
-            </plugin>
-            
-        	<plugin>
-				<groupId>org.codehaus.mojo</groupId>
-				<artifactId>properties-maven-plugin</artifactId>
-				<version>${properties-maven-plugin.version}</version>
-				<executions>
-					<execution>
-						<phase>initialize</phase>
-						<goals>
-							<goal>read-project-properties</goal>
-						</goals>
-						<configuration>
-							<files>
-								<file>../build.properties</file>
-							</files>
-						</configuration>
-					</execution>
-				</executions>
-			</plugin>
-			
-			<plugin>
-				<groupId>org.codehaus.mojo</groupId>
-				<artifactId>buildnumber-maven-plugin</artifactId>
-				<version>${buildnumber-maven-plugin.version}</version>
-				<executions>
-					<execution>
-						<phase>validate</phase>
-						<goals>
-							<goal>create</goal>
-						</goals>
-					</execution>
-				</executions>
-				<configuration>
-					<doCheck>false</doCheck>
-					<doUpdate>false</doUpdate>
-					<getRevisionOnlyOnce>true</getRevisionOnlyOnce>
-					<revisionOnScmFailure>none</revisionOnScmFailure>
-				</configuration>
-      		</plugin>
-			
-            <plugin>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>${maven-shade-plugin.version}</version>
-                <executions>
-                    <execution>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>shade</goal>
-                        </goals>
-                        <configuration>
-                            <createDependencyReducedPom>false</createDependencyReducedPom>
-                            <transformers>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-<!--                                     <mainClass>com.epam.dlab.automation</mainClass> -->
-									<manifestEntries>
-										<Created-By>&lt;EPAM&gt; Systems</Created-By>
-										<Name>com/epam/dlab/automation</Name>
-										<Implementation-Title>DLab Integration Tests</Implementation-Title>
-										<Implementation-Version>${dlab.version}</Implementation-Version>
-										<Implementation-Vendor>&lt;EPAM&gt; Systems</Implementation-Vendor>
-										<Build-Time>${maven.build.timestamp}</Build-Time>
-										<Build-OS>${os.name}</Build-OS>
-										<GIT-Branch>${scmBranch}</GIT-Branch>
-										<GIT-Commit>${buildNumber}</GIT-Commit>
-									</manifestEntries>
-                                </transformer>
-                            </transformers>
-                            <filters>
-                            	<filter>
-                            		<artifact>*:*</artifact>
-                            		<excludes>
-										<exclude>META-INF/*.SF</exclude>
-				                		<exclude>META-INF/*.DSA</exclude>
-				                		<exclude>META-INF/*.RSA</exclude>
-			                		</excludes>
-                            	</filter>
-                            </filters>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-
-            <plugin>
-                <groupId>org.apache.rat</groupId>
-                <artifactId>apache-rat-plugin</artifactId>
-                <version>0.7</version>
-                <configuration>
-                    <excludes>
-                        <exclude>.git/**</exclude>
-                        <exclude>.idea/**</exclude>
-                        <exclude>**/.gitignore</exclude>
-                        <exclude>**/*.ini</exclude>
-                        <exclude>**/*.json</exclude>
-                        <exclude>**/__init__.py</exclude>
-                        <exclude>**/*.conf</exclude>
-                        <exclude>**/.gitkeep</exclude>
-                        <exclude>**/*.lst</exclude>
-                        <exclude>**/*template</exclude>
-                    </excludes>
-                </configuration>
-                <executions>
-                    <execution>
-                        <phase>verify</phase>
-                        <goals>
-                            <goal>check</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-                <configuration>
-                    <useSystemClassLoader>false</useSystemClassLoader>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/VirtualMachineStatusChecker.java b/integration-tests/src/main/java/com/epam/dlab/automation/cloud/VirtualMachineStatusChecker.java
deleted file mode 100644
index 0ecff1d..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/VirtualMachineStatusChecker.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.cloud;
-
-import com.epam.dlab.automation.cloud.aws.AmazonHelper;
-import com.epam.dlab.automation.cloud.aws.AmazonInstanceState;
-import com.epam.dlab.automation.cloud.azure.AzureHelper;
-import com.epam.dlab.automation.cloud.gcp.GcpHelper;
-import com.epam.dlab.automation.cloud.gcp.GcpInstanceState;
-import com.epam.dlab.automation.helper.CloudProvider;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.microsoft.azure.management.compute.PowerState;
-import org.testng.Assert;
-
-import java.io.IOException;
-
-public class VirtualMachineStatusChecker {
-
-	private static final String UNKNOWN_CLOUD_PROVIDER = "Unknown cloud provider";
-
-	private VirtualMachineStatusChecker(){}
-
-    public static void checkIfRunning(String tagNameValue, boolean restrictionMode)
-			throws InterruptedException, IOException {
-
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                AmazonHelper.checkAmazonStatus(tagNameValue, AmazonInstanceState.RUNNING);
-                break;
-            case CloudProvider.AZURE_PROVIDER:
-                AzureHelper.checkAzureStatus(tagNameValue, PowerState.RUNNING, restrictionMode);
-                break;
-            case CloudProvider.GCP_PROVIDER:
-                GcpHelper.checkGcpStatus(tagNameValue, ConfigPropertyValue.getGcpDlabProjectId(),
-                        GcpInstanceState.RUNNING, restrictionMode,
-                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId()));
-                break;
-            default:
-                Assert.fail(UNKNOWN_CLOUD_PROVIDER);
-        }
-
-    }
-
-    public static void checkIfTerminated(String tagNameValue, boolean restrictionMode)
-			throws InterruptedException, IOException {
-
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                AmazonHelper.checkAmazonStatus(tagNameValue, AmazonInstanceState.TERMINATED);
-                break;
-            case CloudProvider.AZURE_PROVIDER:
-                AzureHelper.checkAzureStatus(tagNameValue, PowerState.STOPPED, restrictionMode);
-                break;
-            case CloudProvider.GCP_PROVIDER:
-                GcpHelper.checkGcpStatus(tagNameValue, ConfigPropertyValue.getGcpDlabProjectId(),
-                        GcpInstanceState.TERMINATED, restrictionMode,
-                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId()));
-                break;
-            default:
-                Assert.fail(UNKNOWN_CLOUD_PROVIDER);
-        }
-
-    }
-
-    public static void checkIfStopped(String tagNameValue, boolean restrictionMode)
-			throws InterruptedException, IOException {
-
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                AmazonHelper.checkAmazonStatus(tagNameValue, AmazonInstanceState.STOPPED);
-                break;
-            case CloudProvider.AZURE_PROVIDER:
-                AzureHelper.checkAzureStatus(tagNameValue, PowerState.DEALLOCATED, restrictionMode);
-                break;
-            case CloudProvider.GCP_PROVIDER:
-                GcpHelper.checkGcpStatus(tagNameValue, ConfigPropertyValue.getGcpDlabProjectId(),
-                        GcpInstanceState.TERMINATED, restrictionMode,
-                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId()));
-                break;
-            default:
-                Assert.fail(UNKNOWN_CLOUD_PROVIDER);
-        }
-
-    }
-
-    public static String getStartingStatus() {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return AmazonInstanceState.STARTING.toString();
-            case CloudProvider.AZURE_PROVIDER:
-                return PowerState.STARTING.toString();
-            case CloudProvider.GCP_PROVIDER:
-				return GcpInstanceState.STARTING.toString();
-            default:
-                return "";
-        }
-
-    }
-
-    public static String getRunningStatus(){
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return AmazonInstanceState.RUNNING.toString();
-            case CloudProvider.AZURE_PROVIDER:
-                return PowerState.RUNNING.toString();
-            case CloudProvider.GCP_PROVIDER:
-                return GcpInstanceState.RUNNING.toString();
-            default:
-                return "";
-        }
-
-    }
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/aws/AmazonHelper.java b/integration-tests/src/main/java/com/epam/dlab/automation/cloud/aws/AmazonHelper.java
deleted file mode 100644
index 117f3ee..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/aws/AmazonHelper.java
+++ /dev/null
@@ -1,148 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.cloud.aws;
-
-import com.amazonaws.auth.AWSCredentials;
-import com.amazonaws.auth.BasicAWSCredentials;
-import com.amazonaws.regions.Region;
-import com.amazonaws.regions.Regions;
-import com.amazonaws.services.ec2.AmazonEC2;
-import com.amazonaws.services.ec2.AmazonEC2Client;
-import com.amazonaws.services.ec2.model.*;
-import com.amazonaws.services.s3.AmazonS3;
-import com.amazonaws.services.s3.AmazonS3Client;
-import com.amazonaws.services.s3.model.AccessControlList;
-import com.amazonaws.services.s3.model.Grant;
-import com.epam.dlab.automation.exceptions.CloudException;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.epam.dlab.automation.helper.NamingHelper;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-
-public class AmazonHelper {
-
-    private static final Logger LOGGER = LogManager.getLogger(AmazonHelper.class);
-	private static final Duration CHECK_TIMEOUT = Duration.parse("PT10m");
-	private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
-
-	private AmazonHelper(){}
-	
-	private static AWSCredentials getCredentials() {
-		return new BasicAWSCredentials(ConfigPropertyValue.getAwsAccessKeyId(), ConfigPropertyValue.getAwsSecretAccessKey());
-	}
-	
-	private static Region getRegion() {
-		return Region.getRegion(Regions.fromName(ConfigPropertyValue.getAwsRegion()));
-	}
-
-	private static List<Instance> getInstances(String instanceName) {
-		AWSCredentials credentials = getCredentials();
-		AmazonEC2 ec2 = new AmazonEC2Client(credentials);
-		ec2.setRegion(getRegion());
-
-		List<String> valuesT1 = new ArrayList<>();
-		valuesT1.add(instanceName + "*");
-		Filter filter = new Filter("tag:" + NamingHelper.getServiceBaseName() + "-tag", valuesT1);
-
-		DescribeInstancesRequest describeInstanceRequest = new DescribeInstancesRequest().withFilters(filter);
-		DescribeInstancesResult describeInstanceResult = ec2.describeInstances(describeInstanceRequest);
-
-		List<Reservation> reservations = describeInstanceResult.getReservations();
-
-		if (reservations.isEmpty()) {
-			throw new CloudException("Instance " + instanceName + " in Amazon not found");
-		}
-
-		List<Instance> instances = reservations.get(0).getInstances();
-		if (instances.isEmpty()) {
-			throw new CloudException("Instance " + instanceName + " in Amazon not found");
-		}
-
-		return instances;
-	}
-
-	public static Instance getInstance(String instanceName) {
-    	return (ConfigPropertyValue.isRunModeLocal() ?
-    			new Instance()
-            		.withPrivateDnsName("localhost")
-            		.withPrivateIpAddress(LOCALHOST_IP)
-            		.withPublicDnsName("localhost")
-            		.withPublicIpAddress(LOCALHOST_IP)
-            		.withTags(new Tag()
-            					.withKey("Name")
-            					.withValue(instanceName)) :
-            	getInstances(instanceName).get(0));
-    }
-
-	public static void checkAmazonStatus(String instanceName, AmazonInstanceState expAmazonState) throws
-			InterruptedException {
-        LOGGER.info("Check status of instance {} on Amazon: {}", instanceName);
-        if (ConfigPropertyValue.isRunModeLocal()) {
-        	LOGGER.info("Amazon instance {} fake state is {}", instanceName, expAmazonState);
-        	return;
-        }
-        
-        String instanceState;
-        long requestTimeout = ConfigPropertyValue.getAwsRequestTimeout().toMillis();
-    	long timeout = CHECK_TIMEOUT.toMillis();
-        long expiredTime = System.currentTimeMillis() + timeout;
-        Instance instance;
-        while (true) {
-        	instance = AmazonHelper.getInstance(instanceName);
-        	instanceState = instance.getState().getName();
-        	if (!instance.getState().getName().equals("shutting-down")) {
-        		break;
-        	}
-        	if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-                LOGGER.info("Amazon instance {} state is {}", instanceName, instanceState);
-        		throw new CloudException("Timeout has been expired for check amazon instance " + instanceState);
-            }
-            Thread.sleep(requestTimeout);
-        }
-        
-        for (Instance i : AmazonHelper.getInstances(instanceName)) {
-            LOGGER.info("Amazon instance {} state is {}. Instance id {}, private IP {}, public IP {}",
-            		instanceName, instanceState, i.getInstanceId(), i.getPrivateIpAddress(), i.getPublicIpAddress());
-		}
-        Assert.assertEquals(instanceState, expAmazonState.toString(), "Amazon instance " + instanceName + " state is not correct. Instance id " +
-        		instance.getInstanceId() + ", private IP " + instance.getPrivateIpAddress() + ", public IP " + instance.getPublicIpAddress());
-    }
-
-    public static void printBucketGrants(String bucketName){
-        LOGGER.info("Print grants for bucket {} on Amazon: " , bucketName);
-        if (ConfigPropertyValue.isRunModeLocal()) {
-        	LOGGER.info("  action skipped for run in local mode");
-        	return;
-        }
-        AWSCredentials credentials = getCredentials();
-        AmazonS3 s3 = new AmazonS3Client(credentials);
-        
-        s3.setRegion(getRegion());
-        AccessControlList acl = s3.getBucketAcl(bucketName);
-        for (Grant grant : acl.getGrants()) {
-            LOGGER.info(grant);
-		}
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/aws/AmazonInstanceState.java b/integration-tests/src/main/java/com/epam/dlab/automation/cloud/aws/AmazonInstanceState.java
deleted file mode 100644
index 576be9d..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/aws/AmazonInstanceState.java
+++ /dev/null
@@ -1,32 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.cloud.aws;
-
-public enum AmazonInstanceState {
-    STARTING,
-    RUNNING,
-    TERMINATED,
-    STOPPED;
-	
-    @Override
-    public String toString() {
-    	return super.toString().toLowerCase();
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/azure/AzureHelper.java b/integration-tests/src/main/java/com/epam/dlab/automation/cloud/azure/AzureHelper.java
deleted file mode 100644
index 25fb388..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/azure/AzureHelper.java
+++ /dev/null
@@ -1,154 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.cloud.azure;
-
-import com.epam.dlab.automation.exceptions.CloudException;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.microsoft.azure.management.Azure;
-import com.microsoft.azure.management.compute.PowerState;
-import com.microsoft.azure.management.compute.VirtualMachine;
-import com.microsoft.azure.management.network.NetworkInterface;
-import com.microsoft.azure.management.network.PublicIPAddress;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.io.File;
-import java.io.IOException;
-import java.time.Duration;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-
-public class AzureHelper{
-
-    private static final Logger LOGGER = LogManager.getLogger(AzureHelper.class);
-    private static final Duration CHECK_TIMEOUT = Duration.parse("PT10m");
-    private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
-
-	private static Azure azure = getAzureInstance();
-
-    private AzureHelper(){}
-
-	private static Azure getAzureInstance() {
-		if (!ConfigPropertyValue.isRunModeLocal() && Objects.isNull(azure)) {
-			try {
-				return Azure.configure().authenticate(
-						new File(ConfigPropertyValue.getAzureAuthFileName())).withDefaultSubscription();
-			} catch (IOException e) {
-				LOGGER.info("An exception occured: {}", e);
-			}
-		}
-		return azure;
-	}
-
-    private static List<VirtualMachine> getVirtualMachines(){
-        return !azure.virtualMachines().list().isEmpty() ? new ArrayList<>(azure.virtualMachines().list()) : null;
-    }
-
-    public static List<VirtualMachine> getVirtualMachinesByName(String name, boolean restrictionMode){
-        if(ConfigPropertyValue.isRunModeLocal()){
-
-            List<VirtualMachine> vmLocalModeList = new ArrayList<>();
-            VirtualMachine mockedVM = mock(VirtualMachine.class);
-            PublicIPAddress mockedIPAddress = mock(PublicIPAddress.class);
-            NetworkInterface mockedNetworkInterface = mock(NetworkInterface.class);
-            when(mockedVM.getPrimaryPublicIPAddress()).thenReturn(mockedIPAddress);
-            when(mockedIPAddress.ipAddress()).thenReturn(LOCALHOST_IP);
-            when(mockedVM.getPrimaryNetworkInterface()).thenReturn(mockedNetworkInterface);
-            when(mockedNetworkInterface.primaryPrivateIP()).thenReturn(LOCALHOST_IP);
-            vmLocalModeList.add(mockedVM);
-
-            return vmLocalModeList;
-
-        }
-        List<VirtualMachine> vmList = getVirtualMachines();
-        if(vmList == null){
-            LOGGER.warn("There is not any virtual machine in Azure");
-            return vmList;
-        }
-        if(restrictionMode){
-            vmList.removeIf(vm -> !hasName(vm, name));
-        }
-        else{
-            vmList.removeIf(vm -> !containsName(vm, name));
-        }
-        return !vmList.isEmpty() ? vmList : null;
-    }
-
-    private static boolean hasName(VirtualMachine vm, String name){
-        return vm.name().equals(name);
-    }
-
-    private static boolean containsName(VirtualMachine vm, String name){
-        return vm.name().contains(name);
-    }
-
-    private static PowerState getStatus(VirtualMachine vm){
-        return vm.powerState();
-    }
-
-	public static void checkAzureStatus(String virtualMachineName, PowerState expAzureState, boolean restrictionMode)
-			throws InterruptedException {
-        LOGGER.info("Check status of virtual machine with name {} on Azure", virtualMachineName);
-        if (ConfigPropertyValue.isRunModeLocal()) {
-            LOGGER.info("Azure virtual machine with name {} fake state is {}", virtualMachineName, expAzureState);
-            return;
-        }
-        List<VirtualMachine> vmsWithName = getVirtualMachinesByName(virtualMachineName, restrictionMode);
-        if(vmsWithName == null){
-            LOGGER.warn("There is not any virtual machine in Azure with name {}", virtualMachineName);
-            return;
-        }
-
-        PowerState virtualMachineState;
-        long requestTimeout = ConfigPropertyValue.getAzureRequestTimeout().toMillis();
-        long timeout = CHECK_TIMEOUT.toMillis();
-        long expiredTime = System.currentTimeMillis() + timeout;
-        VirtualMachine virtualMachine = vmsWithName.get(0);
-        while (true) {
-            virtualMachineState = getStatus(virtualMachine);
-            if (virtualMachineState == expAzureState) {
-                break;
-            }
-            if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-                LOGGER.info("Azure virtual machine with name {} state is {}", virtualMachineName, getStatus(virtualMachine));
-                throw new CloudException("Timeout has been expired for check state of azure virtual machine with name " + virtualMachineName);
-            }
-            Thread.sleep(requestTimeout);
-        }
-
-        for (VirtualMachine  vm : vmsWithName) {
-            LOGGER.info("Azure virtual machine with name {} state is {}. Virtual machine id {}, private IP {}, public IP {}",
-                    virtualMachineName, getStatus(vm), vm.vmId(), vm.getPrimaryNetworkInterface().primaryPrivateIP(),
-                    vm.getPrimaryPublicIPAddress() != null ? vm.getPrimaryPublicIPAddress().ipAddress() : "doesn't exist for this resource type");
-        }
-        Assert.assertEquals(virtualMachineState, expAzureState, "Azure virtual machine with name " + virtualMachineName +
-                " state is not correct. Virtual machine id " +
-                virtualMachine.vmId() + ", private IP " + virtualMachine.getPrimaryNetworkInterface().primaryPrivateIP() +
-                ", public IP " +
-                (virtualMachine.getPrimaryPublicIPAddress() != null ? virtualMachine.getPrimaryPublicIPAddress().ipAddress() : "doesn't exist for this resource type" ));
-    }
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/gcp/GcpHelper.java b/integration-tests/src/main/java/com/epam/dlab/automation/cloud/gcp/GcpHelper.java
deleted file mode 100644
index 7240464..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/gcp/GcpHelper.java
+++ /dev/null
@@ -1,236 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.cloud.gcp;
-
-import com.epam.dlab.automation.exceptions.CloudException;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.google.api.client.googleapis.auth.oauth2.GoogleCredential;
-import com.google.api.client.googleapis.javanet.GoogleNetHttpTransport;
-import com.google.api.client.http.HttpTransport;
-import com.google.api.client.json.JsonFactory;
-import com.google.api.client.json.jackson2.JacksonFactory;
-import com.google.api.services.compute.Compute;
-import com.google.api.services.compute.model.*;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.security.GeneralSecurityException;
-import java.time.Duration;
-import java.util.*;
-import java.util.function.Function;
-import java.util.stream.Collectors;
-
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
-
-public class GcpHelper {
-
-	private static final Logger LOGGER = LogManager.getLogger(GcpHelper.class);
-	private static final Duration CHECK_TIMEOUT = Duration.parse("PT10m");
-	private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
-	private static final String NOT_EXIST = "doesn't exist for this resource type";
-
-	private GcpHelper() {
-	}
-
-	private static List<Instance> getInstances(String projectId, List<String> zones) throws IOException {
-		List<Instance> instanceList = new ArrayList<>();
-		for (String zone : zones) {
-			Compute.Instances.List request = ComputeService.getInstance().instances().list(projectId, zone);
-			InstanceList response;
-			do {
-				response = request.execute();
-				if (response.getItems() == null) {
-					continue;
-				}
-				instanceList.addAll(response.getItems());
-				request.setPageToken(response.getNextPageToken());
-			} while (response.getNextPageToken() != null);
-
-		}
-		return !instanceList.isEmpty() ? instanceList : null;
-	}
-
-	public static List<String> getInstancePrivateIps(Instance instance) {
-		return instance.getNetworkInterfaces().stream().filter(Objects::nonNull)
-				.map(NetworkInterface::getNetworkIP).filter(Objects::nonNull)
-				.collect(Collectors.toList());
-	}
-
-	public static List<String> getInstancePublicIps(Instance instance) {
-		return instance.getNetworkInterfaces()
-				.stream().filter(Objects::nonNull)
-				.map(NetworkInterface::getAccessConfigs)
-				.filter(Objects::nonNull).map(Collection::stream)
-				.flatMap(Function.identity()).filter(Objects::nonNull)
-				.map(AccessConfig::getNatIP).filter(Objects::nonNull)
-				.collect(Collectors.toList());
-	}
-
-
-	public static List<Instance> getInstancesByName(String name, String projectId, boolean restrictionMode,
-													List<String> zones) throws IOException {
-		if (ConfigPropertyValue.isRunModeLocal()) {
-			List<Instance> mockedInstanceList = new ArrayList<>();
-			Instance mockedInstance = mock(Instance.class);
-			NetworkInterface mockedNetworkInterface = mock(NetworkInterface.class);
-			when(mockedInstance.getNetworkInterfaces()).thenReturn(Collections.singletonList(mockedNetworkInterface));
-			when(mockedInstance.getNetworkInterfaces().get(0).getNetworkIP()).thenReturn(LOCALHOST_IP);
-			AccessConfig mockedAccessConfig = mock(AccessConfig.class);
-			when(mockedInstance.getNetworkInterfaces().get(0).getAccessConfigs())
-					.thenReturn(Collections.singletonList(mockedAccessConfig));
-			when(mockedInstance.getNetworkInterfaces().get(0).getAccessConfigs().get(0).getNatIP())
-					.thenReturn(LOCALHOST_IP);
-			mockedInstanceList.add(mockedInstance);
-			return mockedInstanceList;
-		}
-		List<Instance> instanceList = getInstances(projectId, zones);
-		if (instanceList == null) {
-			LOGGER.warn("There is not any virtual machine in GCP for project with id {}", projectId);
-			return instanceList;
-		}
-		if (restrictionMode) {
-			instanceList.removeIf(instance -> !hasName(instance, name));
-		} else {
-			instanceList.removeIf(instance -> !containsName(instance, name));
-		}
-		return !instanceList.isEmpty() ? instanceList : null;
-	}
-
-	private static boolean hasName(Instance instance, String name) {
-		return instance.getName().equals(name);
-	}
-
-	private static boolean containsName(Instance instance, String name) {
-		return instance.getName().contains(name);
-	}
-
-	private static String getStatus(Instance instance) {
-		return instance.getStatus().toLowerCase();
-	}
-
-	public static void checkGcpStatus(String instanceName, String projectId, GcpInstanceState expGcpStatus, boolean
-			restrictionMode, List<String> zones) throws InterruptedException, IOException {
-
-		LOGGER.info("Check status of instance with name {} on GCP", instanceName);
-		if (ConfigPropertyValue.isRunModeLocal()) {
-			LOGGER.info("GCP instance with name {} fake status is {}", instanceName, expGcpStatus);
-			return;
-		}
-		List<Instance> instancesWithName = getInstancesByName(instanceName, projectId, restrictionMode, zones);
-		if (instancesWithName == null) {
-			LOGGER.warn("There is not any instance in GCP with name {}", instanceName);
-			return;
-		}
-
-		String instanceStatus;
-		long requestTimeout = ConfigPropertyValue.getGcpRequestTimeout().toMillis();
-		long timeout = CHECK_TIMEOUT.toMillis();
-		long expiredTime = System.currentTimeMillis() + timeout;
-		Instance instance = instancesWithName.get(0);
-		while (true) {
-			instanceStatus = getStatus(instance);
-			if (instanceStatus.equalsIgnoreCase(expGcpStatus.toString())) {
-				break;
-			}
-			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-				LOGGER.info("GCP instance with name {} state is {}", instanceName, getStatus(instance));
-				throw new CloudException("Timeout has been expired for check status of GCP instance with " +
-						"name " + instanceName);
-			}
-			Thread.sleep(requestTimeout);
-		}
-
-		for (Instance inst : instancesWithName) {
-			LOGGER.info("GCP instance with name {} status is {}. Instance id {}, private IP {}, public " +
-							"IP {}",
-					instanceName, getStatus(inst), inst.getId(), (!getInstancePrivateIps(inst).isEmpty() ?
-							getInstancePrivateIps(inst).get(0) : NOT_EXIST),
-					(!getInstancePublicIps(inst).isEmpty() ? getInstancePublicIps(inst).get(0) : NOT_EXIST));
-		}
-		Assert.assertEquals(instanceStatus, expGcpStatus.toString(), "GCP instance with name " + instanceName +
-				" status is not correct. Instance id " + instance.getId() + ", private IP " +
-				(!getInstancePrivateIps(instance).isEmpty() ? getInstancePrivateIps(instance).get(0) : NOT_EXIST) +
-				", public IP " +
-				(!getInstancePublicIps(instance).isEmpty() ? getInstancePublicIps(instance).get(0) : NOT_EXIST));
-	}
-
-	public static List<String> getAvailableZonesForProject(String projectId) throws IOException {
-		if (ConfigPropertyValue.isRunModeLocal()) {
-			return Collections.emptyList();
-		}
-		List<Zone> zoneList = new ArrayList<>();
-		Compute.Zones.List request = ComputeService.getInstance().zones().list(projectId);
-		ZoneList response;
-		do {
-			response = request.execute();
-			if (response.getItems() == null) {
-				continue;
-			}
-			zoneList.addAll(response.getItems());
-			request.setPageToken(response.getNextPageToken());
-		} while (response.getNextPageToken() != null);
-		return zoneList.stream().map(Zone::getDescription).collect(Collectors.toList());
-	}
-
-	private static class ComputeService {
-
-		private static Compute instance;
-
-		private ComputeService() {
-		}
-
-		static synchronized Compute getInstance() throws IOException {
-			if (!ConfigPropertyValue.isRunModeLocal() && instance == null) {
-				try {
-					instance = createComputeService();
-				} catch (GeneralSecurityException e) {
-					LOGGER.info("An exception occured: {}", e);
-				}
-			}
-			return instance;
-		}
-
-		private static Compute createComputeService() throws IOException, GeneralSecurityException {
-			HttpTransport httpTransport = GoogleNetHttpTransport.newTrustedTransport();
-			JsonFactory jsonFactory = JacksonFactory.getDefaultInstance();
-
-			GoogleCredential credential =
-					GoogleCredential.fromStream(new FileInputStream(ConfigPropertyValue.getGcpAuthFileName()));
-			if (credential.createScopedRequired()) {
-				credential = credential.createScoped(
-						Collections.singletonList("https://www.googleapis.com/auth/cloud-platform"));
-			}
-
-			return new Compute.Builder(httpTransport, jsonFactory, credential)
-					.setApplicationName("Google-ComputeSample/0.1")
-					.build();
-		}
-
-	}
-
-}
-
-
-
-
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/gcp/GcpInstanceState.java b/integration-tests/src/main/java/com/epam/dlab/automation/cloud/gcp/GcpInstanceState.java
deleted file mode 100644
index 5c084c1..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/cloud/gcp/GcpInstanceState.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.cloud.gcp;
-
-public enum GcpInstanceState {
-	STARTING,
-	RUNNING,
-	TERMINATED,
-	STOPPED;
-
-	@Override
-	public String toString() {
-		return super.toString().toLowerCase();
-	}
-}
-
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/AckStatus.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/AckStatus.java
deleted file mode 100644
index df4b7e7..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/AckStatus.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-public class AckStatus {
-    private int status;
-    
-    private String message;
-
-    public AckStatus() {
-        status = 0;
-        message = "";
-    }
-
-	AckStatus(int status, String message) {
-        this.status = status;
-        this.message = message;
-    }
-
-    public int getStatus() {
-        return status;
-    }
-
-    public String getMessage() {
-        return message;
-    }
-
-    public boolean isOk() {
-        return status == 0;
-    }
-
-    @Override
-    public String toString() {
-        return isOk() ? "OK" : message;
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Bridge.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/Bridge.java
deleted file mode 100644
index 8cc6015..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Bridge.java
+++ /dev/null
@@ -1,160 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-public class Bridge {
-
-	@JsonProperty("IPAMConfig")
-	private Object ipamConfig;
-
-	@JsonProperty("Links")
-	private Object links;
-
-	@JsonProperty("Aliases")
-	private Object aliases;
-
-	@JsonProperty("NetworkID")
-	private String networkId;
-
-	@JsonProperty("EndpointID")
-	private String endpointId;
-
-	@JsonProperty("Gateway")
-	private String gateway;
-
-	@JsonProperty("IPAddress")
-	private String ipAddress;
-
-	@JsonProperty("IPPrefixLen")
-	private int ipPrefixLen;
-
-	@JsonProperty("IPv6Gateway")
-	private String ipv6Gateway;
-
-	@JsonProperty("GlobalIPv6Address")
-	private String globalIpv6Address;
-
-	@JsonProperty("GlobalIPv6PrefixLen")
-	private int globalIpv6PrefixLen;
-
-	@JsonProperty("MacAddress")
-	private String macAddress;
-
-
-	public Object getIpamConfig() {
-		return ipamConfig;
-	}
-
-	public void setIpamConfig(Object ipamConfig) {
-		this.ipamConfig = ipamConfig;
-	}
-
-	public Object getLinks() {
-		return links;
-	}
-
-	public void setLinks(Object links) {
-		this.links = links;
-	}
-
-	public Object getAliases() {
-		return aliases;
-	}
-
-	public void setAliases(Object aliases) {
-		this.aliases = aliases;
-	}
-
-	public String getNetworkId() {
-		return networkId;
-	}
-
-	public void setNetworkId(String networkId) {
-		this.networkId = networkId;
-	}
-
-	public String getEndpointId() {
-		return endpointId;
-	}
-
-	public void setEndpointId(String endpointId) {
-		this.endpointId = endpointId;
-	}
-
-	public String getGateway() {
-		return gateway;
-	}
-
-	public void setGateway(String gateway) {
-		this.gateway = gateway;
-	}
-
-	public String getIpAddress() {
-		return ipAddress;
-	}
-
-	public void setIpAddress(String ipAddress) {
-		this.ipAddress = ipAddress;
-	}
-
-	public int getIpPrefixLen() {
-		return ipPrefixLen;
-	}
-
-	public void setIpPrefixLen(int ipPrefixLen) {
-		this.ipPrefixLen = ipPrefixLen;
-	}
-
-	public String getIpv6Gateway() {
-		return ipv6Gateway;
-	}
-
-	public void setIpv6Gateway(String ipv6Gateway) {
-		this.ipv6Gateway = ipv6Gateway;
-	}
-
-	public String getGlobalIpv6Address() {
-		return globalIpv6Address;
-	}
-
-	public void setGlobalIpv6Address(String globalIpv6Address) {
-		this.globalIpv6Address = globalIpv6Address;
-	}
-
-	public int getGlobalIpv6PrefixLen() {
-		return globalIpv6PrefixLen;
-	}
-
-	public void setGlobalIpv6PrefixLen(int globalIpv6PrefixLen) {
-		this.globalIpv6PrefixLen = globalIpv6PrefixLen;
-	}
-
-	public String getMacAddress() {
-		return macAddress;
-	}
-
-	public void setMacAddress(String macAddress) {
-		this.macAddress = macAddress;
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Docker.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/Docker.java
deleted file mode 100644
index 012edaf..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Docker.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-import com.epam.dlab.automation.exceptions.DockerException;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.jcraft.jsch.ChannelExec;
-import com.jcraft.jsch.JSchException;
-import com.jcraft.jsch.Session;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.util.ArrayList;
-import java.util.List;
-
-public class Docker {
-    private static final Logger LOGGER = LogManager.getLogger(Docker.class);
-    
-    private static final String GET_CONTAINERS = "echo -e \"GET /containers/json?all=1 HTTP/1.0\\r\\n\" | nc -U /var/run/docker.sock";
-    
-    private static final String DOCKER_STATUS_EXIT0 = "Exited (0)";
-
-    private Docker(){}
-
-    public static void checkDockerStatus(String containerName, String ip)
-			throws IOException, InterruptedException, JSchException {
-        
-        LOGGER.info("Check docker status for instance {} and container {}", ip, containerName);
-        if (ConfigPropertyValue.isRunModeLocal()) {
-        	LOGGER.info("  check skipped for run in local mode");
-        	return;
-        }
-
-        Session session = SSHConnect.getConnect(ConfigPropertyValue.getClusterOsUser(), ip, 22);
-        ChannelExec getResult = SSHConnect.setCommand(session, GET_CONTAINERS);
-        InputStream in = getResult.getInputStream();
-        List<DockerContainer> dockerContainerList = getDockerContainerList(in);
-        AckStatus status = SSHConnect.checkAck(getResult);
-        Assert.assertTrue(status.isOk());
-        
-        DockerContainer dockerContainer = getDockerContainer(dockerContainerList, containerName);
-        LOGGER.debug("Docker container for {} has id {} and status {}", containerName, dockerContainer.getId(), dockerContainer.getStatus());
-        Assert.assertEquals(dockerContainer.getStatus().contains(DOCKER_STATUS_EXIT0), true, "Status of container is not Exited (0)");
-        LOGGER.info("Docker container {} has status {}", containerName, DOCKER_STATUS_EXIT0);
-    }
-
-    private static List<DockerContainer> getDockerContainerList(InputStream in) throws IOException {
-        
-        BufferedReader reader = new BufferedReader(new InputStreamReader(in));         
-        String line;
-        List<DockerContainer> dockerContainerList = null;
-
-        TypeReference<List<DockerContainer>> typeRef = new TypeReference<List<DockerContainer>>() { };
-        ObjectMapper mapper = new ObjectMapper();
-
-		List<String> result = new ArrayList<>();
-        while ((line = reader.readLine()) != null) {      
-             result.add(line);
-             if (line.contains("Id")) {
-            	 LOGGER.trace("Add docker container: {}", line);
-                 dockerContainerList = mapper.readValue(line, typeRef);
-             }       
-        }
-        
-        return dockerContainerList;
-    }
-
-	private static DockerContainer getDockerContainer(List<DockerContainer> dockerContainerList, String
-			containerName) {
-		for (DockerContainer dockerContainer : dockerContainerList) {
-			String name = dockerContainer.getNames().get(0);
-			if (name.contains(containerName)) {
-				return dockerContainer;
-			}
-		}
-        
-        final String msg = "Docker container for " + containerName + " not found";
-        LOGGER.error(msg);
-		StringBuilder containers = new StringBuilder("Container list:");
-		for (DockerContainer dockerContainer : dockerContainerList) {
-			containers.append(System.lineSeparator()).append(dockerContainer.getNames().get(0));
-		}
-		LOGGER.debug(containers.toString());
-
-		throw new DockerException("Docker container for " + containerName + " not found");
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/DockerContainer.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/DockerContainer.java
deleted file mode 100644
index c22a688..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/DockerContainer.java
+++ /dev/null
@@ -1,171 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-import java.util.List;
-
-public class DockerContainer {
-
-	@JsonProperty("Id")
-	private String id;
-
-	@JsonProperty("Names")
-	private List<String> names;
-
-	@JsonProperty("Image")
-	private String image;
-
-	@JsonProperty("ImageID")
-	private String imageID;
-
-	@JsonProperty("Command")
-	private String command;
-
-	@JsonProperty("Created")
-	private int created;
-
-	@JsonProperty("Ports")
-	private List<Object> ports;
-
-	@JsonProperty("Labels")
-	private Labels labels;
-
-	@JsonProperty("State")
-	private String state;
-
-	@JsonProperty("Status")
-	private String status;
-
-	@JsonProperty("HostConfig")
-	private HostConfig hostConfig;
-
-	@JsonProperty("NetworkSettings")
-	private NetworkSettings networkSettings;
-
-	@JsonProperty("Mounts")
-	private List<Object> mounts;
-
-
-    public String getId() {
-		return id;
-	}
-
-	public void setId(String id) {
-		this.id = id;
-	}
-
-	public List<String> getNames() {
-		return names;
-	}
-
-	public void setNames(List<String> names) {
-		this.names = names;
-	}
-
-	public String getImage() {
-		return image;
-	}
-
-	public void setImage(String image) {
-		this.image = image;
-	}
-
-	public String getImageID() {
-		return imageID;
-	}
-
-	public void setImageID(String imageID) {
-		this.imageID = imageID;
-	}
-
-	public String getCommand() {
-		return command;
-	}
-
-	public void setCommand(String command) {
-		this.command = command;
-	}
-
-	public int getCreated() {
-		return created;
-	}
-
-	public void setCreated(int created) {
-		this.created = created;
-	}
-
-	public List<Object> getPorts() {
-		return ports;
-	}
-
-	public void setPorts(List<Object> ports) {
-		this.ports = ports;
-	}
-
-	public Labels getLabels() {
-		return labels;
-	}
-
-	public void setLabels(Labels labels) {
-		this.labels = labels;
-	}
-
-	public String getState() {
-		return state;
-	}
-
-	public void setState(String state) {
-		this.state = state;
-	}
-
-	public String getStatus() {
-		return status;
-	}
-
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-	public HostConfig getHostConfig() {
-		return hostConfig;
-	}
-
-	public void setHostConfig(HostConfig hostConfig) {
-		this.hostConfig = hostConfig;
-	}
-
-	public NetworkSettings getNetworkSettings() {
-		return networkSettings;
-	}
-
-	public void setNetworkSettings(NetworkSettings networkSettings) {
-		this.networkSettings = networkSettings;
-	}
-
-	public List<Object> getMounts() {
-		return mounts;
-	}
-
-	public void setMounts(List<Object> mounts) {
-		this.mounts = mounts;
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/HostConfig.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/HostConfig.java
deleted file mode 100644
index c2d5db0..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/HostConfig.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class HostConfig {
-
-	@JsonProperty("NetworkMode")
-	private String networkMode;
-
-    public String getNetworkMode() {
-		return networkMode;
-    }
-
-    public void setNetworkMode(String networkMode) {
-		this.networkMode = networkMode;
-    }
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/NetworkSettings.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/NetworkSettings.java
deleted file mode 100644
index 295c217..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/NetworkSettings.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class NetworkSettings {
-
-	@JsonProperty("Networks")
-	private Networks networks;
-
-    public Networks getNetworks() {
-		return networks;
-    }
-
-    public void setNetworks(Networks networks) {
-		this.networks = networks;
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Networks.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/Networks.java
deleted file mode 100644
index 2679fa3..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Networks.java
+++ /dev/null
@@ -1,34 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-public class Networks {
-
-	private Bridge bridge;
-
-    public Bridge getBridge() {
-		return bridge;
-    }
-
-    public void setBridge(Bridge bridge) {
-		this.bridge = bridge;
-    }
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/SSHConnect.java b/integration-tests/src/main/java/com/epam/dlab/automation/docker/SSHConnect.java
deleted file mode 100644
index fa369fa..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/SSHConnect.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.docker;
-
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.jcraft.jsch.*;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.util.Properties;
-
-import static java.lang.System.err;
-import static java.lang.System.out;
-
-public class SSHConnect {
-	private static final Logger LOGGER = LogManager.getLogger(SSHConnect.class);
-	private static final String LOCALHOST_IP = ConfigPropertyValue.get("LOCALHOST_IP");
-	private static final String STRICT_HOST_KEY_CHECKING = "StrictHostKeyChecking";
-
-	private SSHConnect() {
-	}
-
-    public static Session getConnect(String username, String host, int port) throws JSchException {
-        Session session;
-        JSch jsch = new JSch();
-
-        Properties config = new Properties();
-		config.put(STRICT_HOST_KEY_CHECKING, "no");
-        
-        jsch.addIdentity(ConfigPropertyValue.getAccessKeyPrivFileName());
-        session = jsch.getSession(username, host, port);
-        session.setConfig(config);
-
-        LOGGER.info("Connecting as {} to {}:{}", username, host, port);
-        session.connect();
-
-        LOGGER.info("Getting connected to {}:{}", host, port);
-        return session;
-    }
-
-    public static Session getSession(String username, String host, int port) throws JSchException {
-        Session session;
-        JSch jsch = new JSch();
-
-        Properties config = new Properties();
-		config.put(STRICT_HOST_KEY_CHECKING, "no");
-
-        jsch.addIdentity(ConfigPropertyValue.getAccessKeyPrivFileName());
-        session = jsch.getSession(username, host, port);
-        session.setConfig(config);
-        session.connect();
-
-
-        LOGGER.info("Getting connected to {}:{}", host, port);
-        return session;
-    }
-
-    public static ChannelSftp getChannelSftp(Session session) throws JSchException {
-        Channel channel = session.openChannel("sftp");
-        channel.connect();
-		return (ChannelSftp) channel;
-    }
-
-    public static Session getForwardedConnect(String username, String hostAlias, int port) throws JSchException {
-        Session session;
-        JSch jsch = new JSch();
-        Properties config = new Properties();
-		config.put(STRICT_HOST_KEY_CHECKING, "no");
-
-        jsch.addIdentity(ConfigPropertyValue.getAccessKeyPrivFileName());
-        session = jsch.getSession(username, LOCALHOST_IP, port);
-        session.setConfig(config);
-        session.setHostKeyAlias(hostAlias);
-        session.connect();
-        LOGGER.info("Getting connected to {} through {}:{}", hostAlias, LOCALHOST_IP, port);
-        return session;
-    }
-
-	public static ChannelExec setCommand(Session session, String command) throws JSchException {
-        LOGGER.info("Setting command: {}", command);
-
-        ChannelExec channelExec = (ChannelExec)session.openChannel("exec");
-        channelExec.setCommand(command);
-        channelExec.connect();
-
-        return channelExec;
-    }
-
-	public static AckStatus checkAck(ChannelExec channel) throws InterruptedException {
-		channel.setOutputStream(out, true);
-		channel.setErrStream(err, true);
-
-        int status;
-        while(channel.getExitStatus() == -1) {
-            Thread.sleep(1000);
-        }
-        status = channel.getExitStatus();
-
-        return new AckStatus(status, "");
-    }
-
-	public static AckStatus checkAck(ChannelSftp channel) throws InterruptedException {
-		channel.setOutputStream(out, true);
-
-        int status;
-        while(channel.getExitStatus() == -1) {
-            Thread.sleep(1000);
-        }
-        status = channel.getExitStatus();
-
-        return new AckStatus(status, "");
-    }
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/CloudException.java b/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/CloudException.java
deleted file mode 100644
index b576931..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/CloudException.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.exceptions;
-
-public class CloudException extends RuntimeException {
-
-	private static final long serialVersionUID = 1L;
-
-    public CloudException(String message){
-        super(message);
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/DockerException.java b/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/DockerException.java
deleted file mode 100644
index ae8d7df..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/DockerException.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.exceptions;
-
-public class DockerException extends RuntimeException {
-
-	private static final long serialVersionUID = 1L;
-
-	public DockerException(String message) {
-		super(message);
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/JenkinsException.java b/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/JenkinsException.java
deleted file mode 100644
index 4b70836..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/JenkinsException.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.exceptions;
-
-public class JenkinsException extends RuntimeException {
-
-	private static final long serialVersionUID = 1L;
-
-	public JenkinsException(String message) {
-		super(message);
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/LoadFailException.java b/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/LoadFailException.java
deleted file mode 100644
index 16d4f20..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/exceptions/LoadFailException.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.exceptions;
-
-public class LoadFailException extends RuntimeException {
-
-	private static final long serialVersionUID = 1L;
-
-	public LoadFailException(String message, Exception cause) {
-		super(message, cause);
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/helper/CloudHelper.java b/integration-tests/src/main/java/com/epam/dlab/automation/helper/CloudHelper.java
deleted file mode 100644
index 4e0894f..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/helper/CloudHelper.java
+++ /dev/null
@@ -1,220 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.helper;
-
-import com.amazonaws.services.ec2.model.Instance;
-import com.amazonaws.services.ec2.model.Tag;
-import com.epam.dlab.automation.cloud.aws.AmazonHelper;
-import com.epam.dlab.automation.cloud.azure.AzureHelper;
-import com.epam.dlab.automation.cloud.gcp.GcpHelper;
-import com.epam.dlab.automation.exceptions.CloudException;
-import com.epam.dlab.automation.model.DeployClusterDto;
-import com.epam.dlab.automation.model.DeployDataProcDto;
-import com.epam.dlab.automation.model.DeployEMRDto;
-import com.epam.dlab.automation.model.NotebookConfig;
-import org.apache.commons.lang3.StringUtils;
-
-import java.io.IOException;
-import java.util.List;
-
-public class CloudHelper {
-
-    private CloudHelper(){}
-
-	public static String getInstancePublicIP(String name, boolean restrictionMode) throws IOException {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return AmazonHelper.getInstance(name).getPublicIpAddress();
-            case CloudProvider.AZURE_PROVIDER:
-                if(AzureHelper.getVirtualMachinesByName(name, restrictionMode) != null){
-                    return AzureHelper.getVirtualMachinesByName(name, restrictionMode).get(0)
-                            .getPrimaryPublicIPAddress().ipAddress();
-                } else return null;
-            case CloudProvider.GCP_PROVIDER:
-                List<com.google.api.services.compute.model.Instance> instanceList =
-                        GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDlabProjectId(),
-                                restrictionMode,
-                                GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId()));
-                if (instanceList != null && !GcpHelper.getInstancePublicIps(instanceList.get(0)).isEmpty()) {
-                    return GcpHelper.getInstancePublicIps(instanceList.get(0)).get(0);
-                } else return null;
-            default:
-                return null;
-        }
-    }
-
-	public static String getInstancePrivateIP(String name, boolean restrictionMode) throws IOException {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return AmazonHelper.getInstance(name).getPrivateIpAddress();
-            case CloudProvider.AZURE_PROVIDER:
-                if(AzureHelper.getVirtualMachinesByName(name, restrictionMode) != null){
-                    return AzureHelper.getVirtualMachinesByName(name, restrictionMode).get(0)
-                            .getPrimaryNetworkInterface().primaryPrivateIP();
-                } else return null;
-            case CloudProvider.GCP_PROVIDER:
-                List<com.google.api.services.compute.model.Instance> instanceList =
-                        GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDlabProjectId(), restrictionMode,
-                                GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId()));
-                if (instanceList != null && !GcpHelper.getInstancePrivateIps(instanceList.get(0)).isEmpty()) {
-                    return GcpHelper.getInstancePrivateIps(instanceList.get(0)).get(0);
-                } else return null;
-            default:
-                return null;
-        }
-    }
-
-	static String getInstanceNameByCondition(String name, boolean restrictionMode) throws IOException {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                Instance instance = AmazonHelper.getInstance(name);
-                for (Tag tag : instance.getTags()) {
-                    if (tag.getKey().equals("Name")) {
-                        return tag.getValue();
-                    }
-                }
-                throw new CloudException("Could not detect name for instance " + name);
-            case CloudProvider.AZURE_PROVIDER:
-                if(AzureHelper.getVirtualMachinesByName(name, restrictionMode) != null){
-                    return AzureHelper.getVirtualMachinesByName(name, restrictionMode).get(0).name();
-                } else return null;
-            case CloudProvider.GCP_PROVIDER:
-                if (GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDlabProjectId(), restrictionMode,
-                        GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId())) != null) {
-                    return GcpHelper.getInstancesByName(name, ConfigPropertyValue.getGcpDlabProjectId(),
-                            restrictionMode,
-                            GcpHelper.getAvailableZonesForProject(ConfigPropertyValue.getGcpDlabProjectId()))
-                            .get(0).getName();
-                }
-                else return null;
-            default:
-                return null;
-        }
-    }
-
-    public static String getClusterConfFileLocation(){
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return PropertiesResolver.getClusterEC2ConfFileLocation();
-            case CloudProvider.AZURE_PROVIDER:
-                return PropertiesResolver.getClusterAzureConfFileLocation();
-            case CloudProvider.GCP_PROVIDER:
-                return PropertiesResolver.getClusterGcpConfFileLocation();
-            default:
-                return null;
-        }
-    }
-
-
-    public static String getPythonTestingScript(){
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return "/usr/bin/python %s --storage %s --cloud aws --cluster_name %s --os_user %s";
-            case CloudProvider.AZURE_PROVIDER:
-                if(ConfigPropertyValue.getAzureDatalakeEnabled().equalsIgnoreCase("true")){
-                    return "/usr/bin/python %s --storage %s --cloud azure --cluster_name %s --os_user %s --azure_datalake_account "
-                            + ConfigPropertyValue.getAzureDatalakeSharedAccount();
-                }
-                else return "/usr/bin/python %s --storage %s --cloud azure --cluster_name %s --os_user %s --azure_storage_account "
-                        + ConfigPropertyValue.getAzureStorageSharedAccount();
-            case CloudProvider.GCP_PROVIDER:
-                return "/usr/bin/python %s --storage %s --cloud gcp --cluster_name %s --os_user %s";
-            default:
-                return null;
-        }
-    }
-
-    public static String getPythonTestingScript2(){
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-				return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud aws";
-            case CloudProvider.AZURE_PROVIDER:
-                if(ConfigPropertyValue.getAzureDatalakeEnabled().equalsIgnoreCase("true")){
-					return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud azure " +
-							"--azure_datalake_account " + ConfigPropertyValue.getAzureDatalakeSharedAccount();
-                } else return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud azure " +
-						"--azure_storage_account " + ConfigPropertyValue.getAzureStorageSharedAccount();
-            case CloudProvider.GCP_PROVIDER:
-				return "/usr/bin/python /home/%s/%s --storage %s --notebook %s --cloud gcp";
-            default:
-                return null;
-        }
-    }
-
-	static String getStorageNameAppendix() {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return "bucket";
-            case CloudProvider.AZURE_PROVIDER:
-                if(ConfigPropertyValue.getAzureDatalakeEnabled().equalsIgnoreCase("true")){
-                    return "folder";
-                }
-                else return "container";
-            case CloudProvider.GCP_PROVIDER:
-                return "bucket";
-            default:
-                return null;
-        }
-    }
-
-	public static String getDockerTemplateFileForDES(boolean isSpotRequired) {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-				return isSpotRequired ? "EMR_spot.json" : "EMR.json";
-            case CloudProvider.GCP_PROVIDER:
-                return "dataproc.json";
-            default:
-                return null;
-        }
-    }
-
-    public static Class<? extends DeployClusterDto> getDeployClusterClass() {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-                return DeployEMRDto.class;
-            case CloudProvider.GCP_PROVIDER:
-                return DeployDataProcDto.class;
-            default:
-                return null;
-        }
-    }
-
-	public static DeployClusterDto populateDeployClusterDto(DeployClusterDto deployClusterDto,
-															NotebookConfig nbConfig) {
-		if (nbConfig.getDataEngineType().equals(NamingHelper.DATA_ENGINE_SERVICE) &&
-				ConfigPropertyValue.getCloudProvider().equals(CloudProvider.AWS_PROVIDER)) {
-			DeployEMRDto emrDto = (DeployEMRDto) deployClusterDto;
-			if (!StringUtils.isEmpty(nbConfig.getDesVersion())) {
-				emrDto.setEmrVersion(nbConfig.getDesVersion());
-			}
-			if (nbConfig.isDesSpotRequired() && nbConfig.getDesSpotPrice() > 0) {
-				emrDto.setEmrSlaveInstanceSpot(nbConfig.isDesSpotRequired());
-				emrDto.setEmrSlaveInstanceSpotPctPrice(nbConfig.getDesSpotPrice());
-			}
-			return emrDto;
-		} else return deployClusterDto;
-	}
-
-	static String getGcpDataprocClusterName(String gcpDataprocMasterNodeName) {
-        return gcpDataprocMasterNodeName != null ?
-                gcpDataprocMasterNodeName.substring(0, gcpDataprocMasterNodeName.lastIndexOf('-')) : null;
-	}
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/helper/CloudProvider.java b/integration-tests/src/main/java/com/epam/dlab/automation/helper/CloudProvider.java
deleted file mode 100644
index f5241a4..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/helper/CloudProvider.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.helper;
-
-public class CloudProvider {
-
-	public static final String AWS_PROVIDER = "aws";
-	public static final String AZURE_PROVIDER = "azure";
-	public static final String GCP_PROVIDER = "gcp";
-
-	private CloudProvider() {
-	}
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/helper/ConfigPropertyValue.java b/integration-tests/src/main/java/com/epam/dlab/automation/helper/ConfigPropertyValue.java
deleted file mode 100644
index aeb6036..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/helper/ConfigPropertyValue.java
+++ /dev/null
@@ -1,387 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.helper;
-
-import com.epam.dlab.automation.exceptions.LoadFailException;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.File;
-import java.io.FileReader;
-import java.nio.file.Paths;
-import java.time.Duration;
-import java.util.Properties;
-
-public class ConfigPropertyValue {
-
-	private static final Logger LOGGER = LogManager.getLogger(ConfigPropertyValue.class);
-	private static final String CONFIG_FILE_NAME;
-
-    public static final String JENKINS_USERNAME="JENKINS_USERNAME";
-	public static final String JENKINS_PASS = "JENKINS_PASSWORD";
-	private static final String USERNAME="USERNAME";
-	private static final String PASS = "PASSWORD";
-	private static final String NOT_IAM_USERNAME="NOT_IAM_USERNAME";
-	private static final String NOT_IAM_PASS = "NOT_IAM_PASSWORD";
-	private static final String NOT_DLAB_USERNAME="NOT_DLAB_USERNAME";
-	private static final String NOT_DLAB_PASS = "NOT_DLAB_PASSWORD";
-	private static final String JENKINS_JOB_URL="JENKINS_JOB_URL";
-	private static final String USER_FOR_ACTIVATE_KEY="USER_FOR_ACTIVATE_KEY";
-	private static final String PASS_FOR_ACTIVATE_KEY = "PASSWORD_FOR_ACTIVATE_KEY";
-	private static final String ACCESS_KEY_PRIV_FILE_NAME="ACCESS_KEY_PRIV_FILE_NAME";
-	private static final String ACCESS_KEY_PUB_FILE_NAME="ACCESS_KEY_PUB_FILE_NAME";
-
-	private static final  String CLOUD_PROVIDER="CLOUD_PROVIDER";
-    
-    private static final String AWS_ACCESS_KEY_ID="AWS_ACCESS_KEY_ID";
-    private static final String AWS_SECRET_ACCESS_KEY="AWS_SECRET_ACCESS_KEY";
-    private static final String AWS_REGION="AWS_REGION";
-    private static final String AWS_REQUEST_TIMEOUT="AWS_REQUEST_TIMEOUT";
-
-    private static final String AZURE_REGION="AZURE_REGION";
-    private static final String AZURE_REQUEST_TIMEOUT="AZURE_REQUEST_TIMEOUT";
-    private static final String AZURE_DATALAKE_ENABLED="AZURE_DATALAKE_ENABLED";
-    private static final String AZURE_DATALAKE_SHARED_ACCOUNT="AZURE_DATALAKE_SHARED_ACCOUNT";
-    private static final String AZURE_STORAGE_SHARED_ACCOUNT="AZURE_STORAGE_SHARED_ACCOUNT";
-	private static final String AZURE_AUTHENTICATION_FILE = "AZURE_AUTHENTICATION_FILE";
-
-	private static final String GCP_DLAB_PROJECT_ID = "GCP_DLAB_PROJECT_ID";
-    private static final String GCP_REGION="GCP_REGION";
-	private static final String GCP_REQUEST_TIMEOUT = "GCP_REQUEST_TIMEOUT";
-	private static final String GCP_AUTHENTICATION_FILE = "GCP_AUTHENTICATION_FILE";
-
-    private static final String TIMEOUT_JENKINS_AUTOTEST="TIMEOUT_JENKINS_AUTOTEST";
-    private static final String TIMEOUT_UPLOAD_KEY="TIMEOUT_UPLOAD_KEY";
-    private static final String TIMEOUT_SSN_STARTUP="TIMEOUT_SSN_STARTUP";
-
-    private static final String CLUSTER_OS_USERNAME = "CLUSTER_OS_USERNAME";
-    private static final String CLUSTER_OS_FAMILY = "CLUSTER_OS_FAMILY";
-    private static final String CONF_TAG_RESOURCE_ID = "CONF_TAG_RESOURCE_ID";
-
-	private static final String JUPYTER_SCENARIO_FILES = "JUPYTER_SCENARIO_FILES";
-	private static final String NOTEBOOKS_TO_TEST = "NOTEBOOKS_TO_TEST";
-	private static final String SKIPPED_LIBS = "SKIPPED_LIBS";
-	private static final String EXECUTION_TREADS = "execution.threads";
-
-    private static final String USE_JENKINS = "USE_JENKINS";
-    private static final String SSN_URL = "SSN_URL";
-    private static final String SERVICE_BASE_NAME = "SERVICE_BASE_NAME";
-    private static final String RUN_MODE_LOCAL = "RUN_MODE_LOCAL";
-    private static final String LOCALHOST_IP = "LOCALHOST_IP";
-
-    private static String jenkinsBuildNumber;
-
-
-    private static final Properties props = new Properties();
-
-    static {
-        CONFIG_FILE_NAME = PropertiesResolver.getConfFileLocation();
-        jenkinsBuildNumber = System.getProperty("jenkins.buildNumber", "");
-        if (jenkinsBuildNumber.isEmpty()) {
-            jenkinsBuildNumber = null;
-            LOGGER.info("Jenkins build number missed");
-        }
-        
-    	loadProperties();
-    }
-    
-    private ConfigPropertyValue() { }
-	
-    private static Duration getDuration(String duaration) {
-    	return Duration.parse("PT" + duaration);
-    }
-    
-	public static String get(String propertyName) {
-		return get(propertyName, "");
-	}
-
-	public static String get(String propertyName, String defaultValue) {
-		return props.getProperty(propertyName, defaultValue);
-	}
-
-	private static int getInt(String value) {
-        return Integer.parseInt(value);
-    }
-	
-	public static int get(String propertyName, int defaultValue) {
-		if (props.values().isEmpty()) {
-			loadProperties();
-		}
-		String s = props.getProperty(propertyName, String.valueOf(defaultValue)); 
-		return Integer.parseInt(s);
-	}
-	
-	private static void printProperty(String propertyName) {
-        LOGGER.info("{} is {}", propertyName , props.getProperty(propertyName));
-	}
-	
-	private static void setKeyProperty(String propertyName) {
-		String s = props.getProperty(propertyName, "");
-		if (!s.isEmpty()) {
-            s = Paths.get(PropertiesResolver.getKeysLocation(), s).toAbsolutePath().toString();
-            props.setProperty(propertyName, s);
-        }
-	}
-	
-	private static void loadProperties() {
-        try (FileReader fin = new FileReader(new File(CONFIG_FILE_NAME))) {
-
-            props.load(fin);
-
-            PropertiesResolver.overlapProperty(props, CLUSTER_OS_USERNAME, true);
-            PropertiesResolver.overlapProperty(props, CLUSTER_OS_FAMILY, true);
-            PropertiesResolver.overlapProperty(props, AWS_REGION, true);
-            PropertiesResolver.overlapProperty(props, AZURE_REGION, true);
-			PropertiesResolver.overlapProperty(props, GCP_DLAB_PROJECT_ID, true);
-            PropertiesResolver.overlapProperty(props, GCP_REGION, true);
-            PropertiesResolver.overlapProperty(props, NOTEBOOKS_TO_TEST, false);
-			PropertiesResolver.overlapProperty(props, SKIPPED_LIBS, true);
-			PropertiesResolver.overlapProperty(props, USE_JENKINS, true);
-            PropertiesResolver.overlapProperty(props, JENKINS_JOB_URL, !isUseJenkins());
-            PropertiesResolver.overlapProperty(props, SSN_URL, isUseJenkins());
-            PropertiesResolver.overlapProperty(props, SERVICE_BASE_NAME, isUseJenkins());
-            PropertiesResolver.overlapProperty(props, RUN_MODE_LOCAL, true);
-            
-            setKeyProperty(ACCESS_KEY_PRIV_FILE_NAME);
-            setKeyProperty(ACCESS_KEY_PUB_FILE_NAME);
-        } catch (Exception e) {
-        	LOGGER.fatal("Load properties from file {} fails.", CONFIG_FILE_NAME, e);
-			throw new LoadFailException("Load properties from \"" + CONFIG_FILE_NAME + "\" fails. " +
-					e.getLocalizedMessage(), e);
-        }
-        
-        printProperty(JENKINS_USERNAME);
-		printProperty(JENKINS_PASS);
-        printProperty(USERNAME);
-		printProperty(PASS);
-        printProperty(NOT_IAM_USERNAME);
-		printProperty(NOT_IAM_PASS);
-        printProperty(NOT_DLAB_USERNAME);
-		printProperty(NOT_DLAB_PASS);
-        printProperty(JENKINS_JOB_URL);
-        printProperty(USER_FOR_ACTIVATE_KEY);
-		printProperty(PASS_FOR_ACTIVATE_KEY);
-        printProperty(ACCESS_KEY_PRIV_FILE_NAME);
-        printProperty(ACCESS_KEY_PUB_FILE_NAME);
-        
-        printProperty(TIMEOUT_JENKINS_AUTOTEST);
-        printProperty(TIMEOUT_UPLOAD_KEY);
-        printProperty(TIMEOUT_SSN_STARTUP);
-
-        printProperty(JUPYTER_SCENARIO_FILES);
-        printProperty(CLOUD_PROVIDER);
-
-        printProperty(AZURE_DATALAKE_ENABLED);
-        printProperty(AZURE_DATALAKE_SHARED_ACCOUNT);
-        printProperty(AZURE_STORAGE_SHARED_ACCOUNT);
-        printProperty(NOTEBOOKS_TO_TEST);
-		printProperty(SKIPPED_LIBS);
-		printProperty(CLUSTER_OS_USERNAME);
-        printProperty(CLUSTER_OS_FAMILY);
-        printProperty(CONF_TAG_RESOURCE_ID);
-
-        printProperty(USE_JENKINS);
-        printProperty(RUN_MODE_LOCAL);
-        printProperty(LOCALHOST_IP);
-	}
-    
-    
-    public static String getJenkinsBuildNumber() {
-    	return jenkinsBuildNumber;
-    }
-
-    public static void setJenkinsBuildNumber(String jenkinsBuildNumber) {
-    	ConfigPropertyValue.jenkinsBuildNumber = jenkinsBuildNumber;
-    }
-
-    public static String getJenkinsUsername() {
-    	return get(JENKINS_USERNAME);
-    }
-    
-    public static String getJenkinsPassword() {
-		return get(JENKINS_PASS);
-    }
-
-    public static String getUsername() {
-    	return get(USERNAME);
-    }
-    
-    public static String getUsernameSimple() {
-    	String s = get(USERNAME);
-		int i = s.indexOf('@');
-		return (i == -1 ? s : s.substring(0, i).toLowerCase());
-	}
-
-    public static String getPassword() {
-		return get(PASS);
-    }
-
-    public static String getNotIAMUsername() {
-    	return get(NOT_IAM_USERNAME);
-    }
-
-    public static String getNotIAMPassword() {
-		return get(NOT_IAM_PASS);
-    }
-
-    public static String getNotDLabUsername() {
-    	return get(NOT_DLAB_USERNAME);
-    }
-
-    public static String getNotDLabPassword() {
-		return get(NOT_DLAB_PASS);
-    }
-
-    public static String getJenkinsJobURL() {
-    	return get(JENKINS_JOB_URL);
-    }
-
-    public static String getUserForActivateKey() {
-    	return get(USER_FOR_ACTIVATE_KEY);
-    }
-
-    public static String getPasswordForActivateKey() {
-		return get(PASS_FOR_ACTIVATE_KEY);
-    }
-
-
-    public static String getAccessKeyPrivFileName() {
-    	File file = new File(get(ACCESS_KEY_PRIV_FILE_NAME));
-        return file.getAbsolutePath();
-    }
-
-    public static String getAccessKeyPubFileName() {
-    	File file = new File(get(ACCESS_KEY_PUB_FILE_NAME));
-        return file.getAbsolutePath();
-    }
-
-    public static String getCloudProvider(){
-        return get(CLOUD_PROVIDER);
-    }
-
-    public static String getAzureAuthFileName(){
-        File file = new File(get(AZURE_AUTHENTICATION_FILE));
-        return file.getAbsolutePath();
-    }
-
-	public static String getGcpAuthFileName() {
-		File file = new File(get(GCP_AUTHENTICATION_FILE));
-		return file.getAbsolutePath();
-	}
-
-    public static String getAwsAccessKeyId() {
-        return get(AWS_ACCESS_KEY_ID);
-    }
-
-    public static String getAwsSecretAccessKey() {
-        return get(AWS_SECRET_ACCESS_KEY);
-    }
-
-	public static String getAwsRegion() {
-	    return get(AWS_REGION);
-	}
-
-	public static Duration getAwsRequestTimeout() {
-    	return getDuration(get(AWS_REQUEST_TIMEOUT, "10s"));
-    }
-
-    public static String getAzureRegion() {
-        return get(AZURE_REGION);
-    }
-
-    public static String getAzureDatalakeEnabled() {
-        return get(AZURE_DATALAKE_ENABLED);
-    }
-
-    public static String getAzureDatalakeSharedAccount() {
-        return get(AZURE_DATALAKE_SHARED_ACCOUNT);
-    }
-
-    public static String getAzureStorageSharedAccount() {
-        return get(AZURE_STORAGE_SHARED_ACCOUNT);
-    }
-
-	public static String getGcpDlabProjectId() {
-		return get(GCP_DLAB_PROJECT_ID);
-	}
-
-    public static String getGcpRegion() {
-        return get(GCP_REGION);
-    }
-
-	public static Duration getGcpRequestTimeout() {
-		return getDuration(get(GCP_REQUEST_TIMEOUT, "10s"));
-	}
-
-    public static Duration getAzureRequestTimeout() {
-        return getDuration(get(AZURE_REQUEST_TIMEOUT, "10s"));
-    }
-
-    public static Duration getTimeoutJenkinsAutotest() {
-    	return getDuration(get(TIMEOUT_JENKINS_AUTOTEST, "0s"));
-    }
-
-    public static int getExecutionThreads() {
-        return getInt(get(EXECUTION_TREADS, "-1"));
-    }
-
-    public static Duration getTimeoutUploadKey() {
-    	return getDuration(get(TIMEOUT_UPLOAD_KEY, "0s"));
-    }
-
-    public static Duration getTimeoutSSNStartup() {
-    	return getDuration(get(TIMEOUT_SSN_STARTUP, "0s"));
-    }
-
-
-    public static String getClusterOsUser() {
-    	return get(CLUSTER_OS_USERNAME);
-    }
-
-    public static String getClusterOsFamily() {
-    	return get(CLUSTER_OS_FAMILY);
-    }
-
-    public static String getNotebookTemplates() {
-    	return get(NOTEBOOKS_TO_TEST);
-    }
-
-	public static String getSkippedLibs() {
-		return get(SKIPPED_LIBS, "[]");
-	}
-
-	public static boolean isUseJenkins() {
-        String s = get(USE_JENKINS, "true");
-    	return Boolean.valueOf(s);
-    }
-    
-    public static String getSsnUrl() {
-        return get(SSN_URL);
-    }
-    
-    public static String getServiceBaseName() {
-        return get(SERVICE_BASE_NAME);
-    }
-    
-    public static boolean isRunModeLocal() {
-    	String s = get(RUN_MODE_LOCAL, "false");
-    	return Boolean.valueOf(s);
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/helper/NamingHelper.java b/integration-tests/src/main/java/com/epam/dlab/automation/helper/NamingHelper.java
deleted file mode 100644
index 3094f1c..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/helper/NamingHelper.java
+++ /dev/null
@@ -1,283 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.helper;
-
-import java.io.IOException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.atomic.AtomicInteger;
-
-public class NamingHelper {
-	public static final String CLUSTER_ABSENT = "cluster_absent";
-	public static final String DATA_ENGINE = "dataengine";
-	public static final String DATA_ENGINE_SERVICE = "dataengine-service";
-	public static final String DEEPLEARNING = "deeplearning";
-	public static final String JUPYTER = "jupyter";
-	public static final String TENSOR = "tensor";
-	public static final String RSTUDIO = "rstudio";
-	public static final String ZEPPELIN = "zeppelin";
-
-	private static final Map<String, String> SIMPLE_NOTEBOOK_NAMES = new HashMap<>();
-
-    private static AtomicInteger idCounter = new AtomicInteger(0);
-    
-    private static String serviceBaseName;
-    private static String ssnURL;
-    private static String ssnIp;
-    private static String ssnToken;
-
-	static {
-		SIMPLE_NOTEBOOK_NAMES.put(DEEPLEARNING, "dlr");
-		SIMPLE_NOTEBOOK_NAMES.put(JUPYTER, "jup");
-		SIMPLE_NOTEBOOK_NAMES.put(TENSOR, "tfl");
-		SIMPLE_NOTEBOOK_NAMES.put(RSTUDIO, "rst");
-		SIMPLE_NOTEBOOK_NAMES.put(ZEPPELIN, "zep");
-	}
-
-    private NamingHelper(){}
-
-	public static Map<String, String> getSimpleNotebookNames() {
-		return SIMPLE_NOTEBOOK_NAMES;
-	}
-
-	public static String getServiceBaseName() {
-    	return serviceBaseName;
-    }
-    
-    public static void setServiceBaseName(String serviceBaseName) {
-    	if (NamingHelper.serviceBaseName != null) {
-    		throw new IllegalArgumentException("Field serviceBaseName already has a value");
-    	}
-    	NamingHelper.serviceBaseName = serviceBaseName;
-    }
-    
-    public static String getSsnURL() {
-    	return ssnURL;
-    }
-    
-    public static void setSsnURL(String ssnURL) {
-    	if (NamingHelper.ssnURL != null) {
-    		throw new IllegalArgumentException("Field ssnURL already has a value");
-    	}
-    	NamingHelper.ssnURL = ssnURL;
-    }
-
-    public static String getSsnName() {
-    	return serviceBaseName + "-ssn";
-    }
-    
-    public static String getSsnIp() {
-    	return ssnIp;
-    }
-    
-    public static void setSsnIp(String ssnIp) {
-    	if (NamingHelper.ssnIp != null) {
-    		throw new IllegalArgumentException("Field ssnIp already has a value");
-    	}
-    	NamingHelper.ssnIp = ssnIp;
-    }
-
-    public static String getSsnToken() {
-    	return ssnToken;
-    }
-    
-    public static void setSsnToken(String ssnToken) {
-    	if (NamingHelper.ssnToken != null) {
-    		throw new IllegalArgumentException("Field ssnToken already has a value");
-    	}
-    	NamingHelper.ssnToken = ssnToken;
-    }
-    
-    public static String getEdgeName() {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "edge");
-            case CloudProvider.AZURE_PROVIDER:
-			case CloudProvider.GCP_PROVIDER:
-				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "edge")
-                        .replace('_', '-');
-			default:
-                return null;
-        }
-    }
-    
-    public static String getNotebookInstanceName(String notebookName) {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "nb", notebookName);
-            case CloudProvider.AZURE_PROVIDER:
-			case CloudProvider.GCP_PROVIDER:
-				return String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(), "nb", notebookName)
-                        .replace('_', '-');
-			default:
-                return null;
-        }
-    }
-    
-    public static String getClusterInstanceName(String notebookName, String clusterName, String dataEngineType) {
-		if (DATA_ENGINE.equals(dataEngineType)) {
-            switch (ConfigPropertyValue.getCloudProvider()) {
-                case CloudProvider.AWS_PROVIDER:
-					return String.join("-", getClusterInstanceNameForTestDES(notebookName, clusterName,
-							dataEngineType), "m");
-                case CloudProvider.AZURE_PROVIDER:
-				case CloudProvider.GCP_PROVIDER:
-					return String.join("-", getClusterInstanceNameForTestDES(notebookName, clusterName,
-							dataEngineType), "m").replace('_', '-');
-				default:
-                    return null;
-            }
-    	}
-    	else {
-    		return getClusterInstanceNameForTestDES(notebookName,clusterName,dataEngineType);
-    	}
-    }
-    
-    public static String getClusterInstanceNameForTestDES(String notebookName, String clusterName, String dataEngineType) {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-				return DATA_ENGINE.equals(dataEngineType) ?
-						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-								"de", notebookName, clusterName) :
-						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-								"des", notebookName, clusterName);
-
-            case CloudProvider.AZURE_PROVIDER:
-				return DATA_ENGINE.equals(dataEngineType) ?
-						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-								"de", notebookName, clusterName).replace('_', '-') :
-						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-								"des", notebookName, clusterName).replace('_', '-');
-
-			case CloudProvider.GCP_PROVIDER:
-				return DATA_ENGINE.equals(dataEngineType) ?
-						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-								"de", notebookName, clusterName).replace('_', '-') :
-						String.join("-", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-								"des", notebookName, clusterName, "m").replace('_', '-');
-			default:
-                return null;
-        }
-
-    }
-
-	public static String getNotebookContainerName(String notebookName, String action) {
-    	return String.join("_", ConfigPropertyValue.getUsernameSimple(), action, "exploratory", notebookName);
-    }
-
-	public static String getClusterContainerName(String notebookName, String clusterName, String action) {
-		return String.join("_", ConfigPropertyValue.getUsernameSimple(), action, "computational",
-				notebookName, clusterName);
-    }
-    
-    public static String generateRandomValue() {
-		SimpleDateFormat df = new SimpleDateFormat("yyyyMMddhmmss");
-        return String.join("_",  "ITest", df.format(new Date()), String.valueOf(idCounter.incrementAndGet()));
-    }
-
-    public static String generateRandomValue(String notebokTemplateName) {
-		return String.join("_", SIMPLE_NOTEBOOK_NAMES.get(notebokTemplateName),
-				String.valueOf(idCounter.incrementAndGet()));
-    }
-    
-    public static String getSelfServiceURL(String path) {
-        return ssnURL + path;
-    }
-    
-    public static String getStorageName() {
-        switch (ConfigPropertyValue.getCloudProvider()) {
-            case CloudProvider.AWS_PROVIDER:
-			case CloudProvider.GCP_PROVIDER:
-                return String.format("%s-%s-%s", serviceBaseName, ConfigPropertyValue.getUsernameSimple(),
-                        CloudHelper.getStorageNameAppendix()).replace('_', '-').toLowerCase();
-            case CloudProvider.AZURE_PROVIDER:
-                return String.format("%s-%s-%s", serviceBaseName, "shared",
-                        CloudHelper.getStorageNameAppendix()).replace('_', '-').toLowerCase();
-			default:
-                return null;
-        }
-    }
-
-	public static String getClusterName(String clusterInstanceName, String dataEngineType, boolean restrictionMode)
-			throws IOException {
-		switch (ConfigPropertyValue.getCloudProvider()) {
-			case CloudProvider.AWS_PROVIDER:
-			case CloudProvider.AZURE_PROVIDER:
-				return DATA_ENGINE.equals(dataEngineType) ? clusterInstanceName :
-						CloudHelper.getInstanceNameByCondition(clusterInstanceName, restrictionMode);
-
-			case CloudProvider.GCP_PROVIDER:
-				return DATA_ENGINE.equals(dataEngineType) ? clusterInstanceName :
-						CloudHelper.getGcpDataprocClusterName(
-								CloudHelper.getInstanceNameByCondition(clusterInstanceName, restrictionMode));
-			default:
-				return null;
-		}
-    }
-
-	public static String getNotebookTestTemplatesPath(String notebookName) {
-		if (notebookName.contains(getSimpleNotebookNames().get(DEEPLEARNING))) {
-            return "test_templates/deeplearning/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(JUPYTER))) {
-            return "test_templates/jupyter/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(RSTUDIO))) {
-            return "test_templates/rstudio/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(TENSOR))) {
-            return "test_templates/tensor/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(ZEPPELIN))) {
-            return "test_templates/zeppelin/";
-        }
-        else return "";
-
-    }
-
-    public static String getNotebookType(String notebookName){
-		if (notebookName.contains(getSimpleNotebookNames().get(DEEPLEARNING))) {
-			return DEEPLEARNING + "/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(JUPYTER))) {
-			return JUPYTER + "/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(RSTUDIO))) {
-			return RSTUDIO + "/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(TENSOR))) {
-			return TENSOR + "/";
-		} else if (notebookName.contains(getSimpleNotebookNames().get(ZEPPELIN))) {
-			return ZEPPELIN + "/";
-        }
-        else return "";
-
-    }
-
-	public static boolean isClusterRequired(String notebookName) {
-		if (notebookName.contains(getSimpleNotebookNames().get(DEEPLEARNING))) {
-			return false;
-		} else if (notebookName.contains(getSimpleNotebookNames().get(JUPYTER))) {
-			return true;
-		} else if (notebookName.contains(getSimpleNotebookNames().get(RSTUDIO))) {
-			return true;
-		} else if (notebookName.contains(getSimpleNotebookNames().get(TENSOR))) {
-			return false;
-		} else if (notebookName.contains(getSimpleNotebookNames().get(ZEPPELIN))) {
-			return true;
-		}
-		return true;
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/helper/PropertiesResolver.java b/integration-tests/src/main/java/com/epam/dlab/automation/helper/PropertiesResolver.java
deleted file mode 100644
index f71ccc3..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/helper/PropertiesResolver.java
+++ /dev/null
@@ -1,217 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.helper;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.file.Paths;
-import java.util.Properties;
-
-public class PropertiesResolver {
-
-    private static final Logger LOGGER = LogManager.getLogger(PropertiesResolver.class);
-    public static final boolean DEV_MODE;
-	private static final String CONFIG_FILE_NAME = "application.properties";
-	public static final String NOTEBOOK_SCENARIO_FILES_LOCATION_PROPERTY_TEMPLATE = "scenario.%s.files.location";
-	public static final String NOTEBOOK_TEST_TEMPLATES_LOCATION = "%s.test.templates.location";
-	public static final String NOTEBOOK_CONFIGURATION_FILE_TEMPLATE = "%s/%s-notebook.json";
-
-    //keys from application.properties(dev-application.properties)
-	private static final String CONF_FILE_LOCATION_PROPERTY = "conf.file.location";
-	private static final String KEYS_DIRECTORY_LOCATION_PROPERTY = "keys.directory.location";
-	private static final String NOTEBOOK_TEST_DATA_COPY_SCRIPT = "notebook.test.data.copy.script";
-	private static final String NOTEBOOK_TEST_LIB_LOCATION = "notebook.test.lib.location";
-
-	private static final String SCENARIO_JUPYTER_FILES_LOCATION_PROPERTY = "scenario.jupyter.files.location";
-	private static final String SCENARIO_RSTUDIO_FILES_LOCATION_PROPERTY = "scenario.rstudio.files.location";
-	private static final String SCENARIO_ZEPPELIN_FILES_LOCATION_PROPERTY = "scenario.zeppelin.files.location";
-	private static final String SCENARIO_TENSOR_FILES_LOCATION_PROPERTY = "scenario.tensor.files.location";
-	private static final String SCENARIO_DEEPLEARNING_FILES_LOCATION_PROPERTY = "scenario.deeplearning.files.location";
-
-	private static final String JUPYTER_TEST_TEMPLATES_LOCATION_PROPERTY = "jupyter.test.templates.location";
-	private static final String RSTUDIO_TEST_TEMPLATES_LOCATION_PROPERTY = "rstudio.test.templates.location";
-	private static final String ZEPPELIN_TEST_TEMPLATES_LOCATION_PROPERTY = "zeppelin.test.templates.location";
-	private static final String TENSOR_TEST_TEMPLATES_LOCATION_PROPERTY = "tensor.test.templates.location";
-	private static final String DEEPLEARNING_TEST_TEMPLATES_LOCATION_PROPERTY = "deeplearning.test.templates.location";
-
-	private static final String CLUSTER_CONFIG_FILE_LOCATION_PROPERTY = "ec2.config.files.location";
-	private static final String AZURE_CONFIG_FILE_LOCATION_PROPERTY = "azure.config.files.location";
-	private static final String GCP_CONFIG_FILE_LOCATION_PROPERTY = "gcp.config.files.location";
-
-	private PropertiesResolver() {
-	}
-
-    public static String getJupyterTestTemplatesLocationProperty() {
-        return JUPYTER_TEST_TEMPLATES_LOCATION_PROPERTY;
-    }
-
-    public static String getRstudioTestTemplatesLocationProperty() {
-        return RSTUDIO_TEST_TEMPLATES_LOCATION_PROPERTY;
-    }
-
-    public static String getZeppelinTestTemplatesLocationProperty() {
-        return ZEPPELIN_TEST_TEMPLATES_LOCATION_PROPERTY;
-    }
-
-    public static String getTensorTestTemplatesLocationProperty() {
-        return TENSOR_TEST_TEMPLATES_LOCATION_PROPERTY;
-    }
-
-    public static String getDeeplearningTestTemplatesLocationProperty() {
-        return DEEPLEARNING_TEST_TEMPLATES_LOCATION_PROPERTY;
-    }
-
-    private static Properties properties = new Properties();
-
-    static {
-        DEV_MODE = System.getProperty("run.mode", "remote").equalsIgnoreCase("dev");
-        loadApplicationProperties();
-    }
-
-	private static String getProperty(String propertyName, boolean isOptional) {
-		String s = System.getProperty(propertyName, "");
-		if (s.isEmpty() && !isOptional) {
-        	throw new IllegalArgumentException("Missed required JVM argument -D" + propertyName);
-        }
-        return s;
-	}
-	
-	public static void overlapProperty(Properties props, String propertyName, boolean isOptional) {
-		String argName = StringUtils.replaceChars(propertyName, '_', '.').toLowerCase();
-		String s = System.getProperty(argName, "");
-		if (!s.isEmpty()) {
-            props.setProperty(propertyName, s);
-        }
-		if(!isOptional && props.getProperty(propertyName, "").isEmpty()) {
-        	throw new IllegalArgumentException("Missed required argument -D" + argName + " or property " + propertyName);
-        }
-	}
-
-
-    private static String getConfRootPath() {
-    	return getProperty("conf.root.path", false);
-    }
-
-    private static void loadApplicationProperties() {
-        InputStream input = null;
-
-        try {
-            input = PropertiesResolver.class.getClassLoader().getResourceAsStream(CONFIG_FILE_NAME);
-
-            // load a properties file
-            properties.load(input);
-            String rootPath = getConfRootPath();
-            for (String key : properties.keySet().toArray(new String[0])) {
-            	String path = StringUtils.replace(properties.getProperty(key), "${CONF_ROOT_PATH}", rootPath);
-            	path = Paths.get(path).toAbsolutePath().toString();
-            	properties.setProperty(key, path);
-            }
-            overlapProperty(properties, CONF_FILE_LOCATION_PROPERTY, false);
-
-            // get the property value and print it out
-            LOGGER.info(properties.getProperty(CONF_FILE_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(KEYS_DIRECTORY_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(NOTEBOOK_TEST_DATA_COPY_SCRIPT));
-            LOGGER.info(properties.getProperty(NOTEBOOK_TEST_LIB_LOCATION));
-            LOGGER.info(properties.getProperty(SCENARIO_JUPYTER_FILES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(SCENARIO_RSTUDIO_FILES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(SCENARIO_ZEPPELIN_FILES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(SCENARIO_TENSOR_FILES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(SCENARIO_DEEPLEARNING_FILES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(JUPYTER_TEST_TEMPLATES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(RSTUDIO_TEST_TEMPLATES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(ZEPPELIN_TEST_TEMPLATES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(TENSOR_TEST_TEMPLATES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(DEEPLEARNING_TEST_TEMPLATES_LOCATION_PROPERTY));
-            LOGGER.info(properties.getProperty(CLUSTER_CONFIG_FILE_LOCATION_PROPERTY));
-
-        } catch (IOException ex) {
-            LOGGER.error(ex);
-            LOGGER.error("Application configuration file could not be found by the path: {}", CONFIG_FILE_NAME);
-            System.exit(0);
-        } finally {
-            if (input != null) {
-                try {
-                    input.close();
-                } catch (IOException e) {
-                    LOGGER.error(e);
-                    LOGGER.error("Application configuration file could not be found by the path: {}", CONFIG_FILE_NAME);
-                }
-            }
-        }
-    }
-
-
-    public static String getConfFileLocation() {
-        return properties.getProperty(CONF_FILE_LOCATION_PROPERTY);
-    }
-
-    public static String getKeysLocation() {
-        return properties.getProperty(KEYS_DIRECTORY_LOCATION_PROPERTY);
-    }
-
-    public static String getNotebookTestDataCopyScriptLocation() {
-        return properties.getProperty(NOTEBOOK_TEST_DATA_COPY_SCRIPT);
-    }
-
-    public static String getNotebookTestLibLocation() {
-        return properties.getProperty(NOTEBOOK_TEST_LIB_LOCATION);
-    }
-
-    public static String getScenarioJupyterFilesLocation() {
-        return properties.getProperty(SCENARIO_JUPYTER_FILES_LOCATION_PROPERTY);
-    }
-
-    public static String getScenarioRstudioFilesLocation() {
-        return properties.getProperty(SCENARIO_RSTUDIO_FILES_LOCATION_PROPERTY);
-    }
-
-    public static String getScenarioZeppelinFilesLocation() {
-        return properties.getProperty(SCENARIO_ZEPPELIN_FILES_LOCATION_PROPERTY);
-    }
-
-    public static String getScenarioTensorFilesLocation() {
-        return properties.getProperty(SCENARIO_TENSOR_FILES_LOCATION_PROPERTY);
-    }
-
-    public static String getScenarioDeeplearningFilesLocation() {
-        return properties.getProperty(SCENARIO_DEEPLEARNING_FILES_LOCATION_PROPERTY);
-    }
-
-    public static String getClusterEC2ConfFileLocation() {
-        return properties.getProperty(CLUSTER_CONFIG_FILE_LOCATION_PROPERTY );
-    }
-
-    public static String getClusterAzureConfFileLocation() {
-        return properties.getProperty(AZURE_CONFIG_FILE_LOCATION_PROPERTY );
-    }
-
-    public static String getClusterGcpConfFileLocation() {
-        return properties.getProperty(GCP_CONFIG_FILE_LOCATION_PROPERTY);
-    }
-
-    public static String getPropertyByName(String propertyName) {
-        return properties.getProperty(propertyName);
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/helper/WaitForStatus.java b/integration-tests/src/main/java/com/epam/dlab/automation/helper/WaitForStatus.java
deleted file mode 100644
index 4cb4129..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/helper/WaitForStatus.java
+++ /dev/null
@@ -1,205 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.helper;
-
-import com.epam.dlab.automation.http.ContentType;
-import com.epam.dlab.automation.http.HttpRequest;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.jayway.restassured.path.json.JsonPath;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.time.Duration;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Function;
-import java.util.function.Predicate;
-import java.util.stream.Stream;
-
-public class WaitForStatus {
-
-	private static final Logger LOGGER = LogManager.getLogger(WaitForStatus.class);
-	private static final String EXPLORATORY_PATH = "exploratory";
-
-	private static long getSsnRequestTimeout() {
-		return ConfigPropertyValue.isRunModeLocal() ? 1000 : 10000;
-	}
-
-	private WaitForStatus() {
-	}
-
-	public static boolean selfService(Duration duration) throws InterruptedException {
-		HttpRequest request = new HttpRequest();
-		int actualStatus;
-		long timeout = duration.toMillis();
-		long expiredTime = System.currentTimeMillis() + timeout;
-
-		while ((actualStatus = request.webApiGet(NamingHelper.getSsnURL(), ContentType.TEXT).statusCode()) !=
-				HttpStatusCode.OK) {
-			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-				break;
-			}
-			Thread.sleep(getSsnRequestTimeout());
-		}
-
-		if (actualStatus != HttpStatusCode.OK) {
-			LOGGER.info("ERROR: Timeout has been expired for SSN available. Timeout was {}", duration);
-			return false;
-		} else {
-			LOGGER.info("Current status code for SSN is {}", actualStatus);
-		}
-
-		return true;
-	}
-
-	public static int uploadKey(String url, String token, int status, Duration duration)
-			throws InterruptedException {
-		LOGGER.info(" Waiting until status code {} with URL {} with token {}", status, url, token);
-		HttpRequest request = new HttpRequest();
-		int actualStatus;
-		long timeout = duration.toMillis();
-		long expiredTime = System.currentTimeMillis() + timeout;
-
-		while ((actualStatus = request.webApiGet(url, token).getStatusCode()) == status) {
-			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-				break;
-			}
-			Thread.sleep(getSsnRequestTimeout());
-		}
-
-		if (actualStatus == status) {
-			LOGGER.info("ERROR: {}: Timeout has been expired for request.");
-			LOGGER.info("  URL is {}", url);
-			LOGGER.info("  token is {}", token);
-			LOGGER.info("  status is {}", status);
-			LOGGER.info("  timeout is {}", duration);
-		} else {
-			LOGGER.info(" Current status code for {} is {}", url, actualStatus);
-		}
-
-		return actualStatus;
-	}
-
-	public static String notebook(String url, String token, String notebookName, String status, Duration duration)
-			throws InterruptedException {
-		LOGGER.info("Waiting for status {} with URL {} with token {} for notebook {}", status, url, token,
-				notebookName);
-		HttpRequest request = new HttpRequest();
-		String actualStatus;
-		long timeout = duration.toMillis();
-		long expiredTime = System.currentTimeMillis() + timeout;
-
-		do {
-			actualStatus = getNotebookStatus(request.webApiGet(url, token)
-					.getBody()
-					.jsonPath(), notebookName);
-			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-				break;
-			}
-			Thread.sleep(getSsnRequestTimeout());
-		}
-		while (status.contains(actualStatus));
-
-		if (status.contains(actualStatus)) {
-			LOGGER.info("ERROR: {}: Timeout has been expired for request.", notebookName);
-			LOGGER.info("  {}: URL is {}", notebookName, url);
-			LOGGER.info("  {}: token is {}", notebookName, token);
-			LOGGER.info("  {}: status is {}", notebookName, status);
-			LOGGER.info("  {}: timeout is {}", notebookName, duration);
-		} else {
-			LOGGER.info("{}: Current state for Notebook {} is {}", notebookName, notebookName, actualStatus);
-		}
-
-		return actualStatus;
-	}
-
-	public static String cluster(String url, String token, String notebookName, String computationalName, String
-			status, Duration duration)
-			throws InterruptedException {
-		LOGGER.info("{}: Waiting until status {} with URL {} with token {} for computational {} on notebook {}",
-				notebookName, status, url, token, computationalName, notebookName);
-		HttpRequest request = new HttpRequest();
-		String actualStatus;
-		long timeout = duration.toMillis();
-		long expiredTime = System.currentTimeMillis() + timeout;
-
-		do {
-			actualStatus = getClusterStatus(request.webApiGet(url, token)
-					.getBody()
-					.jsonPath(), notebookName, computationalName);
-			if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-				break;
-			}
-			Thread.sleep(getSsnRequestTimeout());
-		}
-		while (actualStatus.contains(status));
-
-		if (actualStatus.contains(status)) {
-			LOGGER.info("ERROR: Timeout has been expired for request.");
-			LOGGER.info("  URL is {}", url);
-			LOGGER.info("  token is {}", token);
-			LOGGER.info("  status is {}", status);
-			LOGGER.info("  timeout is {}", duration);
-		} else {
-			LOGGER.info("{}: Current state for cluster {} on notebook is {}", notebookName, computationalName,
-					actualStatus);
-		}
-
-		return actualStatus;
-	}
-
-	@SuppressWarnings("unchecked")
-	public static String getClusterStatus(JsonPath json, String notebookName, String computationalName) {
-		return (String) json.getList(EXPLORATORY_PATH)
-				.stream()
-				.filter(exploratoryNamePredicate(notebookName))
-				.flatMap(computationalResourcesStream())
-				.filter(computationalNamePredicate(computationalName))
-				.map(statusFieldPredicate())
-				.findAny()
-				.orElse(StringUtils.EMPTY);
-	}
-
-	private static String getNotebookStatus(JsonPath json, String notebookName) {
-		List<Map<String, String>> notebooks = json.getList(EXPLORATORY_PATH);
-		return notebooks.stream().filter(exploratoryNamePredicate(notebookName))
-				.map(e -> e.get("status"))
-				.findAny()
-				.orElse(StringUtils.EMPTY);
-	}
-
-	private static Function<Object, Object> statusFieldPredicate() {
-		return cr -> (((HashMap) cr).get("status"));
-	}
-
-	private static Predicate<Object> computationalNamePredicate(String computationalName) {
-		return cr -> computationalName.equals(((HashMap) cr).get("computational_name"));
-	}
-
-	private static Function<Object, Stream<?>> computationalResourcesStream() {
-		return d -> ((List) ((HashMap) d).get("computational_resources")).stream();
-	}
-
-	private static Predicate<Object> exploratoryNamePredicate(String notebookName) {
-		return d -> notebookName.equals(((HashMap) d).get("exploratory_name"));
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/http/ApiPath.java b/integration-tests/src/main/java/com/epam/dlab/automation/http/ApiPath.java
deleted file mode 100644
index c3dc9f1..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/http/ApiPath.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.http;
-
-public class ApiPath {
-
-    public static final String LOGIN = "/api/user/login";
-    public static final String LOGOUT = "/api/user/logout";
-    public static final String UPLOAD_KEY = "/api/user/access_key"; 
-    public static final String AUTHORIZE_USER = "/api/user/authorize";
-    public static final String EXP_ENVIRONMENT = "/api/infrastructure_provision/exploratory_environment";
-    public static final String PROVISIONED_RES = "/api/infrastructure/info";
-    public static final String COMPUTATIONAL_RES = "/api/infrastructure_provision/computational_resources/dataengine-service";
-    public static final String COMPUTATIONAL_RES_SPARK = "/api/infrastructure_provision/computational_resources/dataengine";
-    private static final String STOP_NOTEBOOK = EXP_ENVIRONMENT + "/%s/stop";
-    private static final String TERMINATE_CLUSTER =
-			"/api/infrastructure_provision/computational_resources/%s/%s/terminate";
-	private static final String START_CLUSTER = "/api/infrastructure_provision/computational_resources/%s/%s/start";
-	private static final String STOP_CLUSTER = "/api/infrastructure_provision/computational_resources/%s/%s/stop";
-    private static final String TERMINATE_NOTEBOOK = EXP_ENVIRONMENT + "/%s/terminate";
-    public static final String LIB_GROUPS = "/api/infrastructure_provision/exploratory_environment/lib_groups";
-    public static final String LIB_LIST = "/api/infrastructure_provision/exploratory_environment/search/lib_list";
-    public static final String LIB_INSTALL = "/api/infrastructure_provision/exploratory_environment/lib_install";
-    public static final String LIB_LIST_EXPLORATORY_FORMATTED = "/api/infrastructure_provision/exploratory_environment/lib_list/formatted";
-    public static final String IMAGE_CREATION = "/api/infrastructure_provision/exploratory_environment/image";
-
-    private ApiPath(){}
-
-
-    private static String configureURL(String url, Object... args) {
-        return String.format(url, args);        
-    }
-    
-    public static String getStopNotebookUrl(String serviceBaseName) {
-        return configureURL(STOP_NOTEBOOK, serviceBaseName);
-    }
-    
-    public static String getTerminateClusterUrl(String notebookName, String desName) {
-        return configureURL(TERMINATE_CLUSTER, notebookName, desName);
-    }
-    
-    public static String getTerminateNotebookUrl(String serviceBaseName) {
-        return configureURL(TERMINATE_NOTEBOOK, serviceBaseName);
-    }
-
-	public static String getStartClusterUrl(String notebookName, String desName) {
-		return configureURL(START_CLUSTER, notebookName, desName);
-	}
-
-	public static String getStopClusterUrl(String notebookName, String desName) {
-		return configureURL(STOP_CLUSTER, notebookName, desName);
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/http/ContentType.java b/integration-tests/src/main/java/com/epam/dlab/automation/http/ContentType.java
deleted file mode 100644
index e2a482c..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/http/ContentType.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.http;
-
-public class ContentType{
-    public static final String FORMDATA = "multipart/form-data";
-    public static final String JSON = "application/json";
-    public static final String ANY = "*/*";
-    public static final String TEXT = "text/html";
-
-	private ContentType() {
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/http/HttpRequest.java b/integration-tests/src/main/java/com/epam/dlab/automation/http/HttpRequest.java
deleted file mode 100644
index 2ef936f..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/http/HttpRequest.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.http;
-
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
-
-import java.io.File;
-import java.util.Map;
-
-import static com.jayway.restassured.RestAssured.given;
-
-public class HttpRequest {
-
-	private static final String AUTHORIZATION = "Authorization";
-	private static final String BEARER = "Bearer ";
-
-	private void addHeader(String headerType, String headerValue) {
-		given().header(headerType, headerValue);
-	}
-
-	public void addAuthorizationBearer(String token) {
-		this.addHeader(AUTHORIZATION, BEARER + token);
-	}
-
-	public Response webApiGet(String url) {
-		return given().contentType(ContentType.JSON).when().get(url);
-	}
-
-	public Response webApiGet(String url, String token) {
-		return given().header(AUTHORIZATION, BEARER + token).contentType(ContentType.JSON).when().get(url);
-	}
-	
-	public Response webApiGet(String url, String token, Map<String,?> params) {
-		return given().header(AUTHORIZATION, BEARER + token).contentType(ContentType.JSON).params(params).when().get
-				(url);
-	}
-
-	public Response webApiPost(String url, String contentType, Object body) {
-		return given().contentType(contentType).body(body).when().post(url);
-	}
-
-	public Response webApiPost(String url, String contentType) {
-		return given().contentType(contentType).when().post(url);
-	}
-
-	public Response webApiPost(String url, String contentType, String token) {
-		return given()
-				.contentType(contentType)
-				.header(AUTHORIZATION, BEARER + token)
-				.multiPart(new File(ConfigPropertyValue.getAccessKeyPubFileName()))
-				.formParam(ConfigPropertyValue.getAccessKeyPubFileName())
-				.contentType(contentType)
-				.when()
-				.post(url);
-	}
-
-	public Response webApiPost(String url, String contentType, Object body, String token) {
-		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).body(body).when().post(url);
-	}
-
-	public Response webApiPut(String url, String contentType, Object body, String token) {
-		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).body(body).when().put(url);
-	}
-
-	public Response webApiPut(String url, String contentType, String token) {
-		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).when().put(url);
-	}
-
-	public Response webApiDelete(String url, String contentType, String token) {
-		return given().contentType(contentType).header(AUTHORIZATION, BEARER + token).when().delete(url);
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/http/HttpStatusCode.java b/integration-tests/src/main/java/com/epam/dlab/automation/http/HttpStatusCode.java
deleted file mode 100644
index 6c4aef0..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/http/HttpStatusCode.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.http;
-
-public class HttpStatusCode {
-    
-    public static final int OK = 200;
-    public static final int UNAUTHORIZED = 401;
-    public static final int ACCEPTED = 202;
-    public static final int NOT_FOUND = 404;
-
-	private HttpStatusCode() {
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsConfigProperties.java b/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsConfigProperties.java
deleted file mode 100644
index 158715b..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsConfigProperties.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.jenkins;
-
-public class JenkinsConfigProperties {
-
-    static final long JENKINS_REQUEST_TIMEOUT = 5000;
-
-	static final String AUTHORIZATION = "Authorization";
-	static final String AUTHORIZATION_KEY = "Basic %s";//the replacement is decoded to base64 user:password
-
-	static final String SUCCESS_STATUS = "true";
-	static final String JENKINS_JOB_NAME_SEARCH = "/";
-
-	static String jenkinsJobStartBody = "\"name=Access_Key_ID&value=%s" +
-            "&name=Secret_Access_Key&value=%s" +
-            "&name=Infrastructure_Tag&value=%s" +
-            "name=OS_user&value=%s&name=Cloud_provider&value=aws&name=OS_family&value=%s&name=Action&value=create" +
-            "&json=%7B%22parameter" +
-            "%22%3A+%5B%7B%22name%22%3A+%22Access_Key_ID%22%2C+%22value%22%3A+%22%s" +
-            "%22%7D%2C+%7B%22name%22%3A+%22Secret_Access_Key%22%2C+%22value%22%3A+%22%s" +
-            "%22%7D%2C+%7B%22name%22%3A+%22Infrastructure_Tag%22%2C+%22value%22%3A+%22%s" +
-            "%22%7D%2C+%7B%22name%22%3A+%22OS_user%22%2C+%22value%22%3A+%22%s" +
-            "%22%7D%2C+%7B%22name%22%3A+%22Cloud_provider%22%2C+%22value%22%3A+%22aws" +
-            "%22%7D%2C+%7B%22name%22%3A+%22OS_family%22%2C+%22value%22%3A+%22%s" +
-            "%22%7D%2C+%7B%22name%22%3A+%22Action%22%2C+%22value%22%3A+%22create" +
-            "%22%7D%5D%7D&Submit=Build";
-
-	private JenkinsConfigProperties() {
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsResponseElements.java b/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsResponseElements.java
deleted file mode 100644
index 0aca9f6..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsResponseElements.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.jenkins;
-
-public class JenkinsResponseElements {
-	public static final String IN_QUEUE_ELEMENT = "freeStyleProject.inQueue";
-	public static final String HTML_TITLE = "html.head.title";
-	public static final String RESULT = "result";
-
-	private JenkinsResponseElements() {
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsService.java b/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsService.java
deleted file mode 100644
index 5d2a995..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsService.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.jenkins;
-
-import com.epam.dlab.automation.exceptions.JenkinsException;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.epam.dlab.automation.helper.NamingHelper;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.authentication.FormAuthConfig;
-import com.jayway.restassured.http.ContentType;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.specification.RequestSpecification;
-import org.apache.commons.codec.binary.Base64;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.time.Duration;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-
-import static com.jayway.restassured.RestAssured.given;
-
-public class JenkinsService {
-	private static final Logger LOGGER = LogManager.getLogger(JenkinsService.class);
-
-    private final String awsAccessKeyId;
-    private final String awsSecretAccessKey;
-    
-    private String ssnURL;
-    private String serviceBaseName;
-
-	private FormAuthConfig config = new FormAuthConfig(JenkinsConfigProperties.JENKINS_JOB_NAME_SEARCH, "username",
-			"password");
-    
-    public JenkinsService(){
-    	if (!ConfigPropertyValue.isUseJenkins()) {
-    		ssnURL = ConfigPropertyValue.getSsnUrl();
-    		serviceBaseName = ConfigPropertyValue.getServiceBaseName();
-    	}
-        awsAccessKeyId = convertToParam(ConfigPropertyValue.getAwsAccessKeyId());
-        awsSecretAccessKey = convertToParam(ConfigPropertyValue.getAwsSecretAccessKey());
-    }
-    
-    private String convertToParam(String s) {
-    	s= s.replaceAll("/", "%2F");
-    	return s;
-    }
-    
-    public String getSsnURL() {
-        return ssnURL;
-    }
-
-    public String getServiceBaseName() {
-        return serviceBaseName;
-    }
-    
-    private String getQueueStatus() {
-    	return getWhen(ContentType.XML)
-                .get(JenkinsUrls.API).getBody()
-                .xmlPath()
-                .getString(JenkinsResponseElements.IN_QUEUE_ELEMENT);
-    }
-
-	private void waitForJenkinsStartup(Duration duration) throws InterruptedException {
-    	String actualStatus;
-    	long timeout = duration.toMillis();
-        long expiredTime = System.currentTimeMillis() + timeout;
-        
-    	while ((actualStatus = getQueueStatus()).endsWith(JenkinsConfigProperties.SUCCESS_STATUS)) {
-            Thread.sleep(JenkinsConfigProperties.JENKINS_REQUEST_TIMEOUT);
-            if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-            	actualStatus = getQueueStatus();
-            	break;
-            }
-        }
-        
-        if (actualStatus.endsWith(JenkinsConfigProperties.SUCCESS_STATUS)) {
-            LOGGER.info("ERROR: Timeout has been expired for Jenkins");
-            LOGGER.info("  timeout is {}");
-        }
-    }
-
-	public String runJenkinsJob(String jenkinsJobURL) throws InterruptedException {
-    	if (!ConfigPropertyValue.isUseJenkins()) {
-    		return ConfigPropertyValue.getJenkinsBuildNumber();
-    	}
-
-		baseUriInitialize(jenkinsJobURL);
-        String dateAsString = NamingHelper.generateRandomValue();
-        Response responsePostJob = getWhen(ContentType.URLENC)
-				.body(String.format(JenkinsConfigProperties.jenkinsJobStartBody,
-                        awsAccessKeyId, awsSecretAccessKey, dateAsString,
-                        ConfigPropertyValue.getClusterOsUser(), ConfigPropertyValue.getClusterOsFamily(),
-                        awsAccessKeyId, awsSecretAccessKey, dateAsString,
-                        ConfigPropertyValue.getClusterOsUser(), ConfigPropertyValue.getClusterOsFamily()))
-        		.post(jenkinsJobURL + "build");
-        Assert.assertEquals(responsePostJob.statusCode(), HttpStatusCode.OK);
-        
-        waitForJenkinsStartup(ConfigPropertyValue.getTimeoutJenkinsAutotest());
-        
-        setBuildNumber();
-        checkBuildResult();
-        setJenkinsURLServiceBaseName();
-        
-        return ConfigPropertyValue.getJenkinsBuildNumber();
-    }
-
-	public String getJenkinsJob() throws InterruptedException {
-    	if (!ConfigPropertyValue.isUseJenkins()) {
-    		return ConfigPropertyValue.getJenkinsBuildNumber();
-    	}
-
-		baseUriInitialize(ConfigPropertyValue.getJenkinsJobURL());
-
-        setBuildNumber();
-        checkBuildResult();
-        setJenkinsURLServiceBaseName();
-
-        return ConfigPropertyValue.getJenkinsBuildNumber();
-    }
-
-	private static void baseUriInitialize(String value) {
-		RestAssured.baseURI = value;
-	}
-
-	private void setBuildNumber() {
-        if (ConfigPropertyValue.getJenkinsBuildNumber() != null) {
-            LOGGER.info("Jenkins build number is {}", ConfigPropertyValue.getJenkinsBuildNumber());
-        	return;
-    	}
-
-        String buildName = getWhen(ContentType.URLENC)
-                .get(JenkinsUrls.LAST_BUILD).getBody().htmlPath().getString(JenkinsResponseElements.HTML_TITLE);
-        
-        Pattern pattern = Pattern.compile("\\s#\\d+(?!\\d+)\\s");      
-        Matcher matcher = pattern.matcher(buildName);
-        if(matcher.find()) {
-        	ConfigPropertyValue.setJenkinsBuildNumber(matcher.group().substring(2).trim());
-        } else {
-			throw new JenkinsException("Jenkins job was failed. There is no buildNumber");
-        }
-        LOGGER.info("Jenkins build number is {}", ConfigPropertyValue.getJenkinsBuildNumber());
-    }
-
-
-	private void checkBuildResult() throws InterruptedException {
-    	String buildResult;
-    	long timeout = ConfigPropertyValue.getTimeoutJenkinsAutotest().toMillis();
-    	long expiredTime = System.currentTimeMillis() + timeout;
-        
-        do {
-        	buildResult = getWhen(ContentType.JSON)
-        			.get(ConfigPropertyValue.getJenkinsBuildNumber() + JenkinsUrls.JSON_PRETTY)
-        			.getBody()
-                    .jsonPath()
-                    .getString(JenkinsResponseElements.RESULT);
-            if (buildResult == null) {
-            	if (timeout != 0 && expiredTime < System.currentTimeMillis()) {
-					throw new JenkinsException("Timeout has been expired for Jenkins build. Timeout is " +
-							ConfigPropertyValue.getTimeoutJenkinsAutotest());
-            	}
-            	Thread.sleep(JenkinsConfigProperties.JENKINS_REQUEST_TIMEOUT);
-            }
-        } while (buildResult == null);
-        
-        if(!buildResult.equals("SUCCESS")) {
-			throw new JenkinsException("Jenkins job was failed. Build result is not success");
-        }
-    }
-
-	private void setJenkinsURLServiceBaseName() {
-        String jenkinsHoleURL = getWhen(ContentType.TEXT)
-        		.get(ConfigPropertyValue.getJenkinsBuildNumber() + JenkinsUrls.LOG_TEXT)
-        		.getBody()
-                .prettyPrint();
-        Pattern pattern = Pattern.compile("Jenkins URL:(.+)");      
-        Matcher matcher = pattern.matcher(jenkinsHoleURL);
-        if(matcher.find()) {
-        	ssnURL = matcher.group(1).replaceAll("/jenkins", "");         
-        }
-            
-        pattern = Pattern.compile("Service base name:(.+)");      
-        matcher = pattern.matcher(jenkinsHoleURL);
-        if(matcher.find()) {
-        	serviceBaseName = matcher.group(1);         
-        } else {
-			throw new JenkinsException("SSN URL in Jenkins job not found");
-        }
-    }
-
-    private RequestSpecification getWhen(ContentType contentType) {
-        return given()
-                .header(JenkinsConfigProperties.AUTHORIZATION,
-						String.format(JenkinsConfigProperties.AUTHORIZATION_KEY, base64CredentialDecode
-								(ConfigPropertyValue.get(ConfigPropertyValue.JENKINS_USERNAME), ConfigPropertyValue
-										.get(ConfigPropertyValue.JENKINS_PASS))))
-        		.auth()
-                .form(ConfigPropertyValue.getJenkinsUsername(), ConfigPropertyValue.getJenkinsPassword(), config)
-        		.contentType(contentType).when();
-    }
-
-    private static String base64CredentialDecode(String user, String password) {
-        byte[] bytesEncoded = Base64.encodeBase64(String.format("%s:%s", user, password).getBytes());
-        return new String(bytesEncoded);
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsUrls.java b/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsUrls.java
deleted file mode 100644
index 04bedcc..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/jenkins/JenkinsUrls.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.jenkins;
-
-public class JenkinsUrls {
-	public static final String API = "api/xml";
-	public static final String LAST_BUILD = "lastBuild";
-	public static final String JSON_PRETTY = "/api/json?pretty=true";
-	public static final String LOG_TEXT = "/logText/progressiveText?start=0";
-
-	private JenkinsUrls() {
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/CreateNotebookDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/CreateNotebookDto.java
deleted file mode 100644
index 71bfedf..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/CreateNotebookDto.java
+++ /dev/null
@@ -1,86 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class CreateNotebookDto {
-    
-	private String image;
-    private String name;
-    @JsonProperty("template_name")
-    private String templateName;
-    private String shape;
-    private String version;
-	@JsonProperty("notebook_image_name")
-	private String imageName;
-
-	public String getImageName() {
-		return imageName;
-	}
-
-	public void setImageName(String imageName) {
-		this.imageName = imageName;
-	}
-
-	public String getImage() {
-		return image;
-	}
-
-	public void setImage(String image) {
-		this.image = image;
-	}
-
-	public String getName() {
-        return name;
-    }
-    
-    public void setName(String name) {
-        this.name = name;
-    }
-    
-    public String getShape() {
-        return shape;
-    }
-    
-    public void setShape(String shape) {
-        this.shape = shape;
-    }
-    
-	public String getTemplateName() {
-		return templateName;
-	}
-
-	public void setTemplateName(String templateName) {
-		this.templateName = templateName;
-	}
-
-    public String getVersion() {
-        return version;
-    }
-    
-    public void setVersion(String version) {
-        this.version = version;
-    }
-    
-    public CreateNotebookDto(){
-		//This empty constructor is required for proper serialization/deserialization
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployClusterDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployClusterDto.java
deleted file mode 100644
index 695a5eb..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployClusterDto.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public abstract class DeployClusterDto {
-
-	private String image;
-
-	@JsonProperty("template_name")
-	private String templateName;
-	private String name;
-
-	@JsonProperty("notebook_name")
-	private String notebookName;
-
-	public String getImage() {
-		return image;
-	}
-
-	public void setImage(String image) {
-		this.image = image;
-	}
-
-	public String getTemplateName() {
-		return templateName;
-	}
-
-	public void setTemplateName(String templateName) {
-		this.templateName = templateName;
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	public String getNotebookName() {
-		return notebookName;
-	}
-
-	public void setNotebookName(String notebookName) {
-		this.notebookName = notebookName;
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployDataProcDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployDataProcDto.java
deleted file mode 100644
index b3b64e2..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployDataProcDto.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-public class DeployDataProcDto extends DeployClusterDto {
-
-	@JsonProperty("dataproc_master_count")
-	private String dataprocMasterCount;
-
-	@JsonProperty("dataproc_slave_count")
-	private String dataprocSlaveCount;
-
-	@JsonProperty("dataproc_preemptible_count")
-	private String dataprocPreemptibleCount;
-
-	@JsonProperty("dataproc_master_instance_type")
-	private String dataprocMasterInstanceType;
-
-	@JsonProperty("dataproc_slave_instance_type")
-	private String dataprocSlaveInstanceType;
-
-	@JsonProperty("dataproc_version")
-	private String dataprocVersion;
-
-
-	public String getDataprocMasterCount() {
-		return dataprocMasterCount;
-	}
-
-	public void setDataprocMasterCount(String dataprocMasterCount) {
-		this.dataprocMasterCount = dataprocMasterCount;
-	}
-
-	public String getDataprocSlaveCount() {
-		return dataprocSlaveCount;
-	}
-
-	public void setDataprocSlaveCount(String dataprocSlaveCount) {
-		this.dataprocSlaveCount = dataprocSlaveCount;
-	}
-
-	public String getDataprocPreemptibleCount() {
-		return dataprocPreemptibleCount;
-	}
-
-	public void setDataprocPreemptibleCount(String dataprocPreemptibleCount) {
-		this.dataprocPreemptibleCount = dataprocPreemptibleCount;
-	}
-
-	public String getDataprocMasterInstanceType() {
-		return dataprocMasterInstanceType;
-	}
-
-	public void setDataprocMasterInstanceType(String dataprocMasterInstanceType) {
-		this.dataprocMasterInstanceType = dataprocMasterInstanceType;
-	}
-
-	public String getDataprocSlaveInstanceType() {
-		return dataprocSlaveInstanceType;
-	}
-
-	public void setDataprocSlaveInstanceType(String dataprocSlaveInstanceType) {
-		this.dataprocSlaveInstanceType = dataprocSlaveInstanceType;
-	}
-
-	public String getDataprocVersion() {
-		return dataprocVersion;
-	}
-
-	public void setDataprocVersion(String dataprocVersion) {
-		this.dataprocVersion = dataprocVersion;
-	}
-
-	@Override
-	public String toString() {
-		return MoreObjects.toStringHelper(this)
-				.add("image", getImage())
-				.add("template_name", getTemplateName())
-				.add("name", getName())
-				.add("notebook_name", getNotebookName())
-				.add("dataproc_master_count", dataprocMasterCount)
-				.add("dataproc_slave_count", dataprocSlaveCount)
-				.add("dataproc_preemptible_count", dataprocPreemptibleCount)
-				.add("dataproc_master_instance_type", dataprocMasterInstanceType)
-				.add("dataproc_slave_instance_type", dataprocSlaveInstanceType)
-				.add("dataproc_version", dataprocVersion)
-				.toString();
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployEMRDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployEMRDto.java
deleted file mode 100644
index 8f3ac1e..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeployEMRDto.java
+++ /dev/null
@@ -1,109 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-public class DeployEMRDto extends DeployClusterDto{
-
-	@JsonProperty("emr_instance_count")
-	private String emrInstanceCount;
-
-	@JsonProperty("emr_master_instance_type")
-	private String emrMasterInstanceType;
-
-	@JsonProperty("emr_slave_instance_type")
-	private String emrSlaveInstanceType;
-
-	@JsonProperty("emr_slave_instance_spot")
-	private boolean emrSlaveInstanceSpot = false;
-
-	@JsonProperty("emr_slave_instance_spot_pct_price")
-	private Integer emrSlaveInstanceSpotPctPrice = 0;
-
-	@JsonProperty("emr_version")
-	private String emrVersion;
-
-
-	public String getEmrInstanceCount() {
-		return emrInstanceCount;
-	}
-
-	public void setEmrInstanceCount(String emrInstanceCount) {
-		this.emrInstanceCount = emrInstanceCount;
-	}
-
-	public String getEmrMasterInstanceType() {
-		return emrMasterInstanceType;
-	}
-
-	public void setEmrMasterInstanceType(String emrMasterInstanceType) {
-		this.emrMasterInstanceType = emrMasterInstanceType;
-	}
-
-	public String getEmrSlaveInstanceType() {
-		return emrSlaveInstanceType;
-	}
-
-	public void setEmrSlaveInstanceType(String emrSlaveInstanceType) {
-		this.emrSlaveInstanceType = emrSlaveInstanceType;
-	}
-
-	public boolean isEmrSlaveInstanceSpot() {
-		return emrSlaveInstanceSpot;
-	}
-
-	public void setEmrSlaveInstanceSpot(boolean emrSlaveInstanceSpot) {
-		this.emrSlaveInstanceSpot = emrSlaveInstanceSpot;
-	}
-
-	public Integer getEmrSlaveInstanceSpotPctPrice() {
-		return emrSlaveInstanceSpotPctPrice;
-	}
-
-	public void setEmrSlaveInstanceSpotPctPrice(Integer emrSlaveInstanceSpotPctPrice) {
-		this.emrSlaveInstanceSpotPctPrice = emrSlaveInstanceSpotPctPrice;
-	}
-
-	public String getEmrVersion() {
-		return emrVersion;
-	}
-
-	public void setEmrVersion(String emrVersion) {
-		this.emrVersion = emrVersion;
-	}
-
-	@Override
-    public String toString() {
-        return MoreObjects.toStringHelper(this)
-        		.add("image", getImage())
-				.add("template_name", getTemplateName())
-        		.add("name", getName())
-				.add("notebook_name", getNotebookName())
-				.add("emr_instance_count", emrInstanceCount)
-				.add("emr_master_instance_type", emrMasterInstanceType)
-				.add("emr_slave_instance_type", emrSlaveInstanceType)
-				.add("emr_slave_instance_spot", emrSlaveInstanceSpot)
-				.add("emr_slave_instance_spot_pct_price", emrSlaveInstanceSpotPctPrice)
-				.add("emr_version", emrVersion)
-        		.toString();
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeploySparkDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/DeploySparkDto.java
deleted file mode 100644
index d1b4734..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/DeploySparkDto.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-public class DeploySparkDto extends DeployClusterDto{
-
-	@JsonProperty("dataengine_instance_count")
-	private String dataengineInstanceCount;
-
-	@JsonProperty("dataengine_instance_shape")
-	private String dataengineInstanceShape;
-
-
-	public String getDataengineInstanceCount() {
-		return dataengineInstanceCount;
-	}
-
-	public void setDataengineInstanceCount(String dataengineInstanceCount) {
-		this.dataengineInstanceCount = dataengineInstanceCount;
-	}
-
-	public String getDataengineInstanceShape() {
-		return dataengineInstanceShape;
-	}
-
-	public void setDataengineInstanceShape(String dataengineInstanceShape) {
-		this.dataengineInstanceShape = dataengineInstanceShape;
-	}
-
-	@Override
-    public String toString() {
-        return MoreObjects.toStringHelper(this)
-        		.add("image", getImage())
-				.add("template_name", getTemplateName())
-        		.add("name", getName())
-				.add("notebook_name", getNotebookName())
-				.add("dataengine_instance_shape", dataengineInstanceShape)
-				.add("dataengine_instance_count", dataengineInstanceCount)
-        		.toString();
-    }
-	
-	
-
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/ExploratoryImageDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/ExploratoryImageDto.java
deleted file mode 100644
index 84950df..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/ExploratoryImageDto.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class ExploratoryImageDto {
-
-	@JsonProperty("exploratory_name")
-	private String notebookName;
-	private String name;
-	private String description;
-
-	public ExploratoryImageDto() {
-	}
-
-	public ExploratoryImageDto(String notebookName, String name, String description) {
-		this.notebookName = notebookName;
-		this.name = name;
-		this.description = description;
-	}
-
-	public String getNotebookName() {
-		return notebookName;
-	}
-
-	public void setNotebookName(String notebookName) {
-		this.notebookName = notebookName;
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	public String getDescription() {
-		return description;
-	}
-
-	public void setDescription(String description) {
-		this.description = description;
-	}
-
-	@Override
-	public String toString() {
-		return "ExploratoryImageDto{" +
-				"notebookName='" + notebookName + '\'' +
-				", name='" + name + '\'' +
-				", description='" + description + '\'' +
-				'}';
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/ImageDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/ImageDto.java
deleted file mode 100644
index 0a5dc0b..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/ImageDto.java
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-public class ImageDto {
-
-	private String name;
-	private String description;
-	private String application;
-	private String fullName;
-	private String status;
-
-	public ImageDto() {
-	}
-
-	public ImageDto(String name, String description, String application, String fullName, String status) {
-
-		this.name = name;
-		this.description = description;
-		this.application = application;
-		this.fullName = fullName;
-		this.status = status;
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public void setName(String name) {
-		this.name = name;
-	}
-
-	public String getDescription() {
-		return description;
-	}
-
-	public void setDescription(String description) {
-		this.description = description;
-	}
-
-	public String getApplication() {
-		return application;
-	}
-
-	public void setApplication(String application) {
-		this.application = application;
-	}
-
-	public String getFullName() {
-		return fullName;
-	}
-
-	public void setFullName(String fullName) {
-		this.fullName = fullName;
-	}
-
-	public String getStatus() {
-		return status;
-	}
-
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-	@Override
-	public String toString() {
-		return "ImageDto{" +
-				"name='" + name + '\'' +
-				", description='" + description + '\'' +
-				", application='" + application + '\'' +
-				", fullName='" + fullName + '\'' +
-				", status='" + status + '\'' +
-				'}';
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/JsonMapperDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/JsonMapperDto.java
deleted file mode 100644
index 11cdd50..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/JsonMapperDto.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.type.CollectionType;
-import com.fasterxml.jackson.databind.type.TypeFactory;
-import com.google.gson.JsonParseException;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.List;
-
-public class JsonMapperDto {
-
-    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();
-
-	private JsonMapperDto() {
-	}
-
-    @SuppressWarnings("unchecked")
-	public static <T> T readNode(String pathToJson, Class<T> clasz) throws IOException {
-        try (FileInputStream in = new FileInputStream(pathToJson)){
-			return OBJECT_MAPPER.readerFor(clasz).readValue(in);
-        }
-    }
-
-    public static <T> List<T> readListOf(String pathToJson, Class<T> clasz) {
-        try (FileInputStream in = new FileInputStream(pathToJson)){
-            CollectionType typeReference = TypeFactory.defaultInstance().constructCollectionType(List.class, clasz);
-            return OBJECT_MAPPER.readValue(in, typeReference);
-        } catch (IOException e) {
-			throw new JsonParseException("Cannot read json file", e);
-        }
-    }
-
-    public static <T> T readObject(String pathToJson, Class<T> clasz) {
-        try (FileInputStream in = new FileInputStream(pathToJson)){
-            return OBJECT_MAPPER.readValue(in, clasz);
-        } catch (IOException e) {
-			throw new JsonParseException("Cannot read json file", e);
-        }
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/Lib.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/Lib.java
deleted file mode 100644
index a804c18..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/Lib.java
+++ /dev/null
@@ -1,66 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-import lombok.EqualsAndHashCode;
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-@EqualsAndHashCode
-public class Lib {
-	@JsonProperty
-	private String group;
-	@JsonProperty
-	private String name;
-	@JsonProperty
-	private String version;
-
-	public Lib() {
-	}
-
-	public Lib(String group, String name, String version) {
-		this.group = group;
-		this.name = name;
-		this.version = version;
-	}
-
-	public String getGroup() {
-		return group;
-	}
-
-	public String getName() {
-		return name;
-	}
-
-	public String getVersion() {
-		return version;
-	}
-
-	@Override
-	public String toString() {
-		return MoreObjects.toStringHelper(this)
-				.add("group", group)
-				.add("name", name)
-				.add("version", version)
-				.toString();
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/LoginDto.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/LoginDto.java
deleted file mode 100644
index 4018643..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/LoginDto.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LoginDto {
-
-    private String username;
-    private String password;
-
-	@JsonProperty("access_token")
-	private String accessToken;
-    
-    public String getUsername() {
-        return username;
-    }
-    public void setUsername(String username) {
-        this.username = username;
-    }
-    
-    public String getPassword() {
-        return password;
-    }
-    public void setPassword(String password) {
-        this.password = password;
-    }
-
-	public String getAccessToken() {
-		return accessToken;
-    }
-    
-    public LoginDto(String username, String password) {
-        this.username = username;
-        this.password = password;
-		this.accessToken = "";
-    }
-   
-    public LoginDto(){
-        
-    }
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/model/NotebookConfig.java b/integration-tests/src/main/java/com/epam/dlab/automation/model/NotebookConfig.java
deleted file mode 100644
index bac7893..0000000
--- a/integration-tests/src/main/java/com/epam/dlab/automation/model/NotebookConfig.java
+++ /dev/null
@@ -1,209 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.model;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.List;
-
-public class NotebookConfig {
-
-    @JsonProperty("notebook_template")
-    private String notebookTemplate;
-
-    @JsonProperty("data_engine_type")
-    private String dataEngineType;
-
-    @JsonProperty("full_test")
-    private boolean fullTest;
-
-
-    @JsonProperty("timeout_notebook_create")
-    private String timeoutNotebookCreate = "60m";
-
-    @JsonProperty("timeout_notebook_startup")
-    private String timeoutNotebookStartup = "20m";
-
-    @JsonProperty("timeout_notebook_shutdown")
-    private String timeoutNotebookShutdown = "20m";
-
-
-    @JsonProperty("timeout_cluster_create")
-    private String timeoutClusterCreate = "60m";
-
-	@JsonProperty("timeout_cluster_startup")
-	private String timeoutClusterStartup = "20m";
-
-	@JsonProperty("timeout_cluster_stop")
-	private String timeoutClusterStop = "20m";
-
-    @JsonProperty("timeout_cluster_terminate")
-    private String timeoutClusterTerminate = "20m";
-
-
-    @JsonProperty("timeout_lib_groups")
-    private String timeoutLibGroups = "5m";
-
-    @JsonProperty("timeout_lib_list")
-    private String timeoutLibList = "5m";
-
-    @JsonProperty("timeout_lib_install")
-    private String timeoutLibInstall = "15m";
-
-	@JsonProperty("timeout_image_create")
-	private String timeoutImageCreate = "60m";
-
-	@JsonProperty("image_test_required")
-	private boolean imageTestRequired = false;
-
-	@JsonProperty("skipped_libraries")
-	private List<Lib> skippedLibraries;
-
-	@JsonProperty("notebook_shape")
-	private String notebookShape = StringUtils.EMPTY;
-
-	@JsonProperty("des_version")
-	private String desVersion = StringUtils.EMPTY;
-
-	@JsonProperty("des_spot_required")
-	private boolean desSpotRequired = false;
-
-	@JsonProperty("des_spot_price")
-	private int desSpotPrice = 0;
-
-	public List<Lib> getSkippedLibraries() {
-		return skippedLibraries;
-	}
-
-	public String getTimeoutNotebookCreate() {
-    	return timeoutNotebookCreate;
-    }
-
-	public String getNotebookShape() {
-		return notebookShape;
-	}
-
-	public String getDesVersion() {
-		return desVersion;
-	}
-
-	public boolean isDesSpotRequired() {
-		return desSpotRequired;
-	}
-
-	public int getDesSpotPrice() {
-		return desSpotPrice;
-	}
-
-	public String getTimeoutNotebookStartup() {
-    	return timeoutNotebookStartup;
-    }
-
-    public String getTimeoutNotebookShutdown() {
-    	return timeoutNotebookShutdown;
-    }
-
-    public String getTimeoutClusterCreate() {
-    	return timeoutClusterCreate;
-    }
-
-    public String getTimeoutClusterTerminate() {
-    	return timeoutClusterTerminate;
-    }
-
-    public String getTimeoutLibGroups() {
-    	return timeoutLibGroups;
-    }
-
-    public String getTimeoutLibList() {
-    	return timeoutLibList;
-    }
-
-    public String getTimeoutLibInstall() {
-    	return timeoutLibInstall;
-    }
-
-	public String getTimeoutImageCreate() {
-		return timeoutImageCreate;
-	}
-
-    public String getNotebookTemplate() {
-    	return notebookTemplate;
-    }
-
-
-    public String getDataEngineType() {
-    	return dataEngineType;
-    }
-
-	public String getTimeoutClusterStartup() {
-		return timeoutClusterStartup;
-	}
-
-	public String getTimeoutClusterStop() {
-		return timeoutClusterStop;
-	}
-
-	public boolean isFullTest() {
-    	return fullTest;
-    }
-
-	public boolean isImageTestRequired() {
-		return imageTestRequired;
-	}
-
-	public void setImageTestRequired(boolean imageTestRequired) {
-		this.imageTestRequired = imageTestRequired;
-	}
-
-	public void setSkippedLibraries(List<Lib> skippedLibraries) {
-		this.skippedLibraries = skippedLibraries;
-	}
-
-
-	@Override
-    public String toString() {
-    	return MoreObjects.toStringHelper(this)
-    			.add("timeoutClusterCreate", timeoutClusterCreate)
-    			.add("timeoutClusterTerminate", timeoutClusterTerminate)
-				.add("timeoutClusterStartup", timeoutClusterStartup)
-				.add("timeoutClusterStop", timeoutClusterStop)
-    			.add("timeoutLibGroups", timeoutLibGroups)
-    			.add("timeoutLibInstall", timeoutLibInstall)
-				.add("timeoutImageCreate", timeoutImageCreate)
-    			.add("timeoutLibList", timeoutLibList)
-    			.add("timeoutNotebookCreate", timeoutNotebookCreate)
-    			.add("timeoutNotebookShutdown", timeoutNotebookShutdown)
-    			.add("timeoutNotebookStartup", timeoutNotebookStartup)
-    			.add("notebookTemplate", notebookTemplate)
-				.add("notebookShape", notebookShape)
-    			.add("dataEngineType", dataEngineType)
-				.add("dataEngineServiceVersion", desVersion)
-				.add("dataEngineServiceSpotRequired", desSpotRequired)
-				.add("dataEngineServiceSpotPrice", desSpotPrice)
-    			.add("fullTest", fullTest)
-				.add("imageTestRequired", imageTestRequired)
-				.add("skippedLibraries", skippedLibraries)
-    			.toString();
-    }
-
-}
diff --git a/integration-tests/src/main/resources/application.properties b/integration-tests/src/main/resources/application.properties
deleted file mode 100644
index 67a3228..0000000
--- a/integration-tests/src/main/resources/application.properties
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements.  See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.  The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License.  You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied.  See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-
-conf.file.location=${CONF_ROOT_PATH}/config.properties
-keys.directory.location=${CONF_ROOT_PATH}/keys
-notebook.test.data.copy.script=${CONF_ROOT_PATH}/copy_files.py
-notebook.test.lib.location=${CONF_ROOT_PATH}/test_libs
-jupyter.test.templates.location=${CONF_ROOT_PATH}/test_templates/jupyter
-deeplearning.test.templates.location=${CONF_ROOT_PATH}/test_templates/deeplearning
-rstudio.test.templates.location=${CONF_ROOT_PATH}/test_templates/rstudio
-tensor.test.templates.location=${CONF_ROOT_PATH}/test_templates/tensor
-zeppelin.test.templates.location=${CONF_ROOT_PATH}/test_templates/zeppelin
-scenario.jupyter.files.location=${CONF_ROOT_PATH}/scenario_jupyter/
-scenario.rstudio.files.location=${CONF_ROOT_PATH}/scenario_rstudio/
-scenario.zeppelin.files.location=${CONF_ROOT_PATH}/scenario_zeppelin/
-scenario.deeplearning.files.location=${CONF_ROOT_PATH}/scenario_deeplearning/
-scenario.tensor.files.location=${CONF_ROOT_PATH}/scenario_tensor/
-ec2.config.files.location=${CONF_ROOT_PATH}/ec2_templates/
-azure.config.files.location=${CONF_ROOT_PATH}/azure_templates/
-gcp.config.files.location=${CONF_ROOT_PATH}/gcp_templates/
diff --git a/integration-tests/src/main/resources/log4j2.xml b/integration-tests/src/main/resources/log4j2.xml
deleted file mode 100644
index 8c91840..0000000
--- a/integration-tests/src/main/resources/log4j2.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<Configuration>
-
-	<appender name="console" class="org.apache.log4j.ConsoleAppender">
-		<layout class="org.apache.log4j.PatternLayout">
-			<param name="ConversionPattern"
-				   value="%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n" />
-		</layout>
-	</appender>
-
-	<appender name="file" class="org.apache.log4j.FileAppender">
-
-		<param name="file" value="FILE.log"/>
-		<param name="immediateFlush" value="true"/>
-		<param name="threshold" value="debug"/>
-		<param name="append" value="false"/>
-
-		<layout class="org.apache.log4j.PatternLayout">
-			<param name="conversionPattern" value="%m%n"/>
-		</layout>
-	</appender>
-
-	<Appenders>
-		<Console name="console" target="SYSTEM_OUT">
-			<PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
-		</Console>
-		<File name="file" fileName="output.log" bufferedIO="false" advertiseURI="file:log.log" advertise="true">
-		</File>
-	</Appenders>
-
-
-	<Loggers>
-		<Root level="info">
-			<AppenderRef ref="file" />
-			<AppenderRef ref="console" />
-		</Root>
-		<Logger name="com.epam.dlab.automation" level="debug" additivity="false">
-			<AppenderRef ref="file" />
-			<AppenderRef ref="console" />
-    	</Logger>
-	</Loggers>
-
-</Configuration>
\ No newline at end of file
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/TestCallable.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/TestCallable.java
deleted file mode 100644
index 881b69e..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/TestCallable.java
+++ /dev/null
@@ -1,766 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test;
-
-import com.epam.dlab.automation.cloud.VirtualMachineStatusChecker;
-import com.epam.dlab.automation.cloud.aws.AmazonHelper;
-import com.epam.dlab.automation.docker.Docker;
-import com.epam.dlab.automation.helper.*;
-import com.epam.dlab.automation.http.ApiPath;
-import com.epam.dlab.automation.http.ContentType;
-import com.epam.dlab.automation.http.HttpRequest;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.epam.dlab.automation.model.*;
-import com.epam.dlab.automation.test.libs.LibsHelper;
-import com.epam.dlab.automation.test.libs.TestLibGroupStep;
-import com.epam.dlab.automation.test.libs.TestLibInstallStep;
-import com.epam.dlab.automation.test.libs.TestLibListStep;
-import com.epam.dlab.automation.test.libs.models.LibToSearchData;
-import com.jayway.restassured.response.Response;
-import org.apache.commons.lang3.StringUtils;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.io.File;
-import java.nio.file.Paths;
-import java.time.Duration;
-import java.util.*;
-import java.util.concurrent.Callable;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-
-import static org.testng.Assert.fail;
-
-public class TestCallable implements Callable<Boolean> {
-    private final static Logger LOGGER = LogManager.getLogger(TestCallable.class);
-
-    private final String notebookTemplate;
-    private final boolean fullTest;
-	private final String token, ssnExpEnvURL, ssnProUserResURL, ssnCompResURL;
-    private final String storageName;
-    private final String notebookName, clusterName, dataEngineType;
-    private final NotebookConfig notebookConfig;
-	private final List<Lib> skippedLibraries;
-	private final boolean imageTestRequired;
-	private int libsFailedToInstall = 0;
-
-	TestCallable(NotebookConfig notebookConfig) {
-    	this.notebookTemplate = notebookConfig.getNotebookTemplate();
-    	this.dataEngineType = notebookConfig.getDataEngineType();
-        this.fullTest = notebookConfig.isFullTest();
-
-		this.notebookConfig = notebookConfig;
-		this.skippedLibraries = notebookConfig.getSkippedLibraries();
-		this.imageTestRequired = notebookConfig.isImageTestRequired();
-        
-        this.token = NamingHelper.getSsnToken();
-        this.ssnExpEnvURL = NamingHelper.getSelfServiceURL(ApiPath.EXP_ENVIRONMENT);
-        this.ssnProUserResURL = NamingHelper.getSelfServiceURL(ApiPath.PROVISIONED_RES);
-        this.storageName = NamingHelper.getStorageName();
-
-        final String suffixName = NamingHelper.generateRandomValue(notebookTemplate);
-        notebookName = "nb" + suffixName;
-
-		if (NamingHelper.DATA_ENGINE.equals(dataEngineType)) {
-        	this.ssnCompResURL=NamingHelper.getSelfServiceURL(ApiPath.COMPUTATIONAL_RES_SPARK);
-			clusterName = "spark" + suffixName;
-		} else if (NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType)) {
-        	this.ssnCompResURL=NamingHelper.getSelfServiceURL(ApiPath.COMPUTATIONAL_RES);
-			clusterName = "des" + suffixName;
-        } else {
-			ssnCompResURL = "";
-			clusterName = NamingHelper.CLUSTER_ABSENT;
-			LOGGER.info("illegal argument dataEngineType {} , should be dataengine or dataengine-service",
-					dataEngineType);
-        }
-
-        LOGGER.info("   SSN exploratory environment URL is {}", ssnExpEnvURL);
-        LOGGER.info("   SSN provisioned user resources URL is {}", ssnProUserResURL);
-    }
-
-    private static Duration getDuration(String duration) {
-    	return Duration.parse("PT" + duration);
-    }
-
-	@Override
-    public Boolean call() throws Exception {
-		try {
-			final String notebookIp = createNotebook(notebookName, "");
-			testLibs();
-
-			if (imageTestRequired) {
-				executeImageTest();
-			}
-
-			final DeployClusterDto deployClusterDto = createClusterDto();
-			final String actualClusterName = deployClusterDto != null ? NamingHelper.getClusterName(
-					NamingHelper.getClusterInstanceNameForTestDES(notebookName, clusterName, dataEngineType),
-					dataEngineType, true) : NamingHelper.CLUSTER_ABSENT;
-
-			LOGGER.info("Actual cluster name of {} is {}", dataEngineType, actualClusterName);
-
-			if (NamingHelper.DATA_ENGINE.equals(dataEngineType)) {
-				LOGGER.debug("Spark cluster {} is stopping...", clusterName);
-				stopCluster();
-				LOGGER.debug("Starting Spark cluster {}...", clusterName);
-				startCluster();
-			}
-
-			if (!ConfigPropertyValue.isRunModeLocal()) {
-
-				TestDataEngineService test = new TestDataEngineService();
-				test.run(notebookName, notebookTemplate, actualClusterName);
-
-				String notebookScenarioFilesLocation = PropertiesResolver.getPropertyByName(
-						String.format(PropertiesResolver.NOTEBOOK_SCENARIO_FILES_LOCATION_PROPERTY_TEMPLATE,
-								notebookTemplate));
-				String notebookTemplatesLocation = PropertiesResolver.getPropertyByName(
-						String.format(PropertiesResolver.NOTEBOOK_TEST_TEMPLATES_LOCATION, notebookTemplate));
-				test.run2(NamingHelper.getSsnIp(), notebookIp, actualClusterName,
-						new File(notebookScenarioFilesLocation),
-						new File(notebookTemplatesLocation), notebookName);
-			}
-
-			if (NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType) && fullTest && deployClusterDto != null) {
-				stopEnvironment();
-				restartNotebookAndRedeployToTerminate(deployClusterDto);
-			}
-			if (deployClusterDto != null) {
-				terminateNotebook(deployClusterDto);
-			} else {
-				terminateNotebook(notebookName);
-			}
-
-			LOGGER.info("{} All tests finished successfully", notebookName);
-			return true;
-		} catch (AssertionError | Exception e) {
-			LOGGER.error("Error occurred while testing notebook {} with configuration {}", notebookName,
-					notebookConfig, e);
-			throw e;
-		}
-	}
-
-	private void executeImageTest() throws Exception {
-		LOGGER.debug("Tests with machine image are starting...");
-		try {
-			String imageName = "TestIm" +
-					String.valueOf(new Random().ints(0, 1000).findFirst().orElse(0));
-			LOGGER.info("Machine image with name {} from notebook {} is creating...", imageName, notebookName);
-			createMachineImageFromNotebook(notebookName, imageName);
-			LOGGER.info("Machine image with name {} was successfully created.", imageName);
-
-			String copyNotebookName = "cp" + notebookName;
-			LOGGER.info("Notebook {} from machine image {} is creating...", copyNotebookName, imageName);
-			createNotebook(copyNotebookName, imageName);
-			LOGGER.info("Notebook {} from machine image {} was successfully created.", copyNotebookName, imageName);
-
-			LOGGER.info("Comparing notebooks: {} with {}...", notebookName, copyNotebookName);
-			if (areNotebooksEqual(notebookName, copyNotebookName)) {
-				LOGGER.info("Notebooks with names {} and {} are equal", notebookName, copyNotebookName);
-			} else {
-				Assert.fail("Notebooks aren't equal. Created from machine image notebook is different from base " +
-						"exploratory");
-			}
-
-			LOGGER.debug("Notebook {} created from image {} is terminating...", copyNotebookName, imageName);
-			terminateNotebook(copyNotebookName);
-
-			LOGGER.info("Tests with machine image creation finished successfully");
-		} catch (AssertionError | Exception e) {
-			LOGGER.error("Error occurred while testing notebook {} and machine image {}", notebookName, e);
-			throw e;
-		}
-	}
-
-	private DeployClusterDto createClusterDto() throws Exception {
-	if (ConfigPropertyValue.getCloudProvider().equalsIgnoreCase(CloudProvider.AZURE_PROVIDER)
-			&& NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType)) {
-        LOGGER.info("There are no available dataengine services for Azure. Cluster creation is skipped.");
-        return null;
-    }
-	if (!NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType) && !NamingHelper.DATA_ENGINE.equals(dataEngineType)) {
-		LOGGER.info("Parameter 'dataEngineType' is unspecified or isn't valid. Cluster creation is skipped.");
-		return null;
-	}
-	String gettingStatus;
-    LOGGER.info("7. {} cluster {} will be deployed for {} ...",dataEngineType, clusterName, notebookName);
-    LOGGER.info("  {} : SSN computational resources URL is {}", notebookName, ssnCompResURL);
-
-    DeployClusterDto clusterDto = null;
-	if (NamingHelper.DATA_ENGINE.equals(dataEngineType)) {
-		clusterDto = JsonMapperDto.readNode(
-					Paths.get(String.format("%s/%s", CloudHelper.getClusterConfFileLocation(), notebookTemplate), "spark_cluster.json").toString(),
-					DeploySparkDto.class);
-	} else if (NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType)) {
-		clusterDto = JsonMapperDto.readNode(
-				Paths.get(String.format("%s/%s", CloudHelper.getClusterConfFileLocation(), notebookTemplate),
-						CloudHelper.getDockerTemplateFileForDES(notebookConfig.isDesSpotRequired())).toString(),
-				CloudHelper.getDeployClusterClass());
-    } else {
-		LOGGER.error("illegal argument dataEngineType {} , should be dataengine or dataengine-service", dataEngineType);
-		fail("illegal argument dataEngineType " + dataEngineType + ", should be dataengine or dataengine-service");
-	}
-
-    clusterDto.setName(clusterName);
-		clusterDto.setNotebookName(notebookName);
-		clusterDto = CloudHelper.populateDeployClusterDto(clusterDto, notebookConfig);
-		LOGGER.info("{}: {} cluster = {}", notebookName, dataEngineType, clusterDto);
-    Response responseDeployingCluster = new HttpRequest().webApiPut(ssnCompResURL, ContentType.JSON,
-    		clusterDto, token);
-	LOGGER.info("{}:   responseDeployingCluster.getBody() is {}", notebookName,
-			responseDeployingCluster.getBody().asString());
-	Assert.assertEquals(responseDeployingCluster.statusCode(), HttpStatusCode.OK, dataEngineType +
-			" cluster " + clusterName + " was not deployed");
-
-	gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterName, "creating",
-			getDuration(notebookConfig.getTimeoutClusterCreate()));
-    if(!ConfigPropertyValue.isRunModeLocal()) {
-        if (!(gettingStatus.contains("configuring") || gettingStatus.contains("running")))
-			throw new Exception(notebookName + ": " + dataEngineType + " cluster " + clusterName +
-					" has not been deployed. Cluster status is " + gettingStatus);
-        LOGGER.info("{}: {} cluster {} has been deployed", notebookName, dataEngineType, clusterName);
-
-		VirtualMachineStatusChecker.checkIfRunning(
-				NamingHelper.getClusterInstanceName(notebookName, clusterName, dataEngineType), false);
-
-		Docker.checkDockerStatus(
-				NamingHelper.getClusterContainerName(notebookName, clusterName, "create"), NamingHelper.getSsnIp());
-    }
-    LOGGER.info("{}:   Waiting until {} cluster {} has been configured ...", notebookName,dataEngineType,clusterName);
-
-	gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterName, "configuring",
-			getDuration(notebookConfig.getTimeoutClusterCreate()));
-    if (!gettingStatus.contains("running"))
-		throw new Exception(notebookName + ": " + dataEngineType + " cluster " + clusterName +
-				" has not been configured. Spark cluster status is " + gettingStatus);
-    LOGGER.info(" {}: {} cluster {} has been configured", notebookName, dataEngineType , clusterName);
-
-    if(!ConfigPropertyValue.isRunModeLocal()) {
-		VirtualMachineStatusChecker.checkIfRunning(
-				NamingHelper.getClusterInstanceName(notebookName, clusterName, dataEngineType), false);
-		Docker.checkDockerStatus(
-				NamingHelper.getClusterContainerName(notebookName, clusterName, "create"), NamingHelper.getSsnIp());
-    }
-    if(ConfigPropertyValue.getCloudProvider().equalsIgnoreCase(CloudProvider.AWS_PROVIDER)){
-        LOGGER.info("{}:   Check bucket {}", notebookName, storageName);
-        AmazonHelper.printBucketGrants(storageName);
-    }
-
-    return clusterDto;
-	}
-
-	private String createNotebook(String notebookName, String imageName) throws Exception {
-		LOGGER.info("6. Notebook {} will be created ...", notebookName);
-		String notebookConfigurationFile =
-				String.format(PropertiesResolver.NOTEBOOK_CONFIGURATION_FILE_TEMPLATE, notebookTemplate, notebookTemplate);
-		LOGGER.info("{} notebook configuration file: {}", notebookName, notebookConfigurationFile);
-
-		CreateNotebookDto createNoteBookRequest =
-				JsonMapperDto.readNode(
-						Paths.get(Objects.requireNonNull(CloudHelper.getClusterConfFileLocation()),
-								notebookConfigurationFile).toString(), CreateNotebookDto.class);
-
-		createNoteBookRequest.setName(notebookName);
-		if (!StringUtils.isEmpty(notebookConfig.getNotebookShape())) {
-			createNoteBookRequest.setShape(notebookConfig.getNotebookShape());
-		}
-
-		if (StringUtils.isNotBlank(imageName)) {
-			final String ssnImageDataUrl =
-					String.format(NamingHelper.getSelfServiceURL(ApiPath.IMAGE_CREATION + "/%s"), imageName);
-			LOGGER.info("Image data fetching URL: {}", ssnImageDataUrl);
-
-			Response response = new HttpRequest().webApiGet(ssnImageDataUrl, token);
-			Assert.assertEquals(response.statusCode(), HttpStatusCode.OK, "Cannot get data of machine image with name "
-					+ imageName);
-			ImageDto dto = response.as(ImageDto.class);
-			LOGGER.info("Image dto is: {}", dto);
-			createNoteBookRequest.setImageName(dto.getFullName());
-		}
-
-		LOGGER.info("Inside createNotebook(): createNotebookRequest: image is {}, templateName is {}, shape is {}, " +
-						"version is {}", createNoteBookRequest.getImage(), createNoteBookRequest.getTemplateName(),
-				createNoteBookRequest.getShape(), createNoteBookRequest.getVersion());
-
-		Response responseCreateNotebook = new HttpRequest().webApiPut(ssnExpEnvURL, ContentType.JSON,
-				createNoteBookRequest, token);
-
-		LOGGER.info(" {}:  responseCreateNotebook.getBody() is {}", notebookName,
-				responseCreateNotebook.getBody().asString());
-
-		LOGGER.info("Inside createNotebook(): responseCreateNotebook.statusCode() is {}",
-				responseCreateNotebook.statusCode());
-
-		Assert.assertEquals(responseCreateNotebook.statusCode(), HttpStatusCode.OK,
-				"Notebook " + notebookName + " was not created");
-
-		String gettingStatus = WaitForStatus.notebook(ssnProUserResURL, token, notebookName, "creating",
-				getDuration(notebookConfig.getTimeoutNotebookCreate()));
-		if (!gettingStatus.contains("running")) {
-			LOGGER.error("Notebook {} is in state {}", notebookName, gettingStatus);
-			throw new Exception("Notebook " + notebookName + " has not been created. Notebook status is " + gettingStatus);
-		}
-		LOGGER.info("   Notebook {} has been created", notebookName);
-
-		VirtualMachineStatusChecker.checkIfRunning(NamingHelper.getNotebookInstanceName(notebookName), false);
-
-		Docker.checkDockerStatus(NamingHelper.getNotebookContainerName(notebookName, "create"),
-				NamingHelper.getSsnIp());
-
-		LOGGER.info("   Notebook {} status has been verified", notebookName);
-		//get notebook IP
-		String notebookIp =
-				CloudHelper.getInstancePrivateIP(NamingHelper.getNotebookInstanceName(notebookName), false);
-
-		LOGGER.info("   Notebook {} IP is {}", notebookName, notebookIp);
-
-		return notebookIp;
-	}
-
-	private void createMachineImageFromNotebook(String notebookName, String imageName) throws InterruptedException {
-		final String ssnImageCreationURL = NamingHelper.getSelfServiceURL(ApiPath.IMAGE_CREATION);
-		ExploratoryImageDto requestBody =
-				new ExploratoryImageDto(notebookName, imageName, "Machine image for testing");
-
-		final String ssnImageDataUrl = ssnImageCreationURL + "/" + imageName;
-		LOGGER.info("Machine image data fetching URL: {}", ssnImageDataUrl);
-
-		long currentTime = System.currentTimeMillis() / 1000L;
-		long expiredTime = currentTime + getDuration(notebookConfig.getTimeoutImageCreate()).getSeconds();
-
-		Response imageCreationResponse =
-				new HttpRequest().webApiPost(ssnImageCreationURL, ContentType.JSON, requestBody, token);
-		if (imageCreationResponse.getStatusCode() != HttpStatusCode.ACCEPTED) {
-			LOGGER.error("Machine image creation response status {}, body {}", imageCreationResponse.getStatusCode(),
-					imageCreationResponse.getBody().print());
-			Assert.fail("Cannot create machine image for " + requestBody);
-		}
-
-		while (expiredTime > currentTime) {
-
-			imageCreationResponse = new HttpRequest().webApiGet(ssnImageDataUrl, token);
-			if (imageCreationResponse.getStatusCode() == HttpStatusCode.OK) {
-
-				LOGGER.info("Image creation response body for notebook {} is {}", notebookName,
-						imageCreationResponse.getBody().asString());
-
-				String actualImageStatus = imageCreationResponse.as(ImageDto.class).getStatus();
-
-				LOGGER.info("Current machine image status is: {}", actualImageStatus);
-
-				if (!"created".equalsIgnoreCase(actualImageStatus)) {
-					LOGGER.info("Wait {} sec left for machine image status {}", expiredTime - currentTime,
-							requestBody);
-					TimeUnit.SECONDS.sleep(ConfigPropertyValue.isRunModeLocal() ? 3L : 20L);
-				} else {
-					break;
-				}
-
-			} else {
-				LOGGER.error("Response status{}, body {}", imageCreationResponse.getStatusCode(),
-						imageCreationResponse.getBody().print());
-				Assert.fail("Machine image creation failed for " + notebookName);
-			}
-			currentTime = System.currentTimeMillis() / 1000L;
-		}
-
-		if (expiredTime <= currentTime) {
-			Assert.fail("Due to timeout cannot create machine image on " + notebookName + " " + requestBody);
-		}
-	}
-
-	private boolean areNotebooksEqual(String firstNotebookName, String secondNotebookName) {
-		if (firstNotebookName == null || secondNotebookName == null) {
-			Assert.fail("Wrong exploratory names passed");
-			return false;
-		}
-		Response fetchExploratoriesResponse = new HttpRequest().webApiGet(ssnProUserResURL, token);
-		if (fetchExploratoriesResponse.statusCode() != HttpStatusCode.OK) {
-			LOGGER.error("Response status: {}, body: {}", fetchExploratoriesResponse.getStatusCode(),
-					fetchExploratoriesResponse.getBody().print());
-			Assert.fail("Fetching resource list is failed");
-			return false;
-		}
-		List<Map<String, String>> notebooksTotal = fetchExploratoriesResponse.jsonPath().getList("exploratory");
-		List<Map<String, String>> notebooksFilterred = notebooksTotal.stream()
-				.filter(map -> map.get("exploratory_name").equals(firstNotebookName) ||
-						map.get("exploratory_name").equals(secondNotebookName))
-				.collect(Collectors.toList());
-
-		if (notebooksFilterred.isEmpty()) {
-			Assert.fail("Notebooks with names " + firstNotebookName + ", " + secondNotebookName + " don't exist");
-			return false;
-		}
-		if (notebooksFilterred.size() == 1) {
-			Assert.fail("Only one notebook with name " + notebooksFilterred.get(0).get("exploratory_name") +
-					" found. There is nothing for comparison");
-			return false;
-		}
-		if (notebooksFilterred.size() > 2) {
-			Assert.fail("Error occured: found " + notebooksFilterred.size() + " notebooks, but only 2 expected");
-			return false;
-		}
-
-		return areNotebooksEqualByFields(notebooksFilterred.get(0), notebooksFilterred.get(1)) &&
-				areLibListsEqual(getNotebookLibList(firstNotebookName), getNotebookLibList(secondNotebookName));
-
-	}
-
-	private boolean areNotebooksEqualByFields(Map<String, String> firstNotebook, Map<String, String> secondNotebook) {
-		if (!firstNotebook.get("shape").equals(secondNotebook.get("shape"))) {
-			Assert.fail("Notebooks aren't equal: they have different shapes");
-			return false;
-		}
-		if (!firstNotebook.get("image").equals(secondNotebook.get("image"))) {
-			Assert.fail("Notebooks aren't equal: they are created from different Docker images");
-			return false;
-		}
-		if (!firstNotebook.get("template_name").equals(secondNotebook.get("template_name"))) {
-			Assert.fail("Notebooks aren't equal: they are created from different templates");
-			return false;
-		}
-		if (!firstNotebook.get("version").equals(secondNotebook.get("version"))) {
-			Assert.fail("Notebooks aren't equal: they have different versions");
-			return false;
-		}
-		return true;
-	}
-
-	private List<Lib> getNotebookLibList(String notebookName) {
-		Map<String, String> params = new HashMap<>();
-		params.put("exploratory_name", notebookName);
-		Response libListResponse = new HttpRequest()
-				.webApiGet(NamingHelper.getSelfServiceURL(ApiPath.LIB_LIST_EXPLORATORY_FORMATTED), token, params);
-		List<Lib> libs = null;
-		if (libListResponse.getStatusCode() == HttpStatusCode.OK) {
-			libs = Arrays.asList(libListResponse.getBody().as(Lib[].class));
-		} else {
-			LOGGER.error("Response status {}, body {}", libListResponse.getStatusCode(), libListResponse.getBody()
-					.print());
-			Assert.fail("Cannot get lib list for " + libListResponse);
-			return libs;
-		}
-		return libs.stream().filter(Objects::nonNull).collect(Collectors.toList());
-	}
-
-	private boolean areLibListsEqual(List<Lib> firstLibList, List<Lib> secondLibList) {
-		if (firstLibList == null && secondLibList == null) {
-			return true;
-		}
-		if (firstLibList == null || secondLibList == null || firstLibList.size() != secondLibList.size()) {
-			return false;
-		}
-		for (Lib lib : firstLibList) {
-			String libGroup = lib.getGroup();
-			String libName = lib.getName();
-			String libVersion = lib.getVersion();
-			List<Lib> filterred = secondLibList.stream().filter(l ->
-					l.getGroup().equals(libGroup) && l.getName().equals(libName) && l.getVersion().equals(libVersion))
-					.collect(Collectors.toList());
-			if (filterred.isEmpty()) {
-				return false;
-			}
-		}
-		return true;
-	}
-
-	private void testLibs() throws Exception {
-		LOGGER.info("{}: install libraries  ...", notebookName);
-
-		TestLibGroupStep testLibGroupStep = new TestLibGroupStep(ApiPath.LIB_GROUPS, token, notebookName,
-				getDuration(notebookConfig.getTimeoutLibGroups()).getSeconds(),
-				getTemplateTestLibFile(LibsHelper.getLibGroupsPath(notebookName)));
-
-		testLibGroupStep.init();
-		testLibGroupStep.verify();
-
-		List<LibToSearchData> libToSearchDataList = JsonMapperDto.readListOf(
-				getTemplateTestLibFile(LibsHelper.getLibListPath(notebookName)), LibToSearchData.class);
-
-		LOGGER.debug("Skipped libraries for notebook {}: {}", notebookName, skippedLibraries);
-		int maxLibsFailedToInstall = libToSearchDataList.size();
-
-		for (LibToSearchData libToSearchData : libToSearchDataList) {
-			TestLibListStep testLibListStep = new TestLibListStep(ApiPath.LIB_LIST, token, notebookName,
-					getDuration(notebookConfig.getTimeoutLibList()).getSeconds(), libToSearchData);
-
-			testLibListStep.init();
-			testLibListStep.verify();
-
-			Lib lib;
-			do {
-				lib = testLibListStep.getLibs().get(new Random().nextInt(testLibListStep.getLibs().size()));
-			} while (skippedLibraries.contains(lib));
-
-			TestLibInstallStep testLibInstallStep =
-					new TestLibInstallStep(ApiPath.LIB_INSTALL, ApiPath.LIB_LIST_EXPLORATORY_FORMATTED,
-							token, notebookName, getDuration(notebookConfig.getTimeoutLibInstall()).getSeconds(), lib);
-
-			testLibInstallStep.init();
-			testLibInstallStep.verify();
-			if (!testLibInstallStep.isLibraryInstalled()) {
-				libsFailedToInstall++;
-			}
-			if (libsFailedToInstall == maxLibsFailedToInstall) {
-				Assert.fail("Test for library installing is failed: there are not any installed library");
-			}
-
-			LOGGER.info("{}: current quantity of failed libs to install: {}", notebookName, libsFailedToInstall);
-		}
-		LOGGER.info("{}: installed {} testing libraries from {}", notebookName,
-				(maxLibsFailedToInstall - libsFailedToInstall), maxLibsFailedToInstall);
-	}
-
-	private String getTemplateTestLibFile(String fileName) {
-        String absoluteFileName = Paths.get(PropertiesResolver.getNotebookTestLibLocation(), fileName).toString();
-        LOGGER.info("Absolute file name is {}", absoluteFileName);
-        return absoluteFileName;
-   }
-
-   private void restartNotebookAndRedeployToTerminate(DeployClusterDto deployClusterDto) throws Exception {
-	   restartNotebook();
-	   final String clusterNewName = redeployCluster(deployClusterDto);
-	   terminateCluster(clusterNewName);
-   }
-
-
-	private void restartNotebook() throws Exception {
-       LOGGER.info("9. Notebook {} will be re-started ...", notebookName);
-       String requestBody = "{\"notebook_instance_name\":\"" + notebookName + "\"}";
-       Response respStartNotebook = new HttpRequest().webApiPost(ssnExpEnvURL, ContentType.JSON, requestBody, token);
-       LOGGER.info("    respStartNotebook.getBody() is {}", respStartNotebook.getBody().asString());
-       Assert.assertEquals(respStartNotebook.statusCode(), HttpStatusCode.OK);
-
-		String gettingStatus = WaitForStatus.notebook(ssnProUserResURL, token, notebookName,
-			VirtualMachineStatusChecker.getStartingStatus(), getDuration(notebookConfig.getTimeoutNotebookStartup()));
-       String status = VirtualMachineStatusChecker.getRunningStatus();
-       if (!Objects.requireNonNull(status).contains(gettingStatus)){
-           throw new Exception("Notebook " + notebookName + " has not been started. Notebook status is " + gettingStatus);
-       }
-       LOGGER.info("    Notebook {} has been started", notebookName);
-
-       VirtualMachineStatusChecker.checkIfRunning(NamingHelper.getNotebookInstanceName(notebookName), false);
-
-       Docker.checkDockerStatus(NamingHelper.getNotebookContainerName(notebookName, "start"), NamingHelper.getSsnIp());
-   }
-
-   private void terminateNotebook(String notebookName) throws Exception {
-       String gettingStatus;
-       LOGGER.info("12. Notebook {} will be terminated ...", notebookName);
-       final String ssnTerminateNotebookURL = NamingHelper.getSelfServiceURL(ApiPath.getTerminateNotebookUrl(notebookName));
-       Response respTerminateNotebook = new HttpRequest().webApiDelete(ssnTerminateNotebookURL, ContentType.JSON, token);
-       LOGGER.info("    respTerminateNotebook.getBody() is {}", respTerminateNotebook.getBody().asString());
-       Assert.assertEquals(respTerminateNotebook.statusCode(), HttpStatusCode.OK);
-
-	   gettingStatus = WaitForStatus.notebook(ssnProUserResURL, token, notebookName, "terminating",
-			   getDuration(notebookConfig.getTimeoutClusterTerminate()));
-       if (!gettingStatus.contains("terminated"))
-           throw new Exception("Notebook" + notebookName + " has not been terminated. Notebook status is " +
-				   gettingStatus);
-
-       VirtualMachineStatusChecker.checkIfTerminated(NamingHelper.getNotebookInstanceName(notebookName), false);
-       Docker.checkDockerStatus(NamingHelper.getNotebookContainerName(notebookName, "terminate"), NamingHelper.getSsnIp());
-   }
-
-   private void terminateNotebook(DeployClusterDto deployCluster) throws Exception {
-	   terminateNotebook(deployCluster.getNotebookName());
-
-       String gettingStatus = WaitForStatus.getClusterStatus(
-				new HttpRequest()
-					.webApiGet(ssnProUserResURL, token)
-					.getBody()
-					.jsonPath(),
-			   deployCluster.getNotebookName(), deployCluster.getName());
-       if (!gettingStatus.contains("terminated"))
-		   throw new Exception(dataEngineType + " cluster " + deployCluster.getName() + " has not been terminated for Notebook "
-				   + deployCluster.getNotebookName() + ". Cluster status is " + gettingStatus);
-	   LOGGER.info("    {} cluster {} has been terminated for Notebook {}", dataEngineType, deployCluster.getName(),
-			   deployCluster.getNotebookName());
-
-	   VirtualMachineStatusChecker.checkIfTerminated(
-			   NamingHelper.getClusterInstanceName(
-					   deployCluster.getNotebookName(), deployCluster.getName(), dataEngineType), true);
-
-   }
-
-	private void startCluster() throws Exception {
-		String gettingStatus;
-		LOGGER.info("    Cluster {} will be started for notebook {} ...", clusterName, notebookName);
-		final String ssnStartClusterURL =
-				NamingHelper.getSelfServiceURL(ApiPath.getStartClusterUrl(notebookName, clusterName));
-		LOGGER.info("    SSN start cluster URL is {}", ssnStartClusterURL);
-
-		Response respStartCluster = new HttpRequest().webApiPut(ssnStartClusterURL, ContentType.JSON, token);
-		LOGGER.info("    respStartCluster.getBody() is {}", respStartCluster.getBody().asString());
-		Assert.assertEquals(respStartCluster.statusCode(), HttpStatusCode.OK);
-
-		gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterName, "starting",
-				getDuration(notebookConfig.getTimeoutClusterStartup()));
-		if (!gettingStatus.contains("running"))
-			throw new Exception(dataEngineType + " cluster " + clusterName +
-					" has not been started. Cluster status is " + gettingStatus);
-		LOGGER.info("    {} cluster {} has been started for notebook {}", dataEngineType, clusterName,
-				notebookName);
-
-		VirtualMachineStatusChecker.checkIfRunning(
-				NamingHelper.getClusterInstanceName(notebookName, clusterName, dataEngineType), true);
-
-		Docker.checkDockerStatus(
-				NamingHelper.getClusterContainerName(notebookName, clusterName, "start"), NamingHelper.getSsnIp());
-	}
-
-	private void stopCluster() throws Exception {
-		String gettingStatus;
-		LOGGER.info("    Cluster {} will be stopped for notebook {} ...", clusterName, notebookName);
-		final String ssnStopClusterURL =
-				NamingHelper.getSelfServiceURL(ApiPath.getStopClusterUrl(notebookName, clusterName));
-		LOGGER.info("    SSN stop cluster URL is {}", ssnStopClusterURL);
-
-		Response respStopCluster = new HttpRequest().webApiDelete(ssnStopClusterURL, ContentType.JSON, token);
-		LOGGER.info("    respStopCluster.getBody() is {}", respStopCluster.getBody().asString());
-		Assert.assertEquals(respStopCluster.statusCode(), HttpStatusCode.OK);
-
-		gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterName, "stopping",
-				getDuration(notebookConfig.getTimeoutClusterStop()));
-		if (!gettingStatus.contains("stopped"))
-			throw new Exception(dataEngineType + " cluster " + clusterName +
-					" has not been stopped. Cluster status is " + gettingStatus);
-		LOGGER.info("    {} cluster {} has been stopped for notebook {}", dataEngineType, clusterName,
-				notebookName);
-
-		VirtualMachineStatusChecker.checkIfStopped(
-				NamingHelper.getClusterInstanceName(notebookName, clusterName, dataEngineType), true);
-
-		Docker.checkDockerStatus(
-				NamingHelper.getClusterContainerName(notebookName, clusterName, "stop"), NamingHelper.getSsnIp());
-	}
-   
-   private void terminateCluster(String clusterNewName) throws Exception {
-       String gettingStatus;
-       LOGGER.info("    New cluster {} will be terminated for notebook {} ...", clusterNewName, notebookName);
-	   final String ssnTerminateClusterURL =
-			   NamingHelper.getSelfServiceURL(ApiPath.getTerminateClusterUrl(notebookName, clusterNewName));
-       LOGGER.info("    SSN terminate cluster URL is {}", ssnTerminateClusterURL);
-
-       Response respTerminateCluster = new HttpRequest().webApiDelete(ssnTerminateClusterURL, ContentType.JSON, token);
-       LOGGER.info("    respTerminateCluster.getBody() is {}", respTerminateCluster.getBody().asString());
-       Assert.assertEquals(respTerminateCluster.statusCode(), HttpStatusCode.OK);
-
-	   gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterNewName, "terminating",
-			   getDuration(notebookConfig.getTimeoutClusterTerminate()));
-       if (!gettingStatus.contains("terminated"))
-		   throw new Exception("New " + dataEngineType + " cluster " + clusterNewName +
-				   " has not been terminated. Cluster status is " + gettingStatus);
-       LOGGER.info("    New {} cluster {} has been terminated for notebook {}",dataEngineType, clusterNewName,
-			   notebookName);
-
-	   VirtualMachineStatusChecker.checkIfTerminated(
-			   NamingHelper.getClusterInstanceName(notebookName, clusterNewName, dataEngineType), true);
-
-	   Docker.checkDockerStatus(
-			   NamingHelper.getClusterContainerName(notebookName, clusterNewName, "terminate"),
-			   NamingHelper.getSsnIp());
-   }
-
-   private String redeployCluster(DeployClusterDto deployCluster) throws Exception {
-       final String clusterNewName = "New" + clusterName;
-       String gettingStatus;
-
-	   LOGGER.info("10. New {} cluster {} will be deployed for termination for notebook {} ...", dataEngineType,
-			   clusterNewName, notebookName);
-
-       deployCluster.setName(clusterNewName);
-	   deployCluster.setNotebookName(notebookName);
-       Response responseDeployingClusterNew = new HttpRequest().webApiPut(ssnCompResURL, ContentType.JSON, deployCluster, token);
-       LOGGER.info("    responseDeployingClusterNew.getBody() is {}", responseDeployingClusterNew.getBody().asString());
-       Assert.assertEquals(responseDeployingClusterNew.statusCode(), HttpStatusCode.OK);
-
-	   gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterNewName, "creating",
-			   getDuration(notebookConfig.getTimeoutClusterCreate()));
-       if (!(gettingStatus.contains("configuring") || gettingStatus.contains("running")))
-           throw new Exception("New cluster " + clusterNewName + " has not been deployed. Cluster status is " + gettingStatus);
-       LOGGER.info("    New cluster {} has been deployed", clusterNewName);
-
-       LOGGER.info("   Waiting until cluster {} has been configured ...", clusterNewName);
-	   gettingStatus = WaitForStatus.cluster(ssnProUserResURL, token, notebookName, clusterNewName, "configuring",
-			   getDuration(notebookConfig.getTimeoutClusterCreate()));
-       if (!gettingStatus.contains("running"))
-           throw new Exception("Cluster " + clusterNewName + " has not been configured. Cluster status is " +
-				   gettingStatus);
-       LOGGER.info("   Cluster {} has been configured", clusterNewName);
-
-	   VirtualMachineStatusChecker.checkIfRunning(
-			   NamingHelper.getClusterInstanceName(notebookName, clusterNewName, dataEngineType), true);
-
-	   Docker.checkDockerStatus(NamingHelper.getClusterContainerName(notebookName, clusterNewName, "create"),
-			   NamingHelper.getSsnIp());
-       return clusterNewName;
-   }
-
-   private void stopEnvironment() throws Exception {
-       String gettingStatus;
-       LOGGER.info("8. Notebook {} will be stopped ...", notebookName);
-       final String ssnStopNotebookURL = NamingHelper.getSelfServiceURL(ApiPath.getStopNotebookUrl(notebookName));
-       LOGGER.info("   SSN stop notebook URL is {}", ssnStopNotebookURL);
-
-       Response responseStopNotebook = new HttpRequest().webApiDelete(ssnStopNotebookURL, ContentType.JSON, token);
-       LOGGER.info("   responseStopNotebook.getBody() is {}", responseStopNotebook.getBody().asString());
-	   Assert.assertEquals(responseStopNotebook.statusCode(), HttpStatusCode.OK, "Notebook " + notebookName +
-			   " was not stopped");
-
-	   gettingStatus = WaitForStatus.notebook(ssnProUserResURL, token, notebookName, "stopping",
-			   getDuration(notebookConfig.getTimeoutNotebookShutdown()));
-       if (!gettingStatus.contains("stopped"))
-           throw new Exception("Notebook " + notebookName + " has not been stopped. Notebook status is " +
-				   gettingStatus);
-       LOGGER.info("   Notebook {} has been stopped", notebookName);
-	   if (!clusterName.equalsIgnoreCase(NamingHelper.CLUSTER_ABSENT)) {
-		   gettingStatus = WaitForStatus.getClusterStatus(
-				   new HttpRequest()
-						   .webApiGet(ssnProUserResURL, token)
-						   .getBody()
-						   .jsonPath(),
-				   notebookName, clusterName);
-
-		   if (NamingHelper.DATA_ENGINE.equals(dataEngineType) && !gettingStatus.contains("stopped")){
-			   throw new Exception("Computational resources has not been stopped for Notebook " + notebookName +
-					   ". Data engine status is " + gettingStatus);
-		   } else if (NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType) &&
-				   !ConfigPropertyValue.getCloudProvider().equalsIgnoreCase(CloudProvider.AZURE_PROVIDER)
-				   && !gettingStatus.contains("terminated")){
-			   throw new Exception("Computational resources has not been terminated for Notebook " + notebookName +
-					   ". Data engine service status is " + gettingStatus);
-		   }
-
-		   LOGGER.info("   Computational resources has been terminated for notebook {}", notebookName);
-
-		   if (NamingHelper.DATA_ENGINE.equals(dataEngineType)){
-			   VirtualMachineStatusChecker.checkIfStopped(NamingHelper.getClusterInstanceName(notebookName,
-					   clusterName, dataEngineType), true);
-		   } else if (NamingHelper.DATA_ENGINE_SERVICE.equals(dataEngineType)){
-			   VirtualMachineStatusChecker.checkIfTerminated(NamingHelper.getClusterInstanceName(notebookName,
-					   clusterName, dataEngineType), true);
-		   }
-
-	   }
-       Docker.checkDockerStatus(NamingHelper.getNotebookContainerName(notebookName, "stop"), NamingHelper.getSsnIp());
-   }
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/TestDataEngineService.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/TestDataEngineService.java
deleted file mode 100644
index ad73842..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/TestDataEngineService.java
+++ /dev/null
@@ -1,412 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test;
-
-import com.epam.dlab.automation.docker.AckStatus;
-import com.epam.dlab.automation.docker.SSHConnect;
-import com.epam.dlab.automation.helper.CloudHelper;
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.epam.dlab.automation.helper.NamingHelper;
-import com.epam.dlab.automation.helper.PropertiesResolver;
-import com.jcraft.jsch.*;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.file.Paths;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.List;
-import java.util.Vector;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static org.testng.Assert.*;
-
-class TestDataEngineService {
-    private final static Logger LOGGER = LogManager.getLogger(TestDataEngineService.class);
-    
-    private final static String COMMAND_COPY_TO_NOTEBOOK;
-    private final static String COMMAND_RUN_PYTHON;
-    private final static String COMMAND_RUN_PYTHON2;
-
-    static {
-        COMMAND_COPY_TO_NOTEBOOK = "scp -r -i %s -o 'StrictHostKeyChecking no' ~/%s %s@%s:/tmp/%s";
-        COMMAND_RUN_PYTHON = CloudHelper.getPythonTestingScript();
-        COMMAND_RUN_PYTHON2 = CloudHelper.getPythonTestingScript2();
-    }
-
-
-	void run(String notebookName, String notebookTemplate, String clusterName) throws Exception {
-        Session ssnSession = null;
-        try {
-            LOGGER.info("{}: Copying test data copy scripts {} to SSN {}...",
-            		notebookName, NamingHelper.getStorageName(), NamingHelper.getSsnIp());
-            ssnSession = SSHConnect.getSession(ConfigPropertyValue.getClusterOsUser(), NamingHelper.getSsnIp(), 22);
-            copyFileToSSN(ssnSession, PropertiesResolver.getNotebookTestDataCopyScriptLocation(), "");
-			executePythonScript2(ssnSession, clusterName,
-					new File(PropertiesResolver.getNotebookTestDataCopyScriptLocation()).getName(),
-					notebookName, notebookTemplate);
-        } finally {
-            if (ssnSession != null && ssnSession.isConnected()) {
-	            ssnSession.disconnect();
-	        }
-        }
-    }
-    
-    //TODO refactor two methods and make one
-	private void executePythonScript2(Session ssnSession, String clusterName, String notebookTestFile,
-									  String notebookName, String notebookTemplate) throws JSchException,
-			InterruptedException {
-        String command;
-        AckStatus status;
-
-        command = String.format(COMMAND_RUN_PYTHON2, ConfigPropertyValue.getClusterOsUser(), notebookTestFile,
-				NamingHelper.getStorageName(), notebookTemplate);
-        LOGGER.info("{}: Executing command {}...", notebookName, command);
-
-        ChannelExec runScript = SSHConnect.setCommand(ssnSession, command);
-        status = SSHConnect.checkAck(runScript);
-        LOGGER.info("{}: Script execution status message {} and code {}", notebookName, status.getMessage(), status.getStatus());
-        assertTrue(status.isOk(), notebookName + ": The python script execution wasn`t successful on : " + clusterName);
-
-        LOGGER.info("{}: Python script executed successfully ", notebookName);
-    }
-
-	private void executePythonScript(String Ip, String cluster_name, String notebookTestFile, int assignedPort,
-									 String notebookName) throws JSchException, InterruptedException {
-        String command;
-        AckStatus status;
-        Session session = SSHConnect.getForwardedConnect(ConfigPropertyValue.getClusterOsUser(), Ip, assignedPort);
-
-        try {
-            command = String.format(COMMAND_RUN_PYTHON,
-                    "/tmp/" +  notebookTestFile,
-                    NamingHelper.getStorageName(),
-                    cluster_name,
-                    ConfigPropertyValue.getClusterOsUser());
-            LOGGER.info(String.format("{}: Executing command %s...", command), notebookName);
-
-            ChannelExec runScript = SSHConnect.setCommand(session, command);
-            status = SSHConnect.checkAck(runScript);
-			LOGGER.info("{}: Script execution status message {} and status code {}", notebookName, status.getMessage(),
-					status.getStatus());
-            assertTrue(status.isOk(), notebookName + ": The python script execution wasn`t successful on " + cluster_name);
-
-            LOGGER.info("{}: Python script executed successfully ", notebookName);
-        }
-        finally {
-            if(session != null && session.isConnected()) {
-                LOGGER.info("{}: Closing notebook session", notebookName);
-                session.disconnect();
-            }
-        }
-    }
-
-	void run2(String ssnIP, String noteBookIp, String clusterName, File notebookScenarioDirectory,
-			  File notebookTemplatesDirectory, String notebookName)
-            throws JSchException, IOException, InterruptedException {
-		LOGGER.info("Python tests for directories {} and {} will be started ...", notebookScenarioDirectory,
-				notebookTemplatesDirectory);
-    	if (ConfigPropertyValue.isRunModeLocal()) {
-    		LOGGER.info("  tests are skipped");
-    		return;
-    	}
-
-		assertTrue(notebookScenarioDirectory.exists(), notebookName + ": Checking notebook scenario directory " +
-				notebookScenarioDirectory);
-        assertTrue(notebookScenarioDirectory.isDirectory());
-
-		assertTrue(notebookTemplatesDirectory.exists(), notebookName + ": Checking notebook templates directory " +
-				notebookTemplatesDirectory);
-        assertTrue(notebookTemplatesDirectory.isDirectory());
-
-        String [] templatesFiles = notebookTemplatesDirectory.list();
-        assertNotNull(templatesFiles, "Notebook " + notebookName + " templates directory is empty!");
-
-    	String [] scenarioFiles = notebookScenarioDirectory.list();
-        assertNotNull(scenarioFiles, "Notebook " + notebookName + " scenario directory is empty!");
-
-		assertEquals(1, scenarioFiles.length, "The python script location " + notebookScenarioDirectory +
-				" found more more then 1 file, expected 1 *.py file, but found multiple files: " +
-				Arrays.toString(scenarioFiles));
-        assertTrue(scenarioFiles[0].endsWith(".py"), "The python script was not found");
-        // it is assumed there should be 1 python file.
-        String notebookScenarioTestFile = scenarioFiles[0];
-
-        Session ssnSession = SSHConnect.getSession(ConfigPropertyValue.getClusterOsUser(), ssnIP, 22);
-        try {
-            LOGGER.info("{}: Copying scenario test file to SSN {}...", notebookName, ssnIP);
-			copyFileToSSN(ssnSession, Paths.get(notebookScenarioDirectory.getAbsolutePath(),
-					notebookScenarioTestFile).toString(), "");
-
-        	LOGGER.info("{}: Copying scenario test file to Notebook {}...", notebookName, noteBookIp);
-            copyFileToNotebook(ssnSession, notebookScenarioTestFile, noteBookIp, "");
-
-            LOGGER.info("In notebook templates directory {} available following template files: {}",
-                    notebookTemplatesDirectory, Arrays.toString(templatesFiles));
-
-            if(existsInSSN(ssnSession, NamingHelper.getNotebookTestTemplatesPath(notebookName))){
-				LOGGER.info("{}: Corresponding folder for notebook templates already exists in SSN {} " +
-						"and will be removed ...", notebookName, ssnIP);
-                removeFromSSN(ssnSession, NamingHelper.getNotebookTestTemplatesPath(notebookName).split("/")[0]);
-            }
-
-            LOGGER.info("{}: Creating subfolder in home directory in SSN for copying templates {}...", notebookName, ssnIP);
-            mkDirInSSN(ssnSession, NamingHelper.getNotebookTestTemplatesPath(notebookName));
-
-            LOGGER.info("{}: Copying templates to SSN {}...", notebookName, ssnIP);
-            for(String filename : templatesFiles){
-                copyFileToSSN(ssnSession, Paths.get(notebookTemplatesDirectory.getAbsolutePath(), filename).toString(),
-                        NamingHelper.getNotebookTestTemplatesPath(notebookName));
-            }
-
-            LOGGER.info("{}: Copying templates to Notebook {}...", notebookName, noteBookIp);
-            copyFileToNotebook(ssnSession, NamingHelper.getNotebookTestTemplatesPath(notebookName),
-                        noteBookIp, notebookName);
-
-			if (!clusterName.equalsIgnoreCase(NamingHelper.CLUSTER_ABSENT)
-					|| !NamingHelper.isClusterRequired(notebookName)) {
-				LOGGER.info("{}: Port forwarding from ssn {} to notebook {}...", notebookName, ssnIP, noteBookIp);
-				int assignedPort = ssnSession.setPortForwardingL(0, noteBookIp, 22);
-				LOGGER.info("{}: Port forwarded localhost:{} -> {}:22", notebookName, assignedPort, noteBookIp);
-				executePythonScript(noteBookIp, clusterName, notebookScenarioTestFile, assignedPort, notebookName);
-			}
-        }
-        finally {
-            if(ssnSession != null && ssnSession.isConnected()) {
-                LOGGER.info("{}: Closing ssn session", notebookName);
-                ssnSession.disconnect();
-            }
-        }
-    }
-
-    // Copies file to subfolder of home directory of SSN. If parameter 'destDirectoryInSSN' is empty string then copies
-    // to home directory.
-	private void copyFileToSSN(Session ssnSession, String sourceFilenameWithPath, String destDirectoryInSSN)
-			throws IOException, JSchException {
-        LOGGER.info("Copying {} to SSN...", sourceFilenameWithPath);
-        File file = new File(sourceFilenameWithPath);
-        assertTrue(file.exists(), "Source file " + sourceFilenameWithPath + " doesn't exist!");
-        LOGGER.info("Source file {} exists: {}", sourceFilenameWithPath, file.exists());
-
-        ChannelSftp channelSftp = null;
-        FileInputStream src = new FileInputStream(file);
-        try {
-        	channelSftp = SSHConnect.getChannelSftp(ssnSession);
-			channelSftp.put(src,
-					String.format("/home/%s/%s%s", ConfigPropertyValue.getClusterOsUser(), destDirectoryInSSN, file
-							.getName()));
-        } catch (SftpException e) {
-            LOGGER.error("An error occured during copying file to SSN: {}", e);
-			fail("Copying file " + file.getName() + " to SSN is failed");
-        } finally {
-            if(channelSftp != null && channelSftp.isConnected()) {
-                channelSftp.disconnect();
-            }
-        }
-
-    }
-
-    // Creates a folder in home directory of SSN
-    private void mkDirInSSN(Session ssnSession, String directoryName) throws JSchException {
-        String newDirectoryAbsolutePath = String.format("/home/%s/%s", ConfigPropertyValue.getClusterOsUser(), directoryName);
-        LOGGER.info("Creating directory {} in SSN...", newDirectoryAbsolutePath);
-
-        ChannelSftp channelSftp = null;
-        try {
-            channelSftp = SSHConnect.getChannelSftp(ssnSession);
-            if(!directoryName.equals("")){
-                String[] partsOfPath = directoryName.split("/");
-                StringBuilder sb = new StringBuilder();
-                for(String partOfPath : partsOfPath){
-                    if(partOfPath.equals("")){
-                        continue;
-                    }
-                    sb.append(partOfPath);
-                    if(!existsInSSN(ssnSession, sb.toString())){
-                        LOGGER.info("Creating directory {} in SSN...",
-                                String.format("/home/%s/%s", ConfigPropertyValue.getClusterOsUser(), sb.toString()));
-                        channelSftp.mkdir(String.format("/home/%s/%s", ConfigPropertyValue.getClusterOsUser(), sb.toString()));
-                    }
-                    sb.append("/");
-                }
-            }
-            assertTrue(channelSftp.stat(newDirectoryAbsolutePath).isDir(), "Directory " + newDirectoryAbsolutePath +
-                    " wasn't created in SSN!");
-        } catch (SftpException e) {
-            LOGGER.error("An error occured during creation directory in SSN: {}", e);
-			fail("Creating directory " + newDirectoryAbsolutePath + " in SSN is failed");
-        } finally {
-            if(channelSftp != null && channelSftp.isConnected()) {
-                channelSftp.disconnect();
-            }
-        }
-
-    }
-
-    // Checks if file exists in home directory of SSN
-    private boolean existsInSSN(Session ssnSession, String fileName) throws JSchException {
-        String homeDirectoryAbsolutePath = String.format("/home/%s", ConfigPropertyValue.getClusterOsUser());
-        LOGGER.info("Checking if file/directory {} exists in home directory {} of SSN...", fileName, homeDirectoryAbsolutePath);
-
-        boolean isFileEmbeddedIntoFolder = fileName.contains("/");
-        ChannelSftp channelSftp = null;
-        List<String> fileNames = new ArrayList<>();
-        try {
-            channelSftp = SSHConnect.getChannelSftp(ssnSession);
-            Vector fileDataList = channelSftp.ls(homeDirectoryAbsolutePath);
-            for (Object fileData : fileDataList) {
-                ChannelSftp.LsEntry entry = (ChannelSftp.LsEntry) fileData;
-                fileNames.add(entry.getFilename());
-            }
-            if(fileNames.isEmpty()){
-				LOGGER.info("Does file/directory {} exist in home directory {} of SSN: {}",
-                        fileName, homeDirectoryAbsolutePath, "false");
-                return false;
-            }
-            LOGGER.info("In home directory {} of SSN there are following files: {}",
-                    homeDirectoryAbsolutePath, fileNames);
-            if(!isFileEmbeddedIntoFolder){
-				LOGGER.info("Does file/directory {} exist in home directory {} of SSN: {}",
-                        fileName, homeDirectoryAbsolutePath, fileNames.contains(fileName));
-                return fileNames.contains(fileName);
-            }else{
-                List<String> partsOfPath =
-                        Stream.of(fileName.split("/")).filter(e -> !e.equals("")).collect(Collectors.toList());
-                StringBuilder currentPath = new StringBuilder(homeDirectoryAbsolutePath);
-                for(int i = 0; i < partsOfPath.size(); i++){
-                    String partOfPath = partsOfPath.get(i);
-                    if(fileNames.isEmpty() || !fileNames.contains(partOfPath)){
-						LOGGER.info("Does file/directory {} exist in home directory {} of SSN: {}",
-                                fileName, homeDirectoryAbsolutePath, "false");
-                        return false;
-                    }else{
-                        if(i == partsOfPath.size() - 1){
-							LOGGER.info("Does file/directory {} exist in home directory {} of SSN: {}",
-                                    fileName, homeDirectoryAbsolutePath, "true");
-                            return true;
-                        }
-                        currentPath.append("/").append(partOfPath);
-                        fileDataList = channelSftp.ls(currentPath.toString());
-                        fileNames = new ArrayList<>();
-                        for (Object fileData : fileDataList) {
-                            ChannelSftp.LsEntry entry = (ChannelSftp.LsEntry) fileData;
-                            fileNames.add(entry.getFilename());
-                        }
-
-                    }
-
-                }
-
-            }
-
-        } catch (SftpException e) {
-            LOGGER.error("An error occured during obtaining list of files from home directory in SSN: {}", e);
-        } finally {
-            if(channelSftp != null && channelSftp.isConnected()) {
-                channelSftp.disconnect();
-            }
-        }
-		LOGGER.info("Does file/directory {} exist in home directory {} of SSN: {}",
-                fileName, homeDirectoryAbsolutePath, "false");
-        return false;
-    }
-
-    // Removes file or directory from home directory of SSN
-    private void removeFromSSN(Session ssnSession, String fileNameWithRelativePath) throws JSchException {
-        String absoluteFilePath = String.format("/home/%s/%s", ConfigPropertyValue.getClusterOsUser(), fileNameWithRelativePath);
-
-        ChannelSftp channelSftp = null;
-        try {
-            channelSftp = SSHConnect.getChannelSftp(ssnSession);
-            boolean isDir = channelSftp.stat(absoluteFilePath).isDir();
-            LOGGER.info("Is file {} a directory in SSN: {}", absoluteFilePath, isDir);
-            if(isDir){
-                LOGGER.info("Removing directory {} from SSN...", absoluteFilePath);
-                recursiveDirectoryDelete(ssnSession, absoluteFilePath);
-            }else{
-                LOGGER.info("Removing file {} from SSN...", absoluteFilePath);
-                channelSftp.rm(absoluteFilePath);
-            }
-        } catch (SftpException e) {
-            LOGGER.error("An error occured during removing file {} from SSN: {}", absoluteFilePath, e);
-        } finally {
-            if(channelSftp != null && channelSftp.isConnected()) {
-                channelSftp.disconnect();
-            }
-        }
-    }
-
-    private void recursiveDirectoryDelete(Session ssnSession, String remoteDir) throws JSchException{
-        ChannelSftp channelSftp = null;
-        try{
-            channelSftp = SSHConnect.getChannelSftp(ssnSession);
-            boolean isDir = channelSftp.stat(remoteDir).isDir();
-            if(isDir){
-                Vector dirList = channelSftp.ls(remoteDir);
-                for(Object fileData : dirList){
-                    ChannelSftp.LsEntry entry = (ChannelSftp.LsEntry) fileData;
-                    if(!(entry.getFilename().equals(".") || entry.getFilename().equals(".."))){
-                        if(entry.getAttrs().isDir()){
-                            recursiveDirectoryDelete(ssnSession, remoteDir + File.separator
-                                    + entry.getFilename() + File.separator);
-                        }
-                        else{
-                            channelSftp.rm(remoteDir + entry.getFilename());
-                        }
-                    }
-                }
-                channelSftp.cd("..");
-                channelSftp.rmdir(remoteDir);
-            }
-        }
-        catch (SftpException e){
-            LOGGER.error("An error occured while deleting directory {}: {}", remoteDir, e.getMessage());
-        }
-        finally {
-            if(channelSftp != null && channelSftp.isConnected()) {
-                channelSftp.disconnect();
-            }
-        }
-    }
-
-	private void copyFileToNotebook(Session session, String filename, String ip, String notebookName)
-			throws JSchException, InterruptedException {
-    	String command = String.format(COMMAND_COPY_TO_NOTEBOOK,
-    			"keys/"+ Paths.get(ConfigPropertyValue.getAccessKeyPrivFileName()).getFileName().toString(),
-                filename,
-                ConfigPropertyValue.getClusterOsUser(),
-                ip,
-                NamingHelper.getNotebookType(notebookName));
-
-    	LOGGER.info("Copying {} to notebook...", filename);
-    	LOGGER.info("  Run command: {}", command);
-
-        ChannelExec copyResult = SSHConnect.setCommand(session, command);
-        AckStatus status = SSHConnect.checkAck(copyResult);
-
-        LOGGER.info("Copied {}: {}", filename, status.toString());
-        assertTrue(status.isOk());
-    }
-
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/TestServices.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/TestServices.java
deleted file mode 100644
index 8d9aad4..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/TestServices.java
+++ /dev/null
@@ -1,322 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test;
-
-import com.epam.dlab.automation.cloud.VirtualMachineStatusChecker;
-import com.epam.dlab.automation.docker.Docker;
-import com.epam.dlab.automation.helper.*;
-import com.epam.dlab.automation.http.ApiPath;
-import com.epam.dlab.automation.http.ContentType;
-import com.epam.dlab.automation.http.HttpRequest;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.epam.dlab.automation.jenkins.JenkinsService;
-import com.epam.dlab.automation.model.Lib;
-import com.epam.dlab.automation.model.LoginDto;
-import com.epam.dlab.automation.model.NotebookConfig;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.jayway.restassured.RestAssured;
-import com.jayway.restassured.response.Response;
-import com.jayway.restassured.response.ResponseBody;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-import org.testng.annotations.AfterClass;
-import org.testng.annotations.BeforeClass;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Objects;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.FutureTask;
-import java.util.concurrent.TimeUnit;
-
-import static org.testng.Assert.fail;
-
-@Test(singleThreaded = true)
-public class TestServices {
-
-	private final static Logger LOGGER = LogManager.getLogger(TestServices.class);
-	// This time 3 notebooks are tested in parallel - so 3 threads are used,
-	// restartNotebookAndRedeployToTerminate are a pool for future notebooks grow.
-	// needed to investigate Amazon behaviour when same AIM requests set of
-	// computation resources in parallel
-	// looks like running test in 1 thread mostly succeeds, running in 2 and more
-	// threads - usually fails.
-	private static final int N_THREADS = 10;
-	private static final long NOTEBOOK_CREATION_DELAY = 60000;
-
-	private long testTimeMillis;
-	private List<NotebookConfig> notebookConfigs;
-	private List<Lib> skippedLibs;
-
-
-	@BeforeClass
-	public void Setup() throws IOException {
-		testTimeMillis = System.currentTimeMillis();
-		// Load properties
-		ConfigPropertyValue.getJenkinsJobURL();
-
-		ObjectMapper mapper = new ObjectMapper();
-		notebookConfigs = mapper.readValue(ConfigPropertyValue.getNotebookTemplates(),
-				new TypeReference<ArrayList<NotebookConfig>>() {
-				});
-		skippedLibs = mapper.readValue(ConfigPropertyValue.getSkippedLibs(),
-				new TypeReference<ArrayList<Lib>>() {
-				});
-	}
-
-	@AfterClass
-	public void Cleanup() {
-		testTimeMillis = System.currentTimeMillis() - testTimeMillis;
-		LOGGER.info("Test time {} ms", testTimeMillis);
-	}
-
-	@Test
-	public void runTest() throws Exception {
-		testJenkinsJob();
-		testLoginSsnService();
-
-		RestAssured.baseURI = NamingHelper.getSsnURL();
-		NamingHelper.setSsnToken(ssnLoginAndKeyUpload());
-		runTestsInNotebooks();
-	}
-
-	private void testJenkinsJob() throws Exception {
-		/*
-		 * LOGGER.info("1. Jenkins Job will be started ...");
-		 *
-		 * JenkinsService jenkins = new
-		 * JenkinsService(ConfigPropertyValue.getJenkinsUsername(),
-		 * ConfigPropertyValue.getJenkinsPassword()); String buildNumber =
-		 * jenkins.runJenkinsJob(ConfigPropertyValue.getJenkinsJobURL());
-		 * LOGGER.info("   Jenkins Job has been completed");
-		 */
-
-		LOGGER.info("1. Looking for last Jenkins Job ...");
-		JenkinsService jenkins = new JenkinsService();
-		String buildNumber = jenkins.getJenkinsJob();
-		LOGGER.info("   Jenkins Job found:");
-		LOGGER.info("Build number is: {}", buildNumber);
-
-		NamingHelper.setSsnURL(jenkins.getSsnURL().replaceAll(" ", ""));
-		NamingHelper.setServiceBaseName(jenkins.getServiceBaseName().replaceAll(" ", ""));
-		Assert.assertNotNull(NamingHelper.getSsnURL(), "Jenkins URL was not generated");
-		Assert.assertNotNull(NamingHelper.getServiceBaseName(), "Service BaseName was not generated");
-		LOGGER.info("Self-Service URL is: " + NamingHelper.getSsnURL());
-		LOGGER.info("ServiceBaseName is: " + NamingHelper.getServiceBaseName());
-	}
-
-	private ResponseBody<?> login(String username, String password, int expectedStatusCode, String errorMessage) {
-		final String ssnLoginURL = NamingHelper.getSelfServiceURL(ApiPath.LOGIN);
-		LoginDto requestBody = new LoginDto(username, password);
-		Response response = new HttpRequest().webApiPost(ssnLoginURL, ContentType.JSON, requestBody);
-		LOGGER.info("   login response body for user {} is {}", username, response.getBody().asString());
-		Assert.assertEquals(response.statusCode(), expectedStatusCode, errorMessage);
-		return response.getBody();
-	}
-
-	private void testLoginSsnService() throws Exception {
-
-		String cloudProvider = ConfigPropertyValue.getCloudProvider();
-
-		LOGGER.info("Check status of SSN node on {}: {}", cloudProvider.toUpperCase(), NamingHelper.getSsnName());
-
-		String publicSsnIp = CloudHelper.getInstancePublicIP(NamingHelper.getSsnName(), true);
-		LOGGER.info("Public IP is: {}", publicSsnIp);
-		String privateSsnIp = CloudHelper.getInstancePrivateIP(NamingHelper.getSsnName(), true);
-		LOGGER.info("Private IP is: {}", privateSsnIp);
-		if (publicSsnIp == null || privateSsnIp == null) {
-			Assert.fail("There is not any virtual machine in " + cloudProvider + " with name " + NamingHelper.getSsnName());
-			return;
-		}
-		NamingHelper.setSsnIp(PropertiesResolver.DEV_MODE ? publicSsnIp : privateSsnIp);
-		VirtualMachineStatusChecker.checkIfRunning(NamingHelper.getSsnName(), true);
-		LOGGER.info("{} instance state is running", cloudProvider.toUpperCase());
-
-		LOGGER.info("2. Waiting for SSN service ...");
-		Assert.assertTrue(WaitForStatus.selfService(ConfigPropertyValue.getTimeoutSSNStartup()), "SSN service was " +
-				"not" +
-				" " +
-				"started");
-		LOGGER.info("   SSN service is available");
-
-		LOGGER.info("3. Check login");
-		final String ssnLoginURL = NamingHelper.getSelfServiceURL(ApiPath.LOGIN);
-		LOGGER.info("   SSN login URL is {}", ssnLoginURL);
-
-		ResponseBody<?> responseBody;
-		// TODO Choose username and password for this check
-		// if (!ConfigPropertyValue.isRunModeLocal()) {
-		// responseBody = login(ConfigPropertyValue.getNotIAMUsername(),
-		// ConfigPropertyValue.getNotIAMPassword(),
-		// HttpStatusCode.UNAUTHORIZED, "Unauthorized user " +
-		// ConfigPropertyValue.getNotIAMUsername());
-		// Assert.assertEquals(responseBody.asString(), "Please contact AWS
-		// administrator to create corresponding IAM User");
-		// }
-
-		responseBody = login(ConfigPropertyValue.getNotDLabUsername(), ConfigPropertyValue.getNotDLabPassword(),
-				HttpStatusCode.UNAUTHORIZED, "Unauthorized user " + ConfigPropertyValue.getNotDLabUsername());
-
-		Assert.assertEquals(responseBody.path("message"), "Username or password is invalid");
-
-		if (!ConfigPropertyValue.isRunModeLocal()) {
-			responseBody = login(ConfigPropertyValue.getUsername(), ".", HttpStatusCode.UNAUTHORIZED,
-					"Unauthorized user " + ConfigPropertyValue.getNotDLabUsername());
-			Assert.assertEquals(responseBody.path("message"), "Username or password is invalid");
-		}
-
-		LOGGER.info("Logging in with credentials {}/***", ConfigPropertyValue.getUsername());
-		responseBody = login(ConfigPropertyValue.getUsername(), ConfigPropertyValue.getPassword(), HttpStatusCode.OK,
-				"User login " + ConfigPropertyValue.getUsername() + " was not successful");
-
-		LOGGER.info("4. Check logout");
-		final String ssnlogoutURL = NamingHelper.getSelfServiceURL(ApiPath.LOGOUT);
-		LOGGER.info("   SSN logout URL is {}", ssnlogoutURL);
-
-		Response responseLogout = new HttpRequest().webApiPost(ssnlogoutURL, ContentType.ANY);
-		LOGGER.info("responseLogout.statusCode() is {}", responseLogout.statusCode());
-		Assert.assertEquals(responseLogout.statusCode(), HttpStatusCode.UNAUTHORIZED,
-				"User log out was not successful"/*
-				 * Replace to HttpStatusCode.OK when EPMCBDCCSS-938 will be fixed
-				 * and merged
-				 */);
-	}
-
-	private String ssnLoginAndKeyUpload() throws Exception {
-		LOGGER.info("5. Login as {} ...", ConfigPropertyValue.getUsername());
-		final String ssnLoginURL = NamingHelper.getSelfServiceURL(ApiPath.LOGIN);
-		final String ssnUploadKeyURL = NamingHelper.getSelfServiceURL(ApiPath.UPLOAD_KEY);
-		LOGGER.info("   SSN login URL is {}", ssnLoginURL);
-		LOGGER.info("   SSN upload key URL is {}", ssnUploadKeyURL);
-
-		ResponseBody<?> responseBody = login(ConfigPropertyValue.getUsername(), ConfigPropertyValue.getPassword(),
-				HttpStatusCode.OK, "Failed to login");
-		String token = responseBody.asString();
-		LOGGER.info("   Logged in. Obtained token: {}", token);
-
-		LOGGER.info("5.a Checking for user Key...");
-		Response respCheckKey = new HttpRequest().webApiGet(ssnUploadKeyURL, token);
-
-		if (respCheckKey.getStatusCode() == HttpStatusCode.NOT_FOUND) {
-			LOGGER.info("5.b Upload Key will be started ...");
-
-			Response respUploadKey = new HttpRequest().webApiPost(ssnUploadKeyURL, ContentType.FORMDATA, token);
-			LOGGER.info("   respUploadKey.getBody() is {}", respUploadKey.getBody().asString());
-
-			Assert.assertEquals(respUploadKey.statusCode(), HttpStatusCode.OK, "The key uploading was not successful");
-			int responseCodeAccessKey = WaitForStatus.uploadKey(ssnUploadKeyURL, token, HttpStatusCode.ACCEPTED,
-					ConfigPropertyValue.getTimeoutUploadKey());
-			LOGGER.info("   Upload Key has been completed");
-			LOGGER.info("responseAccessKey.statusCode() is {}", responseCodeAccessKey);
-			Assert.assertEquals(responseCodeAccessKey, HttpStatusCode.OK, "The key uploading was not successful");
-		} else if (respCheckKey.getStatusCode() == HttpStatusCode.OK) {
-			LOGGER.info("   Key has been uploaded already");
-		} else {
-			Assert.assertEquals(200, respCheckKey.getStatusCode(), "Failed to check User Key.");
-		}
-
-		final String nodePrefix = ConfigPropertyValue.getUsernameSimple();
-		Docker.checkDockerStatus(nodePrefix + "_create_edge_", NamingHelper.getSsnIp());
-
-		VirtualMachineStatusChecker.checkIfRunning(NamingHelper.getEdgeName(), true);
-
-		final String ssnExpEnvURL = NamingHelper.getSelfServiceURL(ApiPath.EXP_ENVIRONMENT);
-		LOGGER.info("   SSN exploratory environment URL is {}", ssnExpEnvURL);
-		final String ssnProUserResURL = NamingHelper.getSelfServiceURL(ApiPath.PROVISIONED_RES);
-		LOGGER.info("   SSN provisioned user resources URL is {}", ssnProUserResURL);
-
-		return token;
-	}
-
-	private void populateNotebookConfigWithSkippedLibs(NotebookConfig notebookCfg) {
-		if (Objects.isNull(notebookCfg.getSkippedLibraries())) {
-			notebookCfg.setSkippedLibraries(skippedLibs);
-		}
-	}
-
-	private void runTestsInNotebooks() throws Exception {
-
-		ExecutorService executor = Executors.newFixedThreadPool(
-				ConfigPropertyValue.getExecutionThreads() > 0 ? ConfigPropertyValue.getExecutionThreads() : N_THREADS);
-		notebookConfigs.forEach(this::populateNotebookConfigWithSkippedLibs);
-		List<FutureTask<Boolean>> futureTasks = new ArrayList<>();
-		if (CloudProvider.GCP_PROVIDER.equals(ConfigPropertyValue.getCloudProvider())) {
-			LOGGER.debug("Image creation tests are skipped for all types of notebooks in GCP.");
-			notebookConfigs.forEach(config -> config.setImageTestRequired(false));
-		}
-		LOGGER.info("Testing the following notebook configs: {}", notebookConfigs);
-		for (NotebookConfig notebookConfig : notebookConfigs) {
-			if (!ConfigPropertyValue.isRunModeLocal() &&
-					CloudProvider.AZURE_PROVIDER.equals(ConfigPropertyValue.getCloudProvider())) {
-				LOGGER.debug("Waiting " + NOTEBOOK_CREATION_DELAY / 1000 + " sec to start notebook creation...");
-				TimeUnit.SECONDS.sleep(NOTEBOOK_CREATION_DELAY / 1000);
-			}
-			FutureTask<Boolean> runScenarioTask = new FutureTask<>(new TestCallable(notebookConfig));
-			futureTasks.add(runScenarioTask);
-			executor.execute(runScenarioTask);
-		}
-		final long checkThreadTimeout = ConfigPropertyValue.isRunModeLocal() ? 1000 : 5000;
-		while (true) {
-			boolean done = allScenariosDone(futureTasks);
-			if (done) {
-				verifyResults(futureTasks);
-				executor.shutdown();
-				return;
-			} else {
-				TimeUnit.SECONDS.sleep(checkThreadTimeout / 1000);
-			}
-		}
-	}
-
-	private void verifyResults(List<FutureTask<Boolean>> futureTasks) {
-		List<Exception> resExceptions = new ArrayList<>();
-		for (FutureTask<Boolean> ft : futureTasks) {
-			try {
-				ft.get();
-			} catch (Exception exception) {
-				resExceptions.add(exception);
-			}
-		}
-
-		if (resExceptions.size() > 0) {
-			for (Exception exception : resExceptions) {
-				LOGGER.error("{} :\n {} ", exception, exception.getStackTrace());
-				exception.printStackTrace();
-			}
-			fail("There were failed tests with " + resExceptions.size() + " from " + futureTasks.size()
-					+ " notebooks, see stacktrace above.");
-		}
-	}
-
-	private boolean allScenariosDone(List<FutureTask<Boolean>> futureTasks) {
-		boolean done = true;
-		for (FutureTask<Boolean> ft : futureTasks) {
-			if (!ft.isDone()) {
-				done = ft.isDone();
-			}
-		}
-		return done;
-	}
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/LibraryNotFoundException.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/LibraryNotFoundException.java
deleted file mode 100644
index b8fca93..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/LibraryNotFoundException.java
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs;
-
-class LibraryNotFoundException extends RuntimeException {
-
-	private static final long serialVersionUID = 1L;
-
-	LibraryNotFoundException(String message) {
-		super(message);
-	}
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/LibsHelper.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/LibsHelper.java
deleted file mode 100644
index 471679c..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/LibsHelper.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs;
-
-import com.epam.dlab.automation.helper.NamingHelper;
-
-import static com.epam.dlab.automation.helper.NamingHelper.*;
-
-public class LibsHelper {
-
-	private static final String LIB_GROUPS_JSON = "lib_groups.json";
-	private static final String LIB_LIST_JSON = "lib_list.json";
-
-    public static String getLibGroupsPath(String notebookName){
-		if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(DEEPLEARNING))) {
-			return DEEPLEARNING + "/" + LIB_GROUPS_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(JUPYTER))) {
-			return JUPYTER + "/" + LIB_GROUPS_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(RSTUDIO))) {
-			return RSTUDIO + "/" + LIB_GROUPS_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(TENSOR))) {
-			return TENSOR + "/" + LIB_GROUPS_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(ZEPPELIN))) {
-			return ZEPPELIN + "/" + LIB_GROUPS_JSON;
-		} else return LIB_GROUPS_JSON;
-    }
-
-    public static String getLibListPath(String notebookName){
-		if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(DEEPLEARNING))) {
-			return DEEPLEARNING + "/" + LIB_LIST_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(JUPYTER))) {
-			return JUPYTER + "/" + LIB_LIST_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(RSTUDIO))) {
-			return RSTUDIO + "/" + LIB_LIST_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(TENSOR))) {
-			return TENSOR + "/" + LIB_LIST_JSON;
-		} else if (notebookName.contains(NamingHelper.getSimpleNotebookNames().get(ZEPPELIN))) {
-			return NamingHelper.ZEPPELIN + "/" + LIB_LIST_JSON;
-		} else return LIB_LIST_JSON;
-    }
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibGroupStep.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibGroupStep.java
deleted file mode 100644
index 57d56d4..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibGroupStep.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs;
-
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.epam.dlab.automation.helper.NamingHelper;
-import com.epam.dlab.automation.http.HttpRequest;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.epam.dlab.automation.model.JsonMapperDto;
-import com.jayway.restassured.response.Response;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-
-@TestDescription("Test \"Show available library groups\" ")
-public class TestLibGroupStep extends TestLibStep {
-    private static final Logger LOGGER = LogManager.getLogger(TestLibGroupStep.class);
-    private List<String> expectedGroups;
-
-    public TestLibGroupStep(String url, String token, String notebookName, long initTimeout, String jsonFilePath) {
-        super(NamingHelper.getSelfServiceURL(url), token, notebookName, initTimeout);
-        this.expectedGroups = JsonMapperDto.readListOf(jsonFilePath, String.class);
-    }
-
-    @Override
-    public void init() throws InterruptedException {
-
-        long currentTime = System.currentTimeMillis() / 1000L;
-        long expiredTime = currentTime + initTimeoutSec;
-
-        while (expiredTime > currentTime) {
-            HttpRequest httpRequest = new HttpRequest();
-            
-            Map<String, Object> params = new HashMap<>();
-            params.put("exploratory_name", notebookName);
-			Response groups= httpRequest.webApiGet(url, token,params );
-            if (groups.getStatusCode() != HttpStatusCode.OK) {
-                LOGGER.error("Response status {}, body {}", groups.getStatusCode(), groups.getBody().print());
-                Assert.fail("Cannot get lib groups " + notebookName);
-            } else {
-                List<String> availableGroups = groups.getBody().jsonPath().getList("", String.class);
-
-                if (availableGroups == null || availableGroups.isEmpty()) {
-                    LOGGER.info("Init lib group. Wait for time out {} seconds left for {}", expiredTime - currentTime, notebookName);
-                    TimeUnit.SECONDS.sleep(ConfigPropertyValue.isRunModeLocal() ? 3L : 20L);
-                } else {
-                    return;
-                }
-            }
-
-            currentTime = System.currentTimeMillis() / 1000L;
-        }
-
-        Assert.fail("Timeout Cannot get lib groups " + notebookName);
-    }
-
-    @Override
-    public void verify() {
-        HttpRequest httpRequest = new HttpRequest();
-        
-        Map<String, Object> params = new HashMap<>();
-        params.put("exploratory_name", notebookName);
-		Response response= httpRequest.webApiGet(url, token,params );
-        if (response.getStatusCode() == HttpStatusCode.OK) {
-            List<String> availableGroups = response.getBody().jsonPath().getList("", String.class);
-
-            LOGGER.info("Expected groups {}", expectedGroups);
-
-            LOGGER.info("Available groups {}", availableGroups);
-
-            for (String lib : expectedGroups) {
-                Assert.assertTrue(availableGroups.contains(lib), String.format("%s lib groups is not available for %s", lib, notebookName));
-            }
-
-        } else {
-            LOGGER.error("Response status {}, body {}", response.getStatusCode(), response.getBody().print());
-            Assert.fail("Lib group request failed for " + notebookName);
-        }
-
-        LOGGER.info(getDescription() + "passed");
-    }
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibInstallStep.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibInstallStep.java
deleted file mode 100644
index 9b9d521..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibInstallStep.java
+++ /dev/null
@@ -1,158 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs;
-
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.epam.dlab.automation.helper.NamingHelper;
-import com.epam.dlab.automation.http.ContentType;
-import com.epam.dlab.automation.http.HttpRequest;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.epam.dlab.automation.model.Lib;
-import com.epam.dlab.automation.test.libs.models.LibInstallRequest;
-import com.epam.dlab.automation.test.libs.models.LibStatusResponse;
-import com.epam.dlab.automation.test.libs.models.LibraryStatus;
-import com.jayway.restassured.response.Response;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.util.*;
-import java.util.concurrent.TimeUnit;
-
-@TestDescription("Test \"Install libraries\" ")
-public class TestLibInstallStep extends TestLibStep {
-    private final static Logger LOGGER = LogManager.getLogger(TestLibInstallStep.class);
-    private String statusUrl;
-    private Lib libToInstall;
-	private boolean isInstalled = true;
-
-	public TestLibInstallStep(String requestUrl, String statusUrl, String token, String notebookName, long
-			initTimeoutSec,
-							  Lib libToInstall) {
-
-        super(NamingHelper.getSelfServiceURL(requestUrl), token, notebookName, initTimeoutSec);
-        this.statusUrl = NamingHelper.getSelfServiceURL(statusUrl);
-        this.libToInstall = libToInstall;
-    }
-
-    @Override
-    public void init() throws InterruptedException {
-        LibInstallRequest request = new LibInstallRequest(Collections.singletonList(libToInstall), notebookName);
-
-        LOGGER.info("Install lib {}", request);
-
-        long currentTime = System.currentTimeMillis() / 1000L;
-        long expiredTime = currentTime + initTimeoutSec;
-
-        Response response = new HttpRequest().webApiPost(url, ContentType.JSON, request, token);
-        if (response.getStatusCode() != HttpStatusCode.OK) {
-            LOGGER.error("Response status {}, body {}", response.getStatusCode(), response.getBody().print());
-            Assert.fail("Cannot install libs for " + request);
-        }
-
-        while (expiredTime > currentTime) {
-
-            HttpRequest httpRequest = new HttpRequest();
-            Map<String,Object> params = new HashMap<>();
-            params.put("exploratory_name", notebookName);
-            response = httpRequest.webApiGet(statusUrl, token,params);
-            if (response.getStatusCode() == HttpStatusCode.OK) {
-
-                List<LibStatusResponse> actualStatuses = Arrays.asList(response.getBody().as(LibStatusResponse[].class));
-
-                LOGGER.info("Actual statuses {}", actualStatuses);
-
-                LibStatusResponse s = actualStatuses.stream()
-                        .filter(e -> e.getGroup().equals(libToInstall.getGroup())
-                                && e.getName().equals(libToInstall.getName())
-                                && (e.getVersion().equals(libToInstall.getVersion()) || "N/A".equals(libToInstall.getVersion())))
-						.findFirst().orElseThrow(() -> new LibraryNotFoundException(String.format("Library " +
-										"template with parameters: group=%s, name=%s, version=%s not found.",
-								libToInstall.getGroup(), libToInstall.getName(), libToInstall.getVersion())));
-
-                LOGGER.info("Lib status is {}", s);
-                
-                boolean allLibStatusesDone = true;
-                
-                for (LibraryStatus libStatus : s.getStatus()) {
-                	if (libStatus.getStatus().equals("installing")) {
-                		allLibStatusesDone = false;
-                    } 
-				}
-                if(!allLibStatusesDone) {
-                	LOGGER.info("Wait {} sec left for installation libs {}", expiredTime - currentTime, request);
-                    TimeUnit.SECONDS.sleep(ConfigPropertyValue.isRunModeLocal() ? 3L : 20L);
-                } else {
-                    return;
-                }
-                
-            } else {
-                LOGGER.error("Response status{}, body {}", response.getStatusCode(), response.getBody().print());
-                Assert.fail("Install libs failed for " + notebookName);
-            }
-
-            currentTime = System.currentTimeMillis() / 1000L;
-        }
-
-        Assert.fail("Timeout Cannot install libs on " + notebookName + " " + request);
-    }
-
-    @Override
-    public void verify() {
-        HttpRequest httpRequest = new HttpRequest();
-        Map<String,Object> params = new HashMap<>();
-        params.put("exploratory_name", notebookName);
-        Response response = httpRequest.webApiGet(statusUrl, token,params);
-        if (response.getStatusCode() == HttpStatusCode.OK) {
-
-            List<LibStatusResponse> actualStatuses = Arrays.asList(response.getBody().as(LibStatusResponse[].class));
-            LOGGER.info("Actual statuses {}", actualStatuses);
-
-            LibStatusResponse libStatusResponse = actualStatuses.stream()
-                    .filter(e -> e.getGroup().equals(libToInstall.getGroup())
-                            && e.getName().equals(libToInstall.getName())
-                            && (e.getVersion().equals(libToInstall.getVersion()) || "N/A".equals(libToInstall.getVersion())))
-					.findFirst().orElseThrow(() -> new LibraryNotFoundException(String.format("Library " +
-									"template with parameters: group=%s, name=%s, version=%s not found.",
-							libToInstall.getGroup(), libToInstall.getName(), libToInstall.getVersion())));
-
-            for (LibraryStatus libStatus : libStatusResponse.getStatus()) {
-            	if ("installed".equals(libStatus.getStatus())) {
-                    LOGGER.info("Library status of {} is {}", libToInstall, libStatusResponse);
-                } else if ("failed".equals(libStatus.getStatus())) {
-                    LOGGER.warn("Failed status with proper error message happend for {}", libStatusResponse);
-					isInstalled = false;
-                } else {
-					Assert.assertEquals("installed", libStatus.getStatus(), "Lib " + libToInstall + " is not " +
-							"installed" +
-							". Status " + libStatusResponse);
-                }
-			}
-        } else {
-            LOGGER.error("Response status{}, body {}", response.getStatusCode(), response.getBody().print());
-            Assert.fail("Install libs failed for " + notebookName);
-        }
-        LOGGER.info(getDescription() + "passed");
-    }
-
-	public boolean isLibraryInstalled() {
-		return isInstalled;
-	}
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibListStep.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibListStep.java
deleted file mode 100644
index 89566c2..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibListStep.java
+++ /dev/null
@@ -1,129 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs;
-
-import com.epam.dlab.automation.helper.ConfigPropertyValue;
-import com.epam.dlab.automation.helper.NamingHelper;
-import com.epam.dlab.automation.http.ContentType;
-import com.epam.dlab.automation.http.HttpRequest;
-import com.epam.dlab.automation.http.HttpStatusCode;
-import com.epam.dlab.automation.model.Lib;
-import com.epam.dlab.automation.test.libs.models.LibSearchRequest;
-import com.epam.dlab.automation.test.libs.models.LibToSearchData;
-import com.jayway.restassured.response.Response;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.Logger;
-import org.testng.Assert;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-
-@TestDescription("Test \"Search libraries by group and prefix\" ")
-public class TestLibListStep extends TestLibStep {
-	private static final Logger LOGGER = LogManager.getLogger(TestLibListStep.class);
-	private LibToSearchData libToSearchData;
-	private List<Lib> libs = new ArrayList<>();
-
-	public TestLibListStep(String url, String token, String notebookName, long initTimeoutSec,
-						   LibToSearchData libToSearchData) {
-		super(NamingHelper.getSelfServiceURL(url), token, notebookName, initTimeoutSec);
-		this.libToSearchData = libToSearchData;
-	}
-
-	@Override
-	public void init() throws InterruptedException {
-		LibSearchRequest request = new LibSearchRequest(notebookName, libToSearchData.getGroup(),
-				libToSearchData.getStartWith());
-
-		long currentTime = System.currentTimeMillis() / 1000L;
-		long expiredTime = currentTime + initTimeoutSec;
-
-		while (expiredTime > currentTime) {
-			Response response = new HttpRequest().webApiPost(url, ContentType.JSON, request, token);
-			LOGGER.info("Request libraries {}", request);
-
-			if (response.getStatusCode() != HttpStatusCode.OK) {
-				LOGGER.error("Response status {}, body {}", response.getStatusCode(), response.getBody().print());
-				Assert.fail("Cannot get lib list for " + request);
-			} else {
-				Map<String, String> foundLibs =
-						getLibMap(response);
-				if (foundLibs == null || foundLibs.isEmpty()) {
-					LOGGER.info("Init lib list. Wait for time out {} seconds left for {}", expiredTime - currentTime,
-							notebookName);
-					TimeUnit.SECONDS.sleep(ConfigPropertyValue.isRunModeLocal() ? 3L : 20L);
-				} else {
-					return;
-				}
-			}
-
-			currentTime = System.currentTimeMillis() / 1000L;
-		}
-
-		Assert.fail("Timeout Cannot get lib list " + notebookName);
-	}
-
-	private Map<String, String> getLibMap(Response response) {
-		return response.getBody().jsonPath().getList("")
-				.stream()
-				.collect(Collectors.toMap(o -> (String) ((Map) o).get("name"),
-						o -> (String) ((Map) o).get("version")));
-	}
-
-	@Override
-	public void verify() {
-		Map<String, String> actualFoundLibs = new HashMap<>();
-
-		LibSearchRequest request = new LibSearchRequest(notebookName, libToSearchData.getGroup(),
-				libToSearchData.getStartWith());
-		Response response = new HttpRequest().webApiPost(url, ContentType.JSON, request, token);
-		LOGGER.info("Request libraries {}", request);
-		if (response.getStatusCode() == HttpStatusCode.OK) {
-			actualFoundLibs = getLibMap(response);
-			if (actualFoundLibs == null || actualFoundLibs.isEmpty()) {
-				Assert.fail("Libraries not found");
-			} else {
-				LOGGER.info("Found libraries for {} are {}", request, actualFoundLibs);
-				for (Map.Entry<String, String> entry : actualFoundLibs.entrySet()) {
-					Assert.assertTrue(entry.getKey().toLowerCase().startsWith(libToSearchData.getStartWith().toLowerCase()),
-							String.format("Nor expected lib is found %s-%s", entry.getKey(), entry.getValue()));
-				}
-				LOGGER.info("Libraries are verified");
-			}
-
-		} else {
-			LOGGER.error("Response {}", response);
-			Assert.fail("Lib list request failed for " + request);
-		}
-		LOGGER.info(getDescription() + "passed");
-
-		for (Map.Entry<String, String> entry : actualFoundLibs.entrySet()) {
-			libs.add(new Lib(libToSearchData.getGroup(), entry.getKey(), entry.getValue()));
-		}
-	}
-
-	public List<Lib> getLibs() {
-		return libs;
-	}
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibStep.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibStep.java
deleted file mode 100644
index 5930f77..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestLibStep.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs;
-
-import java.lang.annotation.Annotation;
-import java.util.concurrent.TimeUnit;
-
-abstract class TestLibStep {
-    final String url;
-    final String token;
-    final String notebookName;
-    final long initTimeoutSec; //seconds
-
-    TestLibStep(String url, String token, String notebookName, long initTimeoutSec) {
-        this.url = url;
-        this.token = token;
-        this.notebookName = notebookName;
-        this.initTimeoutSec = initTimeoutSec;
-    }
-
-    public abstract void verify();
-
-    String getDescription() {
-        Annotation annotation = getClass().getAnnotation(TestDescription.class);
-        return (annotation != null) ? ((TestDescription) annotation).value() : "";
-    }
-
-    public void init() throws InterruptedException {
-        if (initTimeoutSec != 0L) {
-            TimeUnit.SECONDS.sleep(initTimeoutSec);
-        }
-    }
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibInstallRequest.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibInstallRequest.java
deleted file mode 100644
index ad48b07..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibInstallRequest.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs.models;
-
-import com.epam.dlab.automation.model.Lib;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-import java.util.List;
-
-
-public class LibInstallRequest {
-	@JsonProperty
-	private List<Lib> libs;
-	@JsonProperty("exploratory_name")
-	private String notebookName;
-
-	public LibInstallRequest(List<Lib> libs, String notebookName) {
-		this.libs = libs;
-		this.notebookName = notebookName;
-	}
-
-	public List<Lib> getLibs() {
-		return libs;
-	}
-
-	public String getNotebookName() {
-		return notebookName;
-	}
-
-	@Override
-	public String toString() {
-		return MoreObjects.toStringHelper(this)
-				.add("libs", libs)
-				.add("notebookName", notebookName)
-				.toString();
-	}
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibSearchRequest.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibSearchRequest.java
deleted file mode 100644
index 45ffa32..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibSearchRequest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs.models;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-/**
- * Created by yu on 7/3/17.
- */
-public class LibSearchRequest {
-    @JsonProperty("exploratory_name")
-    private String notebookName;
-    @JsonProperty
-    private String group;
-    @JsonProperty("start_with")
-    private String startWith;
-
-    public LibSearchRequest() {
-    }
-
-    public LibSearchRequest(String notebookName, String group, String startWith) {
-        this.notebookName = notebookName;
-        this.group = group;
-        this.startWith = startWith;
-    }
-
-    @Override
-    public String toString() {
-        return MoreObjects.toStringHelper(this)
-                .add("notebookName", notebookName)
-                .add("group", group)
-                .add("startWith", startWith)
-                .toString();
-    }
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibStatusResponse.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibStatusResponse.java
deleted file mode 100644
index cf79d82..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibStatusResponse.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs.models;
-
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-import java.util.List;
-
-@JsonIgnoreProperties(ignoreUnknown = true)
-public class LibStatusResponse {
-    @JsonProperty
-    private String group;
-    @JsonProperty
-    private String name;
-    @JsonProperty
-    private String version;
-    @JsonProperty
-    private List<LibraryStatus> status;
-
-    public String getGroup() {
-        return group;
-    }
-
-    public String getName() {
-        return name;
-    }
-
-    public String getVersion() {
-        return version;
-    }
-
-    public List<LibraryStatus> getStatus() {
-        return status;
-    }
-
-
-    @Override
-    public boolean equals(Object o) {
-		if (this == o) return true;
-		if (o == null || getClass() != o.getClass()) return false;
-
-		LibStatusResponse that = (LibStatusResponse) o;
-
-		return (group != null ? group.equals(that.group) : that.group == null) && (name != null ? name.equals(that
-				.name) : that.name == null) && (version != null ? version.equals(that.version) : that.version == null)
-				&& (status != null ? status.equals(that.status) : that.status == null);
-	}
-
-    @Override
-    public int hashCode() {
-        int result = group != null ? group.hashCode() : 0;
-        result = 31 * result + (name != null ? name.hashCode() : 0);
-        result = 31 * result + (version != null ? version.hashCode() : 0);
-        result = 31 * result + (status != null ? status.hashCode() : 0);
-        return result;
-    }
-
-    @Override
-    public String toString() {
-        return MoreObjects.toStringHelper(this)
-                .add("group", group)
-                .add("name", name)
-                .add("version", version)
-                .add("status", status)
-                .toString();
-    }
-}
-
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibToSearchData.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibToSearchData.java
deleted file mode 100644
index e6aa205..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibToSearchData.java
+++ /dev/null
@@ -1,37 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs.models;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-
-public class LibToSearchData {
-    @JsonProperty
-    private String group;
-    @JsonProperty("start_with")
-    private String startWith;
-
-    public String getGroup() {
-        return group;
-    }
-
-    public String getStartWith() {
-        return startWith;
-    }
-}
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibraryStatus.java b/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibraryStatus.java
deleted file mode 100644
index 1be3139..0000000
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/models/LibraryStatus.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.automation.test.libs.models;
-
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-
-public class LibraryStatus {
-	    @JsonProperty
-	    private String resource;
-	    @JsonProperty
-	    private String resourceType;
-	    @JsonProperty
-	    private String status;
-	    @JsonProperty
-	    private String error;
-	    
-		public String getResource() {
-			return resource;
-		}
-	    public String getResourceType() { return resourceType;}
-	    public String getStatus() {
-			return status;
-		}
-		public String getError() {
-			return error;
-		}
-		
-		@Override
-		public int hashCode() {
-			final int prime = 31;
-			int result = 1;
-			result = prime * result + ((error == null) ? 0 : error.hashCode());
-			result = prime * result + ((resource == null) ? 0 : resource.hashCode());
-			result = prime * result + ((status == null) ? 0 : status.hashCode());
-			result = prime * result + ((resourceType == null) ? 0 : resourceType.hashCode());
-			return result;
-		}
-		@Override
-		public boolean equals(Object obj) {
-			if (this == obj)
-				return true;
-			if (obj == null)
-				return false;
-			if (getClass() != obj.getClass())
-				return false;
-			LibraryStatus other = (LibraryStatus) obj;
-			if (error == null) {
-				if (other.error != null)
-					return false;
-			} else if (!error.equals(other.error))
-				return false;
-			if (resource == null) {
-				if (other.resource != null)
-					return false;
-			} else if (!resource.equals(other.resource))
-				return false;
-			if (status == null) {
-				if (other.status != null)
-					return false;
-			} else if (!status.equals(other.status))
-				return false;
-			if (resourceType == null) {
-				return other.resourceType == null;
-			} else return resourceType.equals(other.resourceType);
-		}
-		@Override
-		public String toString() {
-			return MoreObjects.toStringHelper(this)
-					.add("resource", resource)
-					.add("resourceType", resourceType)
-					.add("status", status)
-					.add("error", error)
-					.toString();
-		}
-	    
-	    
-}
diff --git a/integration-tests/src/test/resources/log4j2.xml b/integration-tests/src/test/resources/log4j2.xml
deleted file mode 100644
index 91d23a2..0000000
--- a/integration-tests/src/test/resources/log4j2.xml
+++ /dev/null
@@ -1,63 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" ?>
-
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~   http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-
-<Configuration>
-
-	<appender name="console" class="org.apache.log4j.ConsoleAppender">
-		<layout class="org.apache.log4j.PatternLayout">
-			<param name="ConversionPattern"
-				   value="%d{yyyy-MM-dd HH:mm:ss} %-5p %c{1}:%L - %m%n" />
-		</layout>
-	</appender>
-
-	<appender name="file" class="org.apache.log4j.FileAppender">
-
-		<param name="file" value="FILE.log"/>
-		<param name="immediateFlush" value="true"/>
-		<param name="threshold" value="debug"/>
-		<param name="append" value="false"/>
-
-		<layout class="org.apache.log4j.PatternLayout">
-			<param name="conversionPattern" value="%m%n"/>
-		</layout>
-	</appender>
-
-	<Appenders>
-		<Console name="console" target="SYSTEM_OUT">
-			<PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
-		</Console>
-			<File name="file" fileName="output.log" bufferedIO="false" advertiseURI="file:log.log" advertise="true">
-			</File>
-	</Appenders>
-
-
-	<Loggers>
-		<Root level="info">
-			<AppenderRef ref="file" />
-			<AppenderRef ref="console" />
-		</Root>
-		<Logger name="com.epam.dlab.automation" level="debug" additivity="false">
-			<AppenderRef ref="file" />
-			<AppenderRef ref="console" />
-    	</Logger>
-	</Loggers>
-
-</Configuration>
\ No newline at end of file
diff --git a/integration-tests/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker b/integration-tests/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
deleted file mode 100644
index ca6ee9c..0000000
--- a/integration-tests/src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker
+++ /dev/null
@@ -1 +0,0 @@
-mock-maker-inline
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 853ef15..46b9616 100644
--- a/pom.xml
+++ b/pom.xml
@@ -74,7 +74,7 @@
         <com.google.inject.version>4.2.0</com.google.inject.version>
         <dropwizard-template-config.version>1.4.0</dropwizard-template-config.version>
         <com.aegisql.conveyor.version>1.1.7</com.aegisql.conveyor.version>
-        <org.mongodb.version>3.3.0</org.mongodb.version>
+        <org.mongodb.version>3.8.2</org.mongodb.version>
         <junit.version>4.12</junit.version>
         <org.mockito.version>1.10.19</org.mockito.version>
         <java.version>1.8</java.version>
@@ -240,6 +240,7 @@
                         <exclude>**/*.ipynb</exclude>
                         <exclude>**/*.iml</exclude>
                         <exclude>**/*.json</exclude>
+                        <exclude>**/*.json.tpl</exclude>
                         <exclude>**/*.r</exclude>
                         <exclude>**/__init__.py</exclude>
                         <exclude>**/*.conf</exclude>
diff --git a/services/billing-aws/billing.yml b/services/billing-aws/billing.yml
index ee70487..3b1943f 100644
--- a/services/billing-aws/billing.yml
+++ b/services/billing-aws/billing.yml
@@ -26,43 +26,40 @@
 
 billingEnabled: true
 
-host: localhost
+host: MONGO_HOST
 port: 27017
 username: admin
-password: <MONGODB_PASSWORD>
+password: MONGO_PASSWORD
 database: dlabdb
 
-scheduler:
-# Schedule is comma separated values of time in format hh[:mm[:ss]]. hh - in the 24-hour clock, at 8:15PM is 20:15.
-  schedule: 0:00, 3:00, 6:00, 9:00, 12:00, 15:00, 18:00, 21:00
-
 # Adapter for reading source data. Known types: file, s3file
 adapterIn:
   - type: s3file
-    bucket: <BILLING_BUCKET_NAME>
-    path: <REPORT_PATH>
-    awsJobEnabled: <AWS_JOB_ENABLED>
-    accountId: <ACCOUNT_ID>
-    accessKeyId: <ACCESS_KEY_ID>
-    secretAccessKey: <SECRET_ACCESS_KEY>
+    bucket: BILLING_BUCKET_NAME
+    path: REPORT_PATH
+    awsJobEnabled: AWS_JOB_ENABLED
+    accountId: ACCOUNT_ID
+    accessKeyId: ACCESS_KEY_ID
+    secretAccessKey: SECRET_ACCESS_KEY
 
 # Adapter for writing converted data. Known types: console, file, s3file, mongodb
 adapterOut:
   - type: mongodlab
-    host: localhost
+    host: MONGO_HOST
     port: 27017
     username: admin
-    password: <MONGODB_PASSWORD>
+    password: MONGO_PASSWORD
     database: dlabdb
 #    bufferSize: 10000
     upsert: true
+    serviceBaseName: SERVICE_BASE_NAME
 
 # Filter for source and converted data.
 filter:
   - type: aws
     currencyCode: USD
-    columnDlabTag: <CONF_BILLING_TAG>
-    serviceBaseName: <CONF_SERVICE_BASE_NAME>
+    columnDlabTag: CONF_BILLING_TAG
+    serviceBaseName: SERVICE_BASE_NAME
 
 
 # Parser of source data to common format.
@@ -71,9 +68,9 @@
     headerLineNo: 1
     skipLines: 1
     columnMapping: >-
-      dlab_id=<DLAB_ID>;usage_date=<USAGE_DATE>;product=<PRODUCT>;
-      usage_type=<USAGE_TYPE>;usage=<USAGE>;cost=<COST>;
-      resource_id=<RESOURCE_ID>;tags=<TAGS>
+      dlab_id=DLAB_ID;usage_date=USAGE_DATE;product=PRODUCT;
+      usage_type=USAGE_TYPE;usage=USAGE;cost=COST;
+      resource_id=RESOURCE_ID;tags=TAGS
     aggregate: day
 
 
@@ -94,4 +91,4 @@
       currentLogFilename: /var/opt/dlab/log/ssn/billing.log
       archive: true
       archivedLogFilenamePattern: /var/opt/dlab/log/ssn/billing-%d{yyyy-MM-dd}.log.gz
-      archivedFileCount: 10
+      archivedFileCount: 10
\ No newline at end of file
diff --git a/services/billing-aws/pom.xml b/services/billing-aws/pom.xml
index a411c20..ec4c830 100644
--- a/services/billing-aws/pom.xml
+++ b/services/billing-aws/pom.xml
@@ -28,7 +28,7 @@
         <version>1.0</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
-    
+
     <artifactId>billing-aws</artifactId>
 
     <properties>
@@ -37,6 +37,25 @@
         <org.freemarker.version>2.3.22</org.freemarker.version>
     </properties>
 
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-dependencies</artifactId>
+                <version>2.1.3.RELEASE</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.keycloak.bom</groupId>
+                <artifactId>keycloak-adapter-bom</artifactId>
+                <version>4.8.3.Final</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
     <dependencies>
         <dependency>
             <groupId>com.epam.dlab</groupId>
@@ -64,11 +83,6 @@
             <version>${org.freemarker.version}</version>
         </dependency>
         <dependency>
-            <groupId>com.epam.dlab</groupId>
-            <artifactId>dlab-model</artifactId>
-            <version>${project.parent.version}</version>
-        </dependency>
-        <dependency>
             <groupId>javax.validation</groupId>
             <artifactId>validation-api</artifactId>
             <version>2.0.0.Final</version>
@@ -119,80 +133,62 @@
             <artifactId>guava</artifactId>
             <version>24.1-jre</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-configuration-processor</artifactId>
+            <optional>true</optional>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-mongodb</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-security</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.keycloak</groupId>
+            <artifactId>keycloak-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.keycloak</groupId>
+            <artifactId>keycloak-spring-security-adapter</artifactId>
+            <version>4.8.3.Final</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+
         <dependency>
             <groupId>org.mockito</groupId>
             <artifactId>mockito-core</artifactId>
             <version>${org.mockito.version}</version>
             <scope>test</scope>
         </dependency>
-
+        <dependency>
+            <groupId>com.epam.dlab</groupId>
+            <artifactId>dlab-model</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
 
     </dependencies>
     <build>
         <plugins>
-             <plugin>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>${maven-shade-plugin.version}</version>
+            <plugin>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
                 <executions>
                     <execution>
-                        <phase>package</phase>
                         <goals>
-                            <goal>shade</goal>
+                            <goal>repackage</goal>
                         </goals>
-                        <configuration>
-                            <createDependencyReducedPom>false</createDependencyReducedPom>
-                            <minimizeJar>false</minimizeJar>
-                            <filters>
-                                <filter>
-                                    <artifact>org.hibernate:hibernate-validator</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                                <filter>
-                                    <artifact>org.glassfish.web:javax.el</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                                <filter>
-                                    <artifact>org.jboss.logging:jboss-logging</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                                <filter>
-                                    <artifact>com.fasterxml:classmate</artifact>
-                                    <includes>**</includes>
-                                </filter>
-
-                                <filter>
-                                    <artifact>javax.validation:validation-api</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                                <filter>
-                                    <artifact>commons-logging:commons-logging</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                                <filter>
-                                    <artifact>com.fasterxml.jackson.core:jackson-databind</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                            </filters>
-                            <transformers>
-                                <transformer
-                                    	implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
-                                <transformer
-                                    	implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                                    <mainClass>com.epam.dlab.BillingScheduler</mainClass>
-									<manifestEntries>
-										<Created-By>&lt;EPAM&gt; Systems</Created-By>
-										<Name>com/epam/dlab</Name>
-										<Implementation-Title>DLab Billing Tool</Implementation-Title>
-										<Implementation-Version>${dlab.version}</Implementation-Version>
-										<Implementation-Vendor>&lt;EPAM&gt; Systems</Implementation-Vendor>
-										<Build-Time>${maven.build.timestamp}</Build-Time>
-										<Build-OS>${os.name}</Build-OS>
-										<GIT-Branch>${scmBranch}</GIT-Branch>
-										<GIT-Commit>${buildNumber}</GIT-Commit>
-									</manifestEntries>
-                                </transformer>
-                            </transformers>
-                        </configuration>
                     </execution>
                 </executions>
             </plugin>
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/BillingAwsApplication.java b/services/billing-aws/src/main/java/com/epam/dlab/BillingAwsApplication.java
new file mode 100644
index 0000000..c878370
--- /dev/null
+++ b/services/billing-aws/src/main/java/com/epam/dlab/BillingAwsApplication.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab;
+
+import com.epam.dlab.exceptions.InitializationException;
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
+
+@SpringBootApplication
+@EnableMongoRepositories
+@EnableConfigurationProperties
+public class BillingAwsApplication {
+
+    public static void main(String[] args) throws InitializationException {
+        SpringApplication.run(BillingAwsApplication.class, args);
+        BillingServiceImpl.startApplication(args);
+    }
+}
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/BillingScheduler.java b/services/billing-aws/src/main/java/com/epam/dlab/BillingScheduler.java
deleted file mode 100644
index 5db8269..0000000
--- a/services/billing-aws/src/main/java/com/epam/dlab/BillingScheduler.java
+++ /dev/null
@@ -1,267 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab;
-
-import com.epam.dlab.configuration.BillingToolConfiguration;
-import com.epam.dlab.configuration.BillingToolConfigurationFactory;
-import com.epam.dlab.configuration.SchedulerConfiguration;
-import com.epam.dlab.core.parser.ParserBase;
-import com.epam.dlab.exceptions.AdapterException;
-import com.epam.dlab.exceptions.DlabException;
-import com.epam.dlab.exceptions.InitializationException;
-import com.epam.dlab.exceptions.ParseException;
-import com.epam.dlab.util.ServiceUtils;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.Arrays;
-
-/**
- * Billing scheduler for loading billing report.
- */
-public class BillingScheduler implements Runnable {
-	private static final Logger LOGGER = LoggerFactory.getLogger(BillingScheduler.class);
-
-	/**
-	 * Timeout for check the schedule in milliseconds.
-	 */
-	private static final long CHECK_TIMEOUT_MILLIS = 60000;
-
-	/**
-	 * Billing scheduler instance.
-	 */
-	private static BillingScheduler scheduler;
-	private final boolean enabled;
-	private final BillingToolConfiguration configuration;
-
-	/**
-	 * Starts the scheduler for given configuration.
-	 *
-	 * @param filename the name of file for billing configuration.
-	 * @throws InitializationException
-	 */
-	public static void start(String filename) throws InitializationException {
-		if (scheduler == null) {
-			scheduler = new BillingScheduler(filename);
-			scheduler.thread.start();
-		} else {
-			LOGGER.debug("Billing scheduler already started");
-		}
-	}
-
-	/**
-	 * Stops the scheduler.
-	 */
-	public static void stop() {
-		if (scheduler.thread != null) {
-			LOGGER.debug("Billing scheduler will be stopped ...");
-			synchronized (scheduler.thread) {
-				scheduler.thread.interrupt();
-				scheduler.thread = null;
-			}
-			LOGGER.info("Scheduler has been stopped");
-		}
-	}
-
-
-	/**
-	 * Thread of the scheduler.
-	 */
-	private Thread thread = new Thread(this, this.getClass().getSimpleName());
-
-	/**
-	 * Name of configuration file.
-	 */
-	private final String confFilename;
-
-	/**
-	 * Current schedule.
-	 */
-	private SchedulerConfiguration schedule;
-
-	/**
-	 * Instantiate billing scheduler for given configuration.
-	 *
-	 * @param filename the name of file for billing configuration.
-	 * @throws InitializationException
-	 */
-	public BillingScheduler(String filename) throws InitializationException {
-		this.confFilename = filename;
-		LOGGER.debug("Billing report configuration file: {}", filename);
-		configuration = BillingToolConfigurationFactory.build(confFilename, BillingToolConfiguration.class);
-		this.enabled = configuration.isBillingEnabled();
-		setSchedule(configuration);
-	}
-
-	/**
-	 * Loads the billing report.
-	 *
-	 * @throws InitializationException
-	 * @throws AdapterException
-	 * @throws ParseException
-	 */
-	private void load() throws InitializationException, AdapterException, ParseException {
-		ParserBase parser = configuration.build();
-		long time = schedule.getNearTime().getTimeInMillis();
-		if (setSchedule(configuration)) {
-			if (time != schedule.getNearTime().getTimeInMillis()) {
-				LOGGER.info("Previous billing schedule has been canceled");
-				return;
-			}
-		}
-
-		LOGGER.info("Try to laod billing report for configuration: {}", configuration);
-		parser.parse();
-		if (!parser.getStatistics().isEmpty()) {
-			LOGGER.info("Billing report parser statistics:");
-			for (int i = 0; i < parser.getStatistics().size(); i++) {
-				LOGGER.info("  {}", parser.getStatistics().get(i).toString());
-			}
-		}
-	}
-
-	/**
-	 * Read the schedule from configuration.
-	 *
-	 * @param configuration the billing configuration.
-	 * @return <b>true>/b> if new schedule was loaded, otherwise <b>false</b>.
-	 * @throws InitializationException
-	 */
-	private boolean setSchedule(BillingToolConfiguration configuration) throws InitializationException {
-		SchedulerConfiguration schedulerConfiguration = configuration.getScheduler();
-		boolean isModified = false;
-		if (schedulerConfiguration == null) {
-			throw new InitializationException(String.format("Schedule of billing report in configuration file \"%s " +
-					"not found", confFilename));
-		}
-		if (this.schedule == null) {
-			isModified = true;
-			LOGGER.debug("Billing report schedule: {}", schedulerConfiguration);
-		} else {
-			this.schedule.adjustStartTime();
-			if (!schedulerConfiguration.equals(this.schedule)) {
-				isModified = true;
-				LOGGER.debug("New billing report schedule has been loaded: {}", schedulerConfiguration);
-			}
-		}
-
-		try {
-			this.schedule = new SchedulerConfiguration();
-			this.schedule.setSchedule(schedulerConfiguration.getSchedule());
-			this.schedule.build();
-		} catch (Exception e) {
-			throw new InitializationException("Cannot configure billing scheduler. " + e.getLocalizedMessage(), e);
-		}
-
-		return isModified;
-	}
-
-	@Override
-	public void run() {
-		if (enabled) {
-			LOGGER.info("Billing scheduler has been started");
-			long startTimeMillis = schedule.getNextTime().getTimeInMillis();
-			long timeMillis;
-			LOGGER.info("Billing report will be loaded at {}", schedule.getNextTime().getTime());
-
-			try {
-				while (!Thread.currentThread().isInterrupted()) {
-					if (startTimeMillis <= System.currentTimeMillis()) {
-						try {
-							LOGGER.debug("Try to load billing report for schedule {}",
-									schedule.getNextTime().getTime());
-							load();
-						} catch (InitializationException | AdapterException | ParseException e) {
-							LOGGER.error("Error loading billing report: {}", e.getLocalizedMessage(), e);
-						}
-						startTimeMillis = schedule.getNextTime().getTimeInMillis();
-						LOGGER.info("Billing report will be loaded at {}", schedule.getNextTime().getTime());
-					} else {
-						schedule.adjustStartTime();
-						timeMillis = schedule.getNextTime().getTimeInMillis();
-						if (startTimeMillis != timeMillis) {
-							LOGGER.info("Billing report will be loaded at {}", schedule.getNextTime().getTime());
-							startTimeMillis = timeMillis;
-						}
-					}
-
-					try {
-						timeMillis = startTimeMillis - System.currentTimeMillis();
-						if (timeMillis > 0) {
-							timeMillis = Math.min(CHECK_TIMEOUT_MILLIS, timeMillis);
-							Thread.sleep(timeMillis);
-						}
-					} catch (InterruptedException e) {
-						LOGGER.warn("Billing scheduler interrupted", e);
-						Thread.currentThread().interrupt();
-					}
-				}
-			} catch (Exception e) {
-				LOGGER.error("Unhandled billing report error: {}", e.getLocalizedMessage(), e);
-			}
-			LOGGER.info("Scheduler has been stopped");
-		} else {
-			LOGGER.info("Billing scheduler is disabled");
-		}
-	}
-
-
-	/**
-	 * Runs billing scheduler for given configuration file.
-	 *
-	 * @param args the arguments of command line.
-	 * @throws InitializationException
-	 */
-	public static void main(String[] args) throws InitializationException {
-		if (ServiceUtils.printAppVersion(BillingTool.class, args)) {
-			return;
-		}
-
-		String confName = null;
-		for (int i = 0; i < args.length; i++) {
-			if (BillingTool.isKey("help", args[i])) {
-				i++;
-				Help.usage(i < args.length ? Arrays.copyOfRange(args, i, args.length) : null);
-				return;
-			} else if (BillingTool.isKey("conf", args[i])) {
-				i++;
-				if (i < args.length) {
-					confName = args[i];
-				} else {
-					throw new InitializationException("Missing the name of configuration file");
-				}
-			} else {
-				throw new InitializationException("Unknow argument: " + args[i]);
-			}
-		}
-
-		if (confName == null) {
-			Help.usage();
-			throw new InitializationException("Missing arguments");
-		}
-
-		BillingTool.setLoggerLevel();
-		try {
-			start(confName);
-		} catch (Exception e) {
-			throw new DlabException("Billing scheduler failed", e);
-		}
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java b/services/billing-aws/src/main/java/com/epam/dlab/BillingService.java
similarity index 82%
rename from integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java
rename to services/billing-aws/src/main/java/com/epam/dlab/BillingService.java
index 1e49a60..9b4d6db 100644
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/BillingService.java
@@ -17,7 +17,12 @@
  * under the License.
  */
 
-package com.epam.dlab.automation.docker;
+package com.epam.dlab;
 
-class Labels {
+import com.epam.dlab.dto.billing.BillingData;
+
+import java.util.List;
+
+public interface BillingService {
+    List<BillingData> getBillingData();
 }
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/BillingServiceImpl.java b/services/billing-aws/src/main/java/com/epam/dlab/BillingServiceImpl.java
new file mode 100644
index 0000000..8ac6c48
--- /dev/null
+++ b/services/billing-aws/src/main/java/com/epam/dlab/BillingServiceImpl.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab;
+
+import com.epam.dlab.configuration.BillingToolConfiguration;
+import com.epam.dlab.configuration.BillingToolConfigurationFactory;
+import com.epam.dlab.core.parser.ParserBase;
+import com.epam.dlab.dto.billing.BillingData;
+import com.epam.dlab.exceptions.DlabException;
+import com.epam.dlab.exceptions.InitializationException;
+import com.epam.dlab.util.ServiceUtils;
+import org.bson.Document;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.stereotype.Service;
+
+import java.time.LocalDate;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+import static com.epam.dlab.model.aws.ReportLine.FIELD_COST;
+import static com.epam.dlab.model.aws.ReportLine.FIELD_CURRENCY_CODE;
+import static com.epam.dlab.model.aws.ReportLine.FIELD_DLAB_ID;
+import static com.epam.dlab.model.aws.ReportLine.FIELD_PRODUCT;
+import static com.epam.dlab.model.aws.ReportLine.FIELD_RESOURCE_TYPE;
+import static com.epam.dlab.model.aws.ReportLine.FIELD_USAGE_DATE;
+
+@Service
+public class BillingServiceImpl implements BillingService {
+	private static final Logger LOGGER = LoggerFactory.getLogger(BillingServiceImpl.class);
+	private static BillingToolConfiguration configuration;
+
+	public List<BillingData> getBillingData() {
+		try {
+			ParserBase parser = configuration.build();
+
+			LOGGER.info("Try to load billing report for configuration: {}", configuration);
+			List<BillingData> billingData = parser.parse()
+					.stream()
+					.map(this::toBillingData)
+					.collect(Collectors.toList());
+
+			if (!parser.getStatistics().isEmpty()) {
+				LOGGER.info("Billing report parser statistics:");
+				for (int i = 0; i < parser.getStatistics().size(); i++) {
+					LOGGER.info("  {}", parser.getStatistics().get(i).toString());
+				}
+			}
+
+			return billingData;
+		} catch (Exception e) {
+			LOGGER.error("Cannot load billing report: {}", e.getMessage(), e);
+			return Collections.emptyList();
+		}
+	}
+
+	private BillingData toBillingData(Document billingData) {
+		return BillingData.builder()
+				.tag(Optional.ofNullable(billingData.getString(FIELD_DLAB_ID)).map(String::toLowerCase).orElse(null))
+				.usageDateFrom(Optional.ofNullable(billingData.getString(FIELD_USAGE_DATE)).map(LocalDate::parse).orElse(null))
+				.usageDateTo(Optional.ofNullable(billingData.getString(FIELD_USAGE_DATE)).map(LocalDate::parse).orElse(null))
+				.usageDate(billingData.getString(FIELD_USAGE_DATE))
+				.product(billingData.getString(FIELD_PRODUCT))
+				.usageType(billingData.getString(FIELD_RESOURCE_TYPE))
+				.cost(billingData.getDouble(FIELD_COST))
+				.currency(billingData.getString(FIELD_CURRENCY_CODE))
+				.build();
+	}
+
+	public static void initialize(String filename) throws InitializationException {
+		LOGGER.debug("Billing report configuration file: {}", filename);
+		configuration = BillingToolConfigurationFactory.build(filename, BillingToolConfiguration.class);
+	}
+
+	public static void startApplication(String[] args) throws InitializationException {
+		if (ServiceUtils.printAppVersion(BillingTool.class, args)) {
+			return;
+		}
+
+		String confName = null;
+		for (int i = 0; i < args.length; i++) {
+			if (BillingTool.isKey("help", args[i])) {
+				i++;
+				Help.usage(i < args.length ? Arrays.copyOfRange(args, i, args.length) : null);
+				return;
+			} else if (BillingTool.isKey("conf", args[i])) {
+				i++;
+				if (i < args.length) {
+					confName = args[i];
+				} else {
+					throw new InitializationException("Missing the name of configuration file");
+				}
+			}
+		}
+
+		if (confName == null) {
+			Help.usage();
+			throw new InitializationException("Missing arguments");
+		}
+
+		BillingTool.setLoggerLevel();
+		try {
+			initialize(confName);
+		} catch (Exception e) {
+			throw new DlabException("Billing tool initialization failed", e);
+		}
+	}
+}
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/BillingTool.java b/services/billing-aws/src/main/java/com/epam/dlab/BillingTool.java
index cf2b8d6..cde9d4e 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/BillingTool.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/BillingTool.java
@@ -19,24 +19,22 @@
 
 package com.epam.dlab;
 
-import java.util.Arrays;
-
-import com.epam.dlab.exceptions.DlabException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
+import ch.qos.logback.classic.Level;
+import ch.qos.logback.classic.LoggerContext;
 import com.epam.dlab.configuration.BillingToolConfiguration;
 import com.epam.dlab.configuration.BillingToolConfigurationFactory;
 import com.epam.dlab.core.parser.ParserBase;
 import com.epam.dlab.exceptions.AdapterException;
+import com.epam.dlab.exceptions.DlabException;
 import com.epam.dlab.exceptions.InitializationException;
 import com.epam.dlab.exceptions.ParseException;
 import com.epam.dlab.util.ServiceUtils;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
-import ch.qos.logback.classic.Level;
-import ch.qos.logback.classic.LoggerContext;
+import java.util.Arrays;
 
 /** Provides billing parser features.
  */
@@ -110,14 +108,14 @@
 	 * @throws InitializationException
 	 */
 	public static void main(String[] args) throws InitializationException {
-		if (ServiceUtils.printAppVersion(BillingScheduler.class, args)) {
+		if (ServiceUtils.printAppVersion(BillingServiceImpl.class, args)) {
 			return;
 		}
 
 		String confName = null;
 		String json = null;
-		
-		for(int i = 0; i < args.length; i++) {
+
+		for (int i = 0; i < args.length; i++) {
 			if (isKey("help", args[i])) {
 				i++;
 				Help.usage(i < args.length ? Arrays.copyOfRange(args, i, args.length) : null);
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/Help.java b/services/billing-aws/src/main/java/com/epam/dlab/Help.java
index 2a043c2..c2fe5c2 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/Help.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/Help.java
@@ -19,18 +19,17 @@
 
 package com.epam.dlab;
 
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.commons.lang3.StringUtils;
-
 import com.epam.dlab.core.BillingUtils;
 import com.epam.dlab.core.ModuleType;
 import com.epam.dlab.exceptions.InitializationException;
 import com.fasterxml.jackson.annotation.JsonClassDescription;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.apache.commons.lang3.StringUtils;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 
 /** Print help for billing tool.
  */
@@ -47,12 +46,12 @@
 	private static void printHelp(String resourceName, Map<String, String> substitute) throws InitializationException {
 		List<String> list = BillingUtils.getResourceAsList("/" + Help.class.getName() + "." + resourceName + ".txt");
 		String help = StringUtils.join(list, System.lineSeparator());
-		
+
 		if (substitute == null) {
 			substitute = new HashMap<>();
 		}
-		substitute.put("classname", BillingScheduler.class.getName());
-		
+		substitute.put("classname", BillingServiceImpl.class.getName());
+
 		for (String key : substitute.keySet()) {
 			help = StringUtils.replace(help, "${" + key.toUpperCase() + "}", substitute.get(key));
 		}
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/conf/SecurityConfig.java b/services/billing-aws/src/main/java/com/epam/dlab/conf/SecurityConfig.java
new file mode 100644
index 0000000..dba4086
--- /dev/null
+++ b/services/billing-aws/src/main/java/com/epam/dlab/conf/SecurityConfig.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.conf;
+
+import org.keycloak.adapters.KeycloakConfigResolver;
+import org.keycloak.adapters.springboot.KeycloakSpringBootConfigResolver;
+import org.keycloak.adapters.springsecurity.KeycloakConfiguration;
+import org.keycloak.adapters.springsecurity.authentication.KeycloakAuthenticationProvider;
+import org.keycloak.adapters.springsecurity.config.KeycloakWebSecurityConfigurerAdapter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.core.authority.mapping.SimpleAuthorityMapper;
+import org.springframework.security.core.session.SessionRegistryImpl;
+import org.springframework.security.web.authentication.session.RegisterSessionAuthenticationStrategy;
+import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy;
+
+@KeycloakConfiguration
+class SecurityConfig extends KeycloakWebSecurityConfigurerAdapter {
+
+    @Autowired
+    public void configureGlobal(AuthenticationManagerBuilder auth) {
+        KeycloakAuthenticationProvider keycloakAuthenticationProvider = keycloakAuthenticationProvider();
+        keycloakAuthenticationProvider.setGrantedAuthoritiesMapper(new SimpleAuthorityMapper());
+        auth.authenticationProvider(keycloakAuthenticationProvider);
+    }
+
+    @Bean
+    public KeycloakConfigResolver keycloakConfigResolver() {
+        return new KeycloakSpringBootConfigResolver();
+    }
+
+    @Bean
+    @Override
+    protected SessionAuthenticationStrategy sessionAuthenticationStrategy() {
+        return new RegisterSessionAuthenticationStrategy(new SessionRegistryImpl());
+    }
+
+    @Override
+    protected void configure(HttpSecurity http) throws Exception {
+        super.configure(http);
+        http
+                .anonymous().disable()
+                .authorizeRequests()
+                .anyRequest()
+                .authenticated();
+    }
+}
\ No newline at end of file
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/configuration/BillingToolConfiguration.java b/services/billing-aws/src/main/java/com/epam/dlab/configuration/BillingToolConfiguration.java
index 803d232..420b9e0 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/configuration/BillingToolConfiguration.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/configuration/BillingToolConfiguration.java
@@ -77,13 +77,6 @@
 	private boolean billingEnabled;
 
 	/**
-	 * Working data file name of modules.
-	 */
-	@Valid
-	@JsonProperty
-	private SchedulerConfiguration scheduler = null;
-
-	/**
 	 * Adapter for reading source data.
 	 */
 	@Valid
@@ -136,20 +129,6 @@
 	}
 
 	/**
-	 * Set the scheduler.
-	 */
-	public void setScheduler(SchedulerConfiguration scheduler) {
-		this.scheduler = scheduler;
-	}
-
-	/**
-	 * Return the scheduler.
-	 */
-	public SchedulerConfiguration getScheduler() {
-		return scheduler;
-	}
-
-	/**
 	 * Set the adapter for reading source data.
 	 */
 	public void setAdapterIn(ImmutableList<AdapterBase> adapter) {
@@ -272,14 +251,6 @@
 			f.setModuleData(moduleData);
 		}
 
-		if (scheduler != null) {
-			try {
-				scheduler.build();
-			} catch (Exception e) {
-				throw new InitializationException("Cannot configure billing scheduler. " + e.getLocalizedMessage(), e);
-			}
-		}
-
 		return parser.build(in, out, f);
 	}
 
@@ -295,7 +266,6 @@
 	public ToStringHelper toStringHelper(Object self) {
 		return MoreObjects.toStringHelper(self)
 				.add("moduleData", moduleData)
-				.add("scheduler", scheduler)
 				.add("adapterIn", adapterIn)
 				.add("adapterOut", adapterOut)
 				.add("filter", filter)
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/configuration/SchedulerConfiguration.java b/services/billing-aws/src/main/java/com/epam/dlab/configuration/SchedulerConfiguration.java
deleted file mode 100644
index b0624d6..0000000
--- a/services/billing-aws/src/main/java/com/epam/dlab/configuration/SchedulerConfiguration.java
+++ /dev/null
@@ -1,213 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.configuration;
-
-import java.text.SimpleDateFormat;
-import java.util.Calendar;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.commons.lang3.StringUtils;
-
-import com.epam.dlab.exceptions.ParseException;
-import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
-import com.google.common.base.MoreObjects.ToStringHelper;
-
-/** Provides schedule time configuration.
- */
-public class SchedulerConfiguration {
-	
-	/** User's schedule. */
-	@JsonProperty
-	private String schedule = "12, 13:30:23, 18:34, 08:50, 7:80";
-	
-	
-	/** Return the schedule of user.
-	 */
-	public String getSchedule() {
-		return schedule;
-	}
-	
-	/** Set the schedule of user.
-	 */
-	public void setSchedule(String schedule) {
-		this.schedule = schedule;
-	}
-	
-	
-	/** Schedule. */
-	private Map<String, Calendar> realSchedule = new TreeMap<>();
-	
-	/** Build the schedule from user' schedule.
-	 * @throws ParseException
-	 */
-	public void build() throws ParseException {
-		SimpleDateFormat df = new SimpleDateFormat("HH:mm:ss");
-		String [] unitArray = schedule.split(",");
-		realSchedule.clear();
-		for (int i = 0; i < unitArray.length; i++) {
-			Calendar date = Calendar.getInstance();
-			int [] time = getTime(unitArray[i]);
-			try {
-				df.parse(StringUtils.join(time, ':'));
-			} catch (Exception e) {
-				throw new ParseException("Cannot parse date " + unitArray[i] + ". " + e.getLocalizedMessage(), e);
-			}
-			date.clear();
-			date.set(1, 1, 1, time[0], time[1], time[2]);
-			realSchedule.put(df.format(date.getTime()), date);
-		}
-		adjustStartTime();
-	}
-	
-	/** Return the schedule.
-	 */
-	public Map<String, Calendar> getRealSchedule() {
-		return realSchedule;
-	}
-	
-	/** Return time array of user' schedule time.
-	 * @param time the time in format HH:mm:ss.
-	 * @throws ParseException
-	 */
-	private int [] getTime(String time) throws ParseException {
-		String [] timeString = time.trim().split(":");
-		int [] timeInt = new int[3];
-		
-		for (int i = 0; i < timeInt.length; i++) {
-			if (i < timeString.length) {
-				try {
-					timeInt[i] = Integer.parseInt(timeString[i]);
-				} catch (Exception e) {
-					throw new ParseException("Cannot parse date " + time + ". " + e.getLocalizedMessage(), e);
-				}
-			} else {
-				timeInt[i] = 0;
-			}
-		}
-		
-		return timeInt;
-	}
-
-	/** Adjust the time in schedule for current time.
-	 */
-	public void adjustStartTime() {
-		Calendar now = Calendar.getInstance();
-		for(String key : realSchedule.keySet()) {
-			Calendar time = realSchedule.get(key);
-			if (time.before(now)) {
-				time.set(now.get(Calendar.YEAR),
-						now.get(Calendar.MONTH),
-						now.get(Calendar.DAY_OF_MONTH),
-						time.get(Calendar.HOUR_OF_DAY),
-						time.get(Calendar.MINUTE),
-						time.get(Calendar.SECOND));
-				if (time.before(now)) {
-					time.add(Calendar.DAY_OF_MONTH, 1);
-				}
-				realSchedule.put(key, time);
-			}
-		}
-	}
-	
-	/** Return the key of the next start time from the schedule.
-	 */
-	public String getNextTimeKey() {
-		long now = System.currentTimeMillis();
-		String nextKey = null;
-		long nextTime = -1;
-		
-		for(String key : realSchedule.keySet()) {
-			long time = realSchedule.get(key).getTimeInMillis();
-			if ((time >= now && time < nextTime) || nextTime == -1) {
-				nextTime = time;
-				nextKey = key;
-			}
-		}
-		return nextKey;
-	}
-	
-	/** Return the next start time from the schedule.
-	 */
-	public Calendar getNextTime() {
-		String key = getNextTimeKey();
-		return (key == null ? null : realSchedule.get(key));
-	}
-	
-	/** Return the key of the near start time from the schedule to the current time.
-	 */
-	public String getNearTimeKey() {
-		long now = System.currentTimeMillis();
-		String nextKey = null;
-		long nextTime = -1;
-		
-		for(String key : realSchedule.keySet()) {
-			long time = Math.abs(now - realSchedule.get(key).getTimeInMillis());
-			if (time < nextTime || nextTime == -1) {
-				nextTime = time;
-				nextKey = key;
-			}
-		}
-		return nextKey;
-	}
-	
-	/** Return the near start time from the schedule to the current time.
-	 */
-	public Calendar getNearTime() {
-		String key = getNearTimeKey();
-		return (key == null ? null : realSchedule.get(key));
-	}
-	
-	/** Returns a string representation of the object.
-	 * @param self the object to generate the string for (typically this), used only for its class name.
-	 */
-	public ToStringHelper toStringHelper(Object self) {
-		SimpleDateFormat df = new SimpleDateFormat("dd.MM.yyyy HH:mm:ss");
-		ToStringHelper helper = MoreObjects.toStringHelper(self);
-		for(String key : realSchedule.keySet()) {
-			Calendar time = realSchedule.get(key);
-			helper.add(key, df.format(time.getTime()));
-		}
-    	return helper;
-    }
-    
-    @Override
-    public String toString() {
-    	return toStringHelper(this)
-    			.toString();
-    }
-
-	@Override
-	public boolean equals(Object o) {
-		if (this == o) return true;
-		if (!(o instanceof SchedulerConfiguration)) return false;
-
-		SchedulerConfiguration that = (SchedulerConfiguration) o;
-
-		return getRealSchedule() != null ? getRealSchedule().keySet().equals(that.getRealSchedule().keySet())
-				: that.getRealSchedule() == null;
-	}
-
-	@Override
-	public int hashCode() {
-		return getRealSchedule() != null ? getRealSchedule().keySet().hashCode() : 0;
-	}
-}
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/controller/BillingController.java b/services/billing-aws/src/main/java/com/epam/dlab/controller/BillingController.java
new file mode 100644
index 0000000..deabf44
--- /dev/null
+++ b/services/billing-aws/src/main/java/com/epam/dlab/controller/BillingController.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.controller;
+
+import com.epam.dlab.BillingService;
+import com.epam.dlab.dto.billing.BillingData;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.List;
+
+@RestController
+public class BillingController {
+
+    private final BillingService billingService;
+
+    public BillingController(BillingService billingService) {
+        this.billingService = billingService;
+    }
+
+    @GetMapping
+    public ResponseEntity<List<BillingData>> getBilling() {
+        return new ResponseEntity<>(billingService.getBillingData(), HttpStatus.OK);
+    }
+}
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/core/AdapterBase.java b/services/billing-aws/src/main/java/com/epam/dlab/core/AdapterBase.java
index 1569530..475404d 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/core/AdapterBase.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/core/AdapterBase.java
@@ -24,6 +24,7 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.MoreObjects.ToStringHelper;
+import org.bson.Document;
 
 import java.util.List;
 
@@ -157,9 +158,10 @@
 	 * Write the row of data to adapter.
 	 *
 	 * @param row the row of common format.
+	 * @return the row converted to a Mongo {@link Document}, or {@code null} if the adapter does not produce one
 	 * @throws AdapterException
 	 */
-	public abstract void writeRow(ReportLine row) throws AdapterException;
+	public abstract Document writeRow(ReportLine row) throws AdapterException;
 
 
 	@Override
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserBase.java b/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserBase.java
index f9f0eaa..bfd86bc 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserBase.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserBase.java
@@ -19,13 +19,6 @@
 
 package com.epam.dlab.core.parser;
 
-import java.util.ArrayList;
-import java.util.List;
-
-import javax.validation.constraints.NotNull;
-
-import org.apache.commons.lang3.StringUtils;
-
 import com.epam.dlab.core.AdapterBase;
 import com.epam.dlab.core.FilterBase;
 import com.epam.dlab.core.ModuleBase;
@@ -37,6 +30,12 @@
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.MoreObjects.ToStringHelper;
+import org.apache.commons.lang3.StringUtils;
+import org.bson.Document;
+
+import javax.validation.constraints.NotNull;
+import java.util.ArrayList;
+import java.util.List;
 
 /** Abstract module of parser.<br>
  * See description of {@link ModuleBase} how to create your own parser.
@@ -234,13 +233,16 @@
 	 * @throws InitializationException
 	 */
 	public abstract void initialize()  throws InitializationException;
-	
-	/** Parse the source data to common format and write it to output adapter.
+
+	/**
+	 * Parse the source data to common format and write it to output adapter.
+	 *
+	 * @return the list of billing documents produced by parsing the source data
 	 * @throws InitializationException
 	 * @throws AdapterException
 	 * @throws ParseException
 	 */
-	public abstract void parse() throws InitializationException, AdapterException, ParseException;
+	public abstract List<Document> parse() throws InitializationException, AdapterException, ParseException;
 	
 	/** Build parser from given modules.
 	 * @param adapterIn the adapter for reading source data.
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserByLine.java b/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserByLine.java
index 37f2070..d878cb9 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserByLine.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/core/parser/ParserByLine.java
@@ -27,10 +27,12 @@
 import com.epam.dlab.exceptions.ParseException;
 import com.epam.dlab.model.aws.ReportLine;
 import com.fasterxml.jackson.annotation.JsonIgnore;
+import org.bson.Document;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.List;
 
 /**
@@ -156,11 +158,13 @@
 	/**
 	 * Parse the source data to common format and write it to output adapter.
 	 *
+	 * @return list of billing data
 	 * @throws InitializationException
 	 * @throws AdapterException
 	 * @throws ParseException
 	 */
-	public void parse() throws InitializationException, AdapterException, ParseException {
+	public List<Document> parse() throws InitializationException, AdapterException, ParseException {
+		List<Document> billingData = new ArrayList<>();
 		try {
 			if (init()) {
 				String line;
@@ -211,14 +215,14 @@
 						if (getAggregate() != AggregateGranularity.NONE) {
 							getAggregator().append(reportLine);
 						} else {
-							getAdapterOut().writeRow(reportLine);
+							billingData.add(getAdapterOut().writeRow(reportLine));
 							getCurrentStatistics().incrRowWritten();
 						}
 					}
 
 					if (getAggregate() != AggregateGranularity.NONE) {
 						for (int i = 0; i < getAggregator().size(); i++) {
-							getAdapterOut().writeRow(getAggregator().get(i));
+							billingData.add(getAdapterOut().writeRow(getAggregator().get(i)));
 							getCurrentStatistics().incrRowWritten();
 						}
 					}
@@ -255,5 +259,6 @@
 		if (getCurrentStatistics() != null) {
 			getCurrentStatistics().stop();
 		}
+		return billingData;
 	}
 }
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterConsole.java b/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterConsole.java
index 59c866d..3bffa79 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterConsole.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterConsole.java
@@ -25,6 +25,7 @@
 import com.epam.dlab.model.aws.ReportLine;
 import com.fasterxml.jackson.annotation.JsonClassDescription;
 import com.fasterxml.jackson.annotation.JsonTypeName;
+import org.bson.Document;
 
 import java.util.List;
 
@@ -84,7 +85,8 @@
 	}
 
 	@Override
-	public void writeRow(ReportLine row) throws AdapterException {
+	public Document writeRow(ReportLine row) throws AdapterException {
 		System.out.println(CommonFormat.rowToString(row));
+		return null;
 	}
 }
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterFile.java b/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterFile.java
index 7fb38f3..dd256eb 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterFile.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/module/AdapterFile.java
@@ -19,15 +19,6 @@
 
 package com.epam.dlab.module;
 
-import java.io.BufferedReader;
-import java.io.BufferedWriter;
-import java.io.FileReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.List;
-
-import javax.validation.constraints.NotNull;
-
 import com.epam.dlab.core.AdapterBase;
 import com.epam.dlab.core.parser.CommonFormat;
 import com.epam.dlab.exceptions.AdapterException;
@@ -37,6 +28,15 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.MoreObjects.ToStringHelper;
+import org.bson.Document;
+
+import javax.validation.constraints.NotNull;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.FileReader;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.util.List;
 
 /** The adapter for file system.
  */
@@ -137,15 +137,16 @@
 			throw new AdapterException("Cannot write file " + file + ". " + e.getLocalizedMessage(), e);
 		}
 	}
-	
+
 	@Override
-	public void writeRow(ReportLine row) throws AdapterException {
+	public Document writeRow(ReportLine row) throws AdapterException {
 		try {
 			writer.write(CommonFormat.rowToString(row));
 			writer.write(System.lineSeparator());
 		} catch (IOException e) {
 			throw new AdapterException("Cannot write file " + file + ". " + e.getLocalizedMessage(), e);
 		}
+		return null;
 	}
 	
 	
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/module/aws/AdapterS3File.java b/services/billing-aws/src/main/java/com/epam/dlab/module/aws/AdapterS3File.java
index 9dc7e07..0579063 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/module/aws/AdapterS3File.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/module/aws/AdapterS3File.java
@@ -33,6 +33,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.MoreObjects.ToStringHelper;
+import org.bson.Document;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -308,7 +309,7 @@
 	}
 
 	@Override
-	public void writeRow(ReportLine row) throws AdapterException {
+	public Document writeRow(ReportLine row) throws AdapterException {
 		throw new AdapterException("Unimplemented method.");
 	}
 
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/mongo/AdapterMongoDb.java b/services/billing-aws/src/main/java/com/epam/dlab/mongo/AdapterMongoDb.java
index a13ed05..db92a80 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/mongo/AdapterMongoDb.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/mongo/AdapterMongoDb.java
@@ -31,12 +31,17 @@
 import com.fasterxml.jackson.annotation.JsonTypeName;
 import com.google.common.base.MoreObjects.ToStringHelper;
 import com.mongodb.client.MongoCollection;
+import com.mongodb.client.model.UpdateOptions;
 import org.bson.Document;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.TreeSet;
 
+import static com.epam.dlab.mongo.MongoConstants.COLLECTION_SETTINGS;
+import static com.epam.dlab.mongo.MongoConstants.FIELD_SERIVICE_BASE_NAME;
+import static com.mongodb.client.model.Filters.eq;
+
 /**
  * The adapter for file system.
  */
@@ -67,6 +72,17 @@
 	@JsonProperty
 	private boolean upsert = false;
 
+	@JsonProperty
+	private String serviceBaseName;
+
+	public String getServiceBaseName() {
+		return serviceBaseName;
+	}
+
+	public void setServiceBaseName(String serviceBaseName) {
+		this.serviceBaseName = serviceBaseName;
+	}
+
 	/**
 	 * Return the size of buffer for bulk insert.
 	 */
@@ -142,6 +158,7 @@
 				throw new AdapterException("Mode of " + getType() + " adapter may be " + Mode.WRITE + " only.");
 			}
 			connection = new MongoDbConnection(getHost(), getPort(), getDatabase(), getUsername(), getPassword());
+			setServiceBaseName();
 			collection = connection.getCollection(MongoConstants.COLLECTION_BILLING);
 			try {
 				resourceTypeDAO = new DlabResourceTypeDAO(connection);
@@ -158,6 +175,12 @@
 		}
 	}
 
+	private void setServiceBaseName() {
+		connection.getCollection(COLLECTION_SETTINGS)
+				.updateOne(eq("_id", FIELD_SERIVICE_BASE_NAME), new Document("$set", new Document("value", serviceBaseName)),
+						new UpdateOptions().upsert(true));
+	}
+
 	@Override
 	public void close() throws AdapterException {
 		if (connection != null) {
@@ -191,12 +214,12 @@
 	}
 
 	@Override
-	public void writeHeader(List<String> header) throws AdapterException {
+	public void writeHeader(List<String> header) {
 		// Nothing to do
 	}
 
 	@Override
-	public void writeRow(ReportLine row) throws AdapterException {
+	public Document writeRow(ReportLine row) throws AdapterException {
 		Document document;
 		try {
 			document = resourceTypeDAO.transform(row);
@@ -204,20 +227,21 @@
 			throw new AdapterException("Cannot transform report line. " + e.getLocalizedMessage(), e);
 		}
 
-		usageDateList.append(row.getUsageDate());
-		if (upsert) {
-			buffer.add(document);
-			if (buffer.size() >= bufferSize) {
-				connection.upsertRows(collection, buffer, usageDateList);
-			}
-		} else if (bufferSize > 0) {
-			buffer.add(document);
-			if (buffer.size() >= bufferSize) {
-				connection.insertRows(collection, buffer);
-			}
-		} else {
-			connection.insertOne(collection, document);
-		}
+//		usageDateList.append(row.getUsageDate());
+//		if (upsert) {
+//			buffer.add(document);
+//			if (buffer.size() >= bufferSize) {
+//				connection.upsertRows(collection, buffer, usageDateList);
+//			}
+//		} else if (bufferSize > 0) {
+//			buffer.add(document);
+//			if (buffer.size() >= bufferSize) {
+//				connection.insertRows(collection, buffer);
+//			}
+//		} else {
+//			connection.insertOne(collection, document);
+//		}
+		return document;
 	}
 
 	/**
@@ -235,13 +259,6 @@
 		} catch (Exception e) {
 			throw new AdapterException("Cannot update total monthly cost. " + e.getLocalizedMessage(), e);
 		}
-		try {
-			if (months.size() > 0) {
-				resourceTypeDAO.updateExploratoryCost();
-			}
-		} catch (Exception e) {
-			throw new AdapterException("Cannot update total cost of exploratory. " + e.getLocalizedMessage(), e);
-		}
 	}
 
 	@Override
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/mongo/DlabResourceTypeDAO.java b/services/billing-aws/src/main/java/com/epam/dlab/mongo/DlabResourceTypeDAO.java
index dcfe369..0c95605 100644
--- a/services/billing-aws/src/main/java/com/epam/dlab/mongo/DlabResourceTypeDAO.java
+++ b/services/billing-aws/src/main/java/com/epam/dlab/mongo/DlabResourceTypeDAO.java
@@ -19,28 +19,28 @@
 
 package com.epam.dlab.mongo;
 
-import com.epam.dlab.billing.BillingCalculationUtils;
-import com.epam.dlab.billing.DlabResourceType;
-import com.epam.dlab.dto.base.DataEngineType;
 import com.epam.dlab.exceptions.InitializationException;
 import com.epam.dlab.exceptions.ParseException;
 import com.epam.dlab.model.aws.ReportLine;
 import com.mongodb.client.AggregateIterable;
 import com.mongodb.client.MongoCollection;
-import com.mongodb.client.model.Updates;
-import org.apache.commons.lang3.StringUtils;
 import org.bson.Document;
 import org.bson.conversions.Bson;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.text.SimpleDateFormat;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
 
-import static com.mongodb.client.model.Accumulators.*;
-import static com.mongodb.client.model.Aggregates.*;
-import static com.mongodb.client.model.Filters.*;
-import static com.mongodb.client.model.Projections.*;
+import static com.mongodb.client.model.Accumulators.sum;
+import static com.mongodb.client.model.Aggregates.group;
+import static com.mongodb.client.model.Aggregates.match;
+import static com.mongodb.client.model.Filters.and;
+import static com.mongodb.client.model.Filters.eq;
+import static com.mongodb.client.model.Filters.gte;
+import static com.mongodb.client.model.Filters.lte;
 import static org.apache.commons.lang3.StringUtils.EMPTY;
 
 /**
@@ -48,8 +48,6 @@
  */
 public class DlabResourceTypeDAO implements MongoConstants {
 	private static final Logger LOGGER = LoggerFactory.getLogger(DlabResourceTypeDAO.class);
-	private static final String VOLUME_PRIMARY_SUFFIX = "-volume-primary";
-	private static final String VOLUME_SECONDARY_SUFFIX = "-volume-secondary";
 
 	/**
 	 * Mongo database connection.
@@ -63,11 +61,6 @@
 	private String serviceBaseNameId;
 
 	/**
-	 * Describe all DLab resources: SSN, EDGE, exploratory, computational and buckets.
-	 */
-	private ResourceItemList resourceList;
-
-	/**
 	 * Instantiate DAO for billing resources.
 	 *
 	 * @param connection the connection to Mongo DB.
@@ -76,7 +69,6 @@
 	public DlabResourceTypeDAO(MongoDbConnection connection) throws InitializationException {
 		this.connection = connection;
 		setServiceBaseName();
-		setResourceList();
 	}
 
 	/**
@@ -110,131 +102,6 @@
 	}
 
 	/**
-	 * Return DLab resources from Mongo DB.
-	 *
-	 * @throws InitializationException
-	 */
-	public ResourceItemList getResourceList() {
-		return resourceList;
-	}
-
-	/**
-	 * Load and return DLab resources from Mongo DB.
-	 *
-	 * @throws InitializationException
-	 */
-	private void setResourceList() {
-		resourceList = new ResourceItemList();
-
-		// Add SSN
-		String sbName = getServiceBaseName();
-		resourceList.append(sbName + "-ssn", "SSN", DlabResourceType.SSN);
-		resourceList.append(sbName + "-ssn-volume-primary", "SSN volume", DlabResourceType.VOLUME);
-		resourceList.append(sbName + "-ssn-bucket", "SSN bucket", DlabResourceType.SSN_BUCKET);
-
-		// collaboration bucket
-		resourceList.append(sbName + "-shared-bucket", "Collaboration bucket", DlabResourceType
-				.COLLABORATION_BUCKET);
-
-		// Add PROJECTS
-		Bson projection = fields(include("name", "endpoints"));
-		Iterable<Document> docs = connection.getCollection("Projects").find().projection(projection);
-		for (Document d : docs) {
-			String projectName = d.getString("name");
-			((List<Document>) d.get("endpoints"))
-					.stream()
-					.map(endpoint -> endpoint.getString("name"))
-					.forEach(endpoint -> {
-						resourceList.append(sbName + "-" + endpoint + "-shared-bucket", "Shared endpoint bucket",
-								DlabResourceType.COLLABORATION_BUCKET);
-						resourceList.append(sbName + "-" + projectName + "-" + endpoint + "-bucket", "Project bucket",
-								DlabResourceType.COLLABORATION_BUCKET, null, null, projectName);
-						resourceList.append(sbName + "-" + projectName + "-" + endpoint + "-edge", "EDGE Node",
-								DlabResourceType.EDGE, null, null, projectName);
-						resourceList.append(sbName + "-" + projectName+ "-" + endpoint + "-edge-volume-primary",
-								"EDGE Volume", DlabResourceType.VOLUME, null, null, projectName);
-					});
-		}
-
-		// Add exploratory
-		projection = fields(include(FIELD_USER,
-				FIELD_EXPLORATORY_NAME,
-				FIELD_EXPLORATORY_ID,
-				FIELD_PROJECT,
-				FIELD_COMPUTATIONAL_RESOURCES + "." + FIELD_COMPUTATIONAL_ID,
-				FIELD_COMPUTATIONAL_RESOURCES + "." + FIELD_COMPUTATIONAL_NAME,
-				FIELD_COMPUTATIONAL_RESOURCES + "." + FIELD_IMAGE,
-				FIELD_COMPUTATIONAL_RESOURCES + "." + FIELD_DATAENGINE_INSTANCE_COUNT),
-				excludeId());
-		docs = connection.getCollection(COLLECTION_USER_INSTANCES).find().projection(projection);
-		for (Document exp : docs) {
-			String username = exp.getString(FIELD_USER);
-			String exploratoryName = exp.getString(FIELD_EXPLORATORY_NAME);
-			String exploratoryId = exp.getString(FIELD_EXPLORATORY_ID);
-			String project = exp.getString(FIELD_PROJECT);
-			resourceList.append(exploratoryId, exploratoryName, DlabResourceType.EXPLORATORY, username,
-					exploratoryName, project);
-			appendExploratoryVolumes(username, exploratoryName, exploratoryId, project);
-
-			// Add computational
-			@SuppressWarnings("unchecked")
-			List<Document> compList = (List<Document>) exp.get(FIELD_COMPUTATIONAL_RESOURCES);
-			if (compList == null) {
-				continue;
-			}
-			for (Document comp : compList) {
-				String computationalId = comp.getString(FIELD_COMPUTATIONAL_ID);
-				String computationalName = comp.getString(FIELD_COMPUTATIONAL_NAME);
-				final DataEngineType dataEngineType = DataEngineType.fromDockerImageName(comp.getString(FIELD_IMAGE));
-				resourceList.append(computationalId, computationalName, DlabResourceType.COMPUTATIONAL, username,
-						exploratoryName, project);
-				if (DataEngineType.CLOUD_SERVICE == dataEngineType) {
-					appendDataengineServiceVolumes(username, exploratoryName, computationalId, computationalName,
-							project);
-				} else {
-					appendDataengineVolumes(username, exploratoryName, comp, computationalId, computationalName,
-							project);
-				}
-			}
-		}
-		LOGGER.debug("resourceList is {}", resourceList);
-	}
-
-	private void appendExploratoryVolumes(String username, String exploratoryName, String exploratoryId,
-										  String project) {
-		resourceList.append(exploratoryId + VOLUME_PRIMARY_SUFFIX, "Volume primary", DlabResourceType.VOLUME,
-				username, exploratoryName, project);
-		resourceList.append(exploratoryId + VOLUME_SECONDARY_SUFFIX, "Volume secondary", DlabResourceType.VOLUME,
-				username, exploratoryName, project);
-	}
-
-	private void appendDataengineServiceVolumes(String username, String exploratoryName, String computationalId,
-												String computationalName, String project) {
-		resourceList.append(computationalId + VOLUME_PRIMARY_SUFFIX, computationalName + " volume primary",
-				DlabResourceType.VOLUME, username, exploratoryName, project);
-		resourceList.append(computationalId + VOLUME_SECONDARY_SUFFIX, computationalName + " volume secondary",
-				DlabResourceType.VOLUME, username, exploratoryName, project);
-	}
-
-	private void appendDataengineVolumes(String username, String exploratoryName, Document comp, String
-			computationalId, String computationalName, String project) {
-		resourceList.append(computationalId + "-m-volume-primary", computationalName + " master volume primary",
-				DlabResourceType.VOLUME, username, exploratoryName, project);
-		resourceList.append(computationalId + "-m-volume-secondary", computationalName + " master volume secondary",
-				DlabResourceType.VOLUME, username, exploratoryName, project);
-		final Integer instanceCount = Integer.valueOf(comp.getString(FIELD_DATAENGINE_INSTANCE_COUNT));
-		for (int i = instanceCount - 1; i > 0; i--) {
-			final String slaveId = computationalId + "-s" + i;
-			final String slaveName = computationalName + "-s" + i;
-			resourceList.append(slaveId + VOLUME_PRIMARY_SUFFIX, slaveName + " volume primary", DlabResourceType
-					.VOLUME, username, exploratoryName, project);
-			resourceList.append(slaveId + VOLUME_SECONDARY_SUFFIX, slaveName + " volume secondary", DlabResourceType
-					.VOLUME, username, exploratoryName, project);
-		}
-	}
-
-
-	/**
 	 * Convert and return the report line of billing to Mongo document.
 	 *
 	 * @param row report line.
@@ -248,21 +115,7 @@
 					resourceId);
 		}
 		resourceId = resourceId.substring(serviceBaseNameId.length());
-
-		ResourceItem resource = resourceList.getById(resourceId);
 		Document d = new Document(ReportLine.FIELD_DLAB_ID, resourceId);
-		if (resource == null) {
-			d.put(FIELD_DLAB_RESOURCE_ID, null);
-			d.put(FIELD_DLAB_RESOURCE_TYPE, null);
-			d.put(ReportLine.FIELD_USER_ID, null);
-			d.put(FIELD_EXPLORATORY_NAME, null);
-		} else {
-			d.put(FIELD_DLAB_RESOURCE_ID, resource.getResourceId());
-			d.put(FIELD_DLAB_RESOURCE_TYPE, resource.getType().toString());
-			d.put(ReportLine.FIELD_USER_ID, resource.getUser());
-			d.put(FIELD_EXPLORATORY_NAME, resource.getExploratoryName());
-			d.put(FIELD_PROJECT, resource.getProject());
-		}
 		return d.append(ReportLine.FIELD_USAGE_DATE, row.getUsageDate())
 				.append(ReportLine.FIELD_PRODUCT, row.getProduct())
 				.append(ReportLine.FIELD_USAGE_TYPE, row.getUsageType())
@@ -280,7 +133,7 @@
 	 *
 	 * @param fieldNames the list of field names.
 	 */
-	private Document getGrouppingFields(String... fieldNames) {
+	private Document getGroupingFields(String... fieldNames) {
 		Document d = new Document();
 		for (String name : fieldNames) {
 			d.put(name, "$" + name);
@@ -307,7 +160,7 @@
 		List<? extends Bson> pipeline = Arrays.asList(
 				match(and(gte(ReportLine.FIELD_USAGE_DATE, month + "-01"),
 						lte(ReportLine.FIELD_USAGE_DATE, month + "-31"))),
-				group(getGrouppingFields(FIELD_DLAB_RESOURCE_ID,
+				group(getGroupingFields(FIELD_DLAB_RESOURCE_ID,
 						FIELD_DLAB_RESOURCE_TYPE,
 						FIELD_USER,
 						FIELD_EXPLORATORY_NAME,
@@ -333,128 +186,4 @@
 			collection.insertMany(totals);
 		}
 	}
-
-	/**
-	 * Comparator to sort billing exploratory details.
-	 */
-	class BillingComparator implements Comparator<Document> {
-		@Override
-		public int compare(Document d1, Document d2) {
-			int result = StringUtils.compare(d1.getString(FIELD_RESOURCE_NAME), d2.getString(FIELD_RESOURCE_NAME));
-			if (result == 0) {
-				result = StringUtils.compare(d1.getString(ReportLine.FIELD_PRODUCT), d2.getString(ReportLine
-						.FIELD_PRODUCT));
-				if (result == 0) {
-					return StringUtils.compare(d1.getString(ReportLine.FIELD_RESOURCE_TYPE), d2.getString(ReportLine
-							.FIELD_RESOURCE_TYPE));
-				}
-			}
-			return result;
-		}
-	}
-
-	/**
-	 * Update exploratory cost in Mongo DB.
-	 *
-	 * @param user            the name of user.
-	 * @param exploratoryName id of exploratory.
-	 */
-	private void updateExploratoryCost(String user, String exploratoryName) {
-		LOGGER.debug("Update explorartory {} cost for user {}", exploratoryName, user);
-		List<? extends Bson> pipeline = Arrays.asList(
-				match(and(eq(FIELD_USER, user),
-						eq(FIELD_EXPLORATORY_NAME, exploratoryName))),
-				group(getGrouppingFields(FIELD_DLAB_RESOURCE_ID,
-						ReportLine.FIELD_PRODUCT,
-						ReportLine.FIELD_RESOURCE_TYPE,
-						ReportLine.FIELD_CURRENCY_CODE),
-						sum(ReportLine.FIELD_COST, "$" + ReportLine.FIELD_COST),
-						min(FIELD_USAGE_DATE_START, "$" + ReportLine.FIELD_USAGE_DATE),
-						max(FIELD_USAGE_DATE_END, "$" + ReportLine.FIELD_USAGE_DATE)
-				),
-				sort(new Document(FIELD_ID + "." + FIELD_DLAB_RESOURCE_ID, 1).append(FIELD_ID + "." + ReportLine
-						.FIELD_PRODUCT, 1))
-		);
-		AggregateIterable<Document> docs = connection.getCollection(COLLECTION_BILLING)
-				.aggregate(pipeline);
-		LinkedList<Document> billing = new LinkedList<>();
-		ResourceItemList resources = getResourceList();
-		Double costTotal = null;
-		String currencyCode = null;
-		for (Document d : docs) {
-			Document id = (Document) d.get(FIELD_ID);
-			double cost = BillingCalculationUtils.round(d.getDouble(ReportLine.FIELD_COST), 2);
-			costTotal = (costTotal == null ? cost : costTotal + cost);
-			if (currencyCode == null) {
-				currencyCode = id.getString(ReportLine.FIELD_CURRENCY_CODE);
-			}
-
-			Document total = new Document()
-					.append(FIELD_RESOURCE_NAME, resources.getById(id.getString(FIELD_DLAB_RESOURCE_ID))
-							.getResourceName())
-					.append(ReportLine.FIELD_PRODUCT, id.getString(ReportLine.FIELD_PRODUCT))
-					.append(ReportLine.FIELD_RESOURCE_TYPE, id.getString(ReportLine.FIELD_RESOURCE_TYPE))
-					.append(ReportLine.FIELD_COST, BillingCalculationUtils.formatDouble(cost))
-					.append(ReportLine.FIELD_CURRENCY_CODE, id.getString(ReportLine.FIELD_CURRENCY_CODE))
-					.append(FIELD_USAGE_DATE_START, d.getString(FIELD_USAGE_DATE_START))
-					.append(FIELD_USAGE_DATE_END, d.getString(FIELD_USAGE_DATE_END));
-			billing.add(total);
-		}
-
-		LOGGER.debug("Total explorartory {} cost for user {} is {} {}, detail count is {}",
-				exploratoryName, user, costTotal, currencyCode, billing.size());
-		billing.sort(new BillingComparator());
-
-		MongoCollection<Document> cExploratory = connection.getCollection(COLLECTION_USER_INSTANCES);
-		Bson values = Updates.combine(
-				Updates.set(ReportLine.FIELD_COST, BillingCalculationUtils.formatDouble(costTotal)),
-				Updates.set(FIELD_CURRENCY_CODE, currencyCode),
-				Updates.set(COLLECTION_BILLING, (!billing.isEmpty() ? billing : null)));
-		cExploratory.updateOne(
-				and(and(eq(FIELD_USER, user),
-						eq(FIELD_EXPLORATORY_NAME, exploratoryName))),
-				values);
-	}
-
-	/**
-	 * Update EDGE cost in Mongo DB.
-	 *
-	 * @param user the name of user.
-	 */
-	private void updateEdgeCost(String user) {
-		List<? extends Bson> pipeline = Arrays.asList(
-				match(and(eq(FIELD_USER, user),
-						eq(FIELD_EXPLORATORY_NAME, null))),
-				group(getGrouppingFields(ReportLine.FIELD_CURRENCY_CODE),
-						sum(ReportLine.FIELD_COST, "$" + ReportLine.FIELD_COST))
-		);
-		AggregateIterable<Document> docs = connection.getCollection(COLLECTION_BILLING_TOTAL)
-				.aggregate(pipeline);
-
-		MongoCollection<Document> cEdge = connection.getCollection(COLLECTION_USER_EDGE);
-		for (Document d : docs) {
-			Document id = (Document) d.get(FIELD_ID);
-			Bson values = Updates.combine(
-					Updates.set(ReportLine.FIELD_COST, BillingCalculationUtils.round(d.getDouble(ReportLine
-							.FIELD_COST), 2)),
-					Updates.set(FIELD_CURRENCY_CODE, id.get(ReportLine.FIELD_CURRENCY_CODE)));
-			cEdge.updateOne(
-					eq(FIELD_ID, user),
-					values);
-		}
-	}
-
-	/**
-	 * Update the cost of exploratory environment for all users in Mongo DB.
-	 */
-	public void updateExploratoryCost() {
-		for (int i = 0; i < resourceList.size(); i++) {
-			ResourceItem item = resourceList.get(i);
-			if (item.getType() == DlabResourceType.EXPLORATORY) {
-				updateExploratoryCost(item.getUser(), item.getExploratoryName());
-			} else if (item.getType() == DlabResourceType.EDGE) {
-				updateEdgeCost(item.getUser());
-			}
-		}
-	}
 }
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/mongo/ResourceItem.java b/services/billing-aws/src/main/java/com/epam/dlab/mongo/ResourceItem.java
deleted file mode 100644
index d92b2d6..0000000
--- a/services/billing-aws/src/main/java/com/epam/dlab/mongo/ResourceItem.java
+++ /dev/null
@@ -1,152 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.mongo;
-
-import com.epam.dlab.billing.DlabResourceType;
-import com.google.common.base.MoreObjects;
-import com.google.common.base.MoreObjects.ToStringHelper;
-import org.apache.commons.lang3.StringUtils;
-
-/**
- * The resource of DLab environment.
- */
-public class ResourceItem implements Comparable<ResourceItem> {
-
-	private final String project;
-	/**
-	 * Resource ID.
-	 */
-	String resourceId;
-
-	/**
-	 * User friendly name of resource.
-	 */
-	String resourceName;
-
-	/**
-	 * Type of resource.
-	 */
-	DlabResourceType type;
-
-	/**
-	 * Name of user.
-	 */
-	String user;
-
-	/**
-	 * Name of exploratory.
-	 */
-	String exploratoryName;
-
-	/**
-	 * Instantiate resource of DLab environment.
-	 *
-	 * @param resourceId      resource id.
-	 * @param type            the type of resource.
-	 * @param user            the name of user.
-	 * @param exploratoryName the name of exploratory.
-	 */
-	public ResourceItem(String resourceId, String resourceName, DlabResourceType type,
-						String user, String exploratoryName, String project) {
-		this.resourceId = resourceId;
-		this.resourceName = resourceName;
-		this.type = type;
-		this.user = user;
-		this.exploratoryName = exploratoryName;
-		this.project = project;
-	}
-
-	@Override
-	public int compareTo(ResourceItem o) {
-		if (o == null) {
-			return -1;
-		}
-		int result = StringUtils.compare(resourceId, o.resourceId);
-		if (result == 0) {
-			result = StringUtils.compare(exploratoryName, o.exploratoryName);
-			if (result == 0) {
-				result = StringUtils.compare(type.name(), o.type.name());
-				if (result == 0) {
-					return StringUtils.compare(user, o.user);
-				}
-			}
-		}
-		return result;
-	}
-
-
-	/**
-	 * Returns the resource id.
-	 */
-	public String getResourceId() {
-		return resourceId;
-	}
-
-	/**
-	 * Return user friendly name of resource.
-	 */
-	public String getResourceName() {
-		return resourceName;
-	}
-
-	/**
-	 * Returns the type of resource.
-	 */
-	public DlabResourceType getType() {
-		return type;
-	}
-
-	/**
-	 * Returns the name of user.
-	 */
-	public String getUser() {
-		return user;
-	}
-
-	/**
-	 * Returns the name of exploratory.
-	 */
-	public String getExploratoryName() {
-		return exploratoryName;
-	}
-
-	public String getProject() {
-		return project;
-	}
-
-	/**
-	 * Returns a string representation of the object.
-	 *
-	 * @param self the object to generate the string for (typically this), used only for its class name.
-	 */
-	public ToStringHelper toStringHelper(Object self) {
-		return MoreObjects.toStringHelper(self)
-				.add("resourceId", resourceId)
-				.add("resourceName", resourceName)
-				.add("type", type)
-				.add("user", user)
-				.add("exploratoryName", exploratoryName);
-	}
-
-	@Override
-	public String toString() {
-		return toStringHelper(this).toString();
-	}
-}
diff --git a/services/billing-aws/src/main/java/com/epam/dlab/mongo/ResourceItemList.java b/services/billing-aws/src/main/java/com/epam/dlab/mongo/ResourceItemList.java
deleted file mode 100644
index d5c0091..0000000
--- a/services/billing-aws/src/main/java/com/epam/dlab/mongo/ResourceItemList.java
+++ /dev/null
@@ -1,136 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.mongo;
-
-import com.epam.dlab.billing.DlabResourceType;
-import com.google.common.base.MoreObjects;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.Collections;
-import java.util.Comparator;
-import java.util.Vector;
-
-/**
- * List of the DLab's resources.
- */
-public class ResourceItemList {
-	/**
-	 * List of the resources.
-	 */
-	private final Vector<ResourceItem> list;
-
-
-	/**
-	 * Constructs an empty list of resources.
-	 */
-	public ResourceItemList() {
-		list = new Vector<>();
-	}
-
-
-	/**
-	 * Appends the resource to the list and returns it.
-	 *
-	 * @param resourceId      the resource id.
-	 * @param resourceName    the user friendly name of resource.
-	 * @param type            the type of resource.
-	 * @param user            the name of user.
-	 * @param exploratoryName the name of exploratory.
-	 * @return Instance of the resource.
-	 */
-	public ResourceItem append(String resourceId, String resourceName, DlabResourceType type, String user,
-							   String exploratoryName, String project) {
-		ResourceItem item = new ResourceItem(resourceId, resourceName, type, user, exploratoryName, project);
-		synchronized (this) {
-			int index = Collections.binarySearch(list, item);
-			if (index < 0) {
-				index = -index;
-				if (index > list.size()) {
-					list.add(item);
-				} else {
-					list.add(index - 1, item);
-				}
-			} else {
-				item = list.get(index);
-			}
-		}
-		return item;
-	}
-
-	public ResourceItem append(String resourceId, String resourceName, DlabResourceType type) {
-		return append(resourceId, resourceName, type, null, null, null);
-	}
-
-	/**
-	 * Returns the number of the range in list.
-	 */
-	public int size() {
-		return list.size();
-	}
-
-	/**
-	 * Returns the resource.
-	 *
-	 * @param index index of the resource.
-	 */
-	public ResourceItem get(int index) {
-		return list.get(index);
-	}
-
-	/**
-	 * Comparator for search resource item by resource id.
-	 */
-	private final ResourceItem findItemById = new ResourceItem(null, null, null, null, null, null);
-	private final ComparatorByName compareByName = new ComparatorByName();
-
-	private class ComparatorByName implements Comparator<ResourceItem> {
-
-		@Override
-		public int compare(ResourceItem o1, ResourceItem o2) {
-			return StringUtils.compare(o1.resourceId, o2.resourceId);
-		}
-
-	}
-
-	/**
-	 * Find and return the resource by resource id.
-	 *
-	 * @param resourceId index of the resource.
-	 */
-	public ResourceItem getById(String resourceId) {
-		findItemById.resourceId = resourceId;
-		int index = Collections.binarySearch(list, findItemById, compareByName);
-
-		return (index < 0 ? null : list.get(index));
-	}
-
-	/**
-	 * Removes all of the elements from list.
-	 */
-	public void clear() {
-		list.clear();
-	}
-
-
-	@Override
-	public String toString() {
-		return MoreObjects.toStringHelper(this).add("items", list).toString();
-	}
-}
diff --git a/services/billing-aws/src/main/resources/application.yml b/services/billing-aws/src/main/resources/application.yml
new file mode 100644
index 0000000..8bd3a4f
--- /dev/null
+++ b/services/billing-aws/src/main/resources/application.yml
@@ -0,0 +1,55 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+spring:
+  main:
+    allow-bean-definition-overriding: true
+  data:
+    mongodb:
+      username: admin
+      password: MONGO_PASSWORD
+      database: dlabdb
+      port: 27017
+      host: MONGO_HOST
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /home/OS_USER/keys/ssn.keystore.jks
+server.ssl.key-store-password: KEY_STORE_PASSWORD
+server.ssl.key-alias: ssn
+
+logging:
+  file: /var/opt/dlab/log/ssn/billing.log
+  level:
+    com:
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: dlab
+  resource: KEYCLOAK_CLIENT_ID
+  credentials.secret: KEYCLOAK_CLIENT_SECRET
+  ssl-required: none
+  auth-server-url: KEYCLOAK_AUTH_SERVER_URL
diff --git a/services/billing-aws/src/test/java/com/epam/dlab/mongo/ResourceItemListTest.java b/services/billing-aws/src/test/java/com/epam/dlab/mongo/ResourceItemListTest.java
deleted file mode 100644
index 1c90bb8..0000000
--- a/services/billing-aws/src/test/java/com/epam/dlab/mongo/ResourceItemListTest.java
+++ /dev/null
@@ -1,54 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.mongo;
-
-import static junit.framework.TestCase.assertEquals;
-
-import com.epam.dlab.billing.DlabResourceType;
-import org.junit.Test;
-
-public class ResourceItemListTest {
-
-	@Test
-	public void append() {
-		ResourceItemList list = new ResourceItemList();
-		list.append("tag-user-nb-exp", "exp", DlabResourceType.EXPLORATORY, "user", "exp", "");
-		list.append("tag-user-emr-exp-comp", "comp", DlabResourceType.COMPUTATIONAL, "user", "exp", "");
-		
-		assertEquals(2, list.size());
-		
-		ResourceItem comp = list.get(0);
-		assertEquals("tag-user-emr-exp-comp", comp.getResourceId());
-		assertEquals("comp", comp.getResourceName());
-		assertEquals(DlabResourceType.COMPUTATIONAL, comp.getType());
-		assertEquals("user", comp.getUser());
-		assertEquals("exp", comp.getExploratoryName());
-		
-		ResourceItem exp = list.get(1);
-		assertEquals("tag-user-nb-exp", exp.getResourceId());
-		assertEquals("exp", exp.getResourceName());
-		assertEquals(DlabResourceType.EXPLORATORY, exp.getType());
-		assertEquals("user", exp.getUser());
-		assertEquals("exp", exp.getExploratoryName());
-		
-		list.clear();
-		assertEquals(0, list.size());
-	}
-}
diff --git a/services/billing-azure/billing.yml b/services/billing-azure/billing.yml
index da47bd1..5361d90 100644
--- a/services/billing-azure/billing.yml
+++ b/services/billing-azure/billing.yml
@@ -19,59 +19,74 @@
 #
 # ******************************************************************************
 
-# Authentication info
+spring:
+  main:
+    allow-bean-definition-overriding: true
+  data:
+    mongodb:
+      username: admin
+      password: MONGO_PASSWORD
+      database: dlabdb
+      port: MONGO_PORT
+      host: MONGO_HOST
 
-# Explicit azure authentication parameters
-clientId: <CLIENT_ID>
-clientSecret: <CLIENT_SECRET>
-tenantId: <TENANT_ID>
-subscriptionId: <SUBSCRIPTION_ID>
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
 
-# Contains authentication info (clientId, clientSecret, tenantId, subscriptionId) received after Azure CLI authentication
-# Overrides explicit azure authentication parameters above
-authenticationFile: <AUTHENTICATION_FILE>
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /home/OS_USER/keys/ssn.keystore.jks
+server.ssl.key-store-password: KEY_STORE_PASSWORD
+server.ssl.key-alias: ssn
 
-# Billing configuration for RateCard API. For more details please see https://msdn.microsoft.com/en-us/library/mt219004.aspx
-offerNumber: <OFFER_NUMBER>
-currency: <CURRENCY>
-locale: <LOCALE>
-regionInfo: <REGION_INFO>
-
-# Azure provides with aggregated data by date. Scheduler tries to retrieve data every <period> with <initialDelay> from
-# application startup in minutes.
-# Scheduler retrieves data only for the past period of time from midnight of start day to midnight of end date.
-# Scheduler does not retrieve data for the current date
-initialDelay: 10
-period: 60
-
-billingEnabled: false
-
-aggregationOutputMongoDataSource:
-    host: localhost
-    port: 27017
-    username: admin
-    password: <MONGODB_PASSWORD>
-    database: dlabdb
-
-ssnStorageAccountTagName: <AZURE_SSN_STORAGE_ACCOUNT_TAG>
-sharedStorageAccountTagName: <AZURE_SHARED_STORAGE_ACCOUNT_TAG>
-datalakeTagName: <AZURE_DATALAKE_TAG>
-
-# Logging configuration.
 logging:
-  # Default logging level
-  level: INFO
-  # Logging levels for appenders.
-  loggers:
-    com.epam: DEBUG
-    org.apache.http: WARN
-    org.mongodb.driver: WARN
-    org.hibernate: WARN
-  #Logging appenders
-  appenders:
-    #- type: console
-    - type: file
-      currentLogFilename: /var/opt/dlab/log/ssn/billing.log
-      archive: true
-      archivedLogFilenamePattern: /var/opt/dlab/log/ssn/billing-%d{yyyy-MM-dd}.log.gz
-      archivedFileCount: 10
\ No newline at end of file
+  file: /var/opt/dlab/log/ssn/billing.log
+  level:
+    com:
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: dlab
+  resource: KEYCLOAK_CLIENT_ID
+  credentials.secret: KEYCLOAK_CLIENT_SECRET
+  ssl-required: none
+  auth-server-url: KEYCLOAK_AUTH_SERVER_URL
+
+dlab:
+  sbn: SERVICE_BASE_NAME
+  billingEnabled: true
+
+  # Authentication info
+
+  # Explicit azure authentication parameters
+  clientId: CLIENT_ID
+  clientSecret: CLIENT_SECRET
+  tenantId: TENANT_ID
+  subscriptionId: SUBSCRIPTION_ID
+
+  # Contains authentication info (clientId, clientSecret, tenantId, subscriptionId) received after Azure CLI authentication
+  # Overrides explicit azure authentication parameters above
+  authenticationFile: AUTHENTICATION_FILE
+  # Billing configuration for RateCard API. For more details please see https://msdn.microsoft.com/en-us/library/mt219004.aspx
+  offerNumber: OFFER_NUMBER
+  currency: CURRENCY
+  locale: LOCALE
+  regionInfo: REGION_INFO
+
+  # Azure provides with aggregated data by date. Scheduler tries to retrieve data every <period> with <initialDelay> from
+  # application startup in minutes.
+  # Scheduler retrieves data only for the past period of time from midnight of start day to midnight of end date.
+  # Scheduler does not retrieve data for the current date
+  initialDelay: 10
+  period: 60
+  aggregationOutputMongoDataSource:
+    host: MONGO_HOST
+    port: MONGO_PORT
+    username: admin
+    password: MONGO_PASSWORD
+    database: dlabdb
+  ssnStorageAccountTagName: <AZURE_SSN_STORAGE_ACCOUNT_TAG>
+  sharedStorageAccountTagName: <AZURE_SHARED_STORAGE_ACCOUNT_TAG>
+  datalakeTagName: <AZURE_DATALAKE_TAG>
\ No newline at end of file
diff --git a/services/billing-azure/pom.xml b/services/billing-azure/pom.xml
index 41edc3c..75c8e35 100644
--- a/services/billing-azure/pom.xml
+++ b/services/billing-azure/pom.xml
@@ -28,16 +28,65 @@
         <relativePath>../../pom.xml</relativePath>
     </parent>
     <modelVersion>4.0.0</modelVersion>
-
     <artifactId>billing-azure</artifactId>
 
+    <dependencyManagement>
+        <dependencies>
+            <dependency>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-dependencies</artifactId>
+                <version>2.1.3.RELEASE</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+            <dependency>
+                <groupId>org.keycloak.bom</groupId>
+                <artifactId>keycloak-adapter-bom</artifactId>
+                <version>4.8.3.Final</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
+        </dependencies>
+    </dependencyManagement>
+
     <dependencies>
         <dependency>
-            <groupId>com.microsoft.azure</groupId>
-            <artifactId>azure-client-authentication</artifactId>
-            <version>1.2.1</version>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-configuration-processor</artifactId>
+            <optional>true</optional>
         </dependency>
-
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-data-mongodb</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-web</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-security</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.keycloak</groupId>
+            <artifactId>keycloak-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.keycloak</groupId>
+            <artifactId>keycloak-spring-security-adapter</artifactId>
+            <version>4.8.3.Final</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-test</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>${org.mockito.version}</version>
+            <scope>test</scope>
+        </dependency>
         <dependency>
             <groupId>com.epam.dlab</groupId>
             <artifactId>dlab-model</artifactId>
@@ -45,6 +94,12 @@
         </dependency>
 
         <dependency>
+            <groupId>com.microsoft.azure</groupId>
+            <artifactId>azure-client-authentication</artifactId>
+            <version>1.2.1</version>
+        </dependency>
+
+        <dependency>
             <groupId>org.mongodb</groupId>
             <artifactId>mongo-java-driver</artifactId>
             <version>${org.mongodb.version}</version>
@@ -82,51 +137,13 @@
     <build>
         <plugins>
             <plugin>
-                <artifactId>maven-shade-plugin</artifactId>
-                <version>${maven-shade-plugin.version}</version>
+                <groupId>org.springframework.boot</groupId>
+                <artifactId>spring-boot-maven-plugin</artifactId>
                 <executions>
                     <execution>
-                        <phase>package</phase>
                         <goals>
-                            <goal>shade</goal>
+                            <goal>repackage</goal>
                         </goals>
-                        <configuration>
-                            <createDependencyReducedPom>false</createDependencyReducedPom>
-                            <minimizeJar>true</minimizeJar>
-                            <filters>
-                                <filter>
-                                    <artifact>commons-logging:commons-logging</artifact>
-                                    <includes>**</includes>
-                                </filter>
-                                <filter>
-                                    <artifact>*:*</artifact>
-                                    <excludes>
-                                        <exclude>META-INF/*.SF</exclude>
-                                        <exclude>META-INF/*.DSA</exclude>
-                                        <exclude>META-INF/*.RSA</exclude>
-                                    </excludes>
-                                </filter>
-                            </filters>
-                            <transformers>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer"/>
-                                <transformer
-                                        implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                                    <mainClass>com.epam.dlab.billing.azure.BillingSchedulerAzure</mainClass>
-                                    <manifestEntries>
-                                        <Created-By>&lt;EPAM&gt; Systems</Created-By>
-                                        <Name>com/epam/dlab</Name>
-                                        <Implementation-Title>DLab Azure Billing Module</Implementation-Title>
-                                        <Implementation-Version>${dlab.version}</Implementation-Version>
-                                        <Implementation-Vendor>&lt;EPAM&gt; Systems</Implementation-Vendor>
-                                        <Build-Time>${maven.build.timestamp}</Build-Time>
-                                        <Build-OS>${os.name}</Build-OS>
-                                        <GIT-Branch>${scmBranch}</GIT-Branch>
-                                        <GIT-Commit>${buildNumber}</GIT-Commit>
-                                    </manifestEntries>
-                                </transformer>
-                            </transformers>
-                        </configuration>
                     </execution>
                 </executions>
             </plugin>
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureBillableResourcesService.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureBillableResourcesService.java
deleted file mode 100644
index c4ee5b4..0000000
--- a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureBillableResourcesService.java
+++ /dev/null
@@ -1,315 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.azure;
-
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.billing.DlabResourceType;
-import com.epam.dlab.billing.azure.model.AzureDlabBillableResource;
-import com.epam.dlab.dto.UserInstanceDTO;
-import com.epam.dlab.dto.azure.edge.EdgeInfoAzure;
-import com.epam.dlab.dto.computational.UserComputationalResource;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.collect.Sets;
-import com.mongodb.client.FindIterable;
-import com.mongodb.client.model.Filters;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-import org.bson.Document;
-
-import java.io.IOException;
-import java.util.*;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-import java.util.stream.StreamSupport;
-
-import static com.mongodb.client.model.Projections.exclude;
-import static com.mongodb.client.model.Projections.fields;
-
-/**
- * Helps to retrieve billable resources that are created in scope of DLab usage. Uses MongoDB as data source
- * for created resources
- */
-@Slf4j
-public class AzureBillableResourcesService {
-	private static final String SHARED_RESOURCE = "Shared resource";
-	private static final String[] USER_INSTANCES_EXCLUDED_FIELDS = {"scheduler_data", "last_activity",
-			"computational_resources.scheduler_data", "computational_resources.last_activity"};
-	private final ObjectMapper objectMapper = new ObjectMapper();
-
-	private MongoDbBillingClient mongoDbBillingClient;
-	private String serviceBaseName;
-	private String sharedStorageAccountTagName;
-	private String ssnStorageAccountTagName;
-	private String azureDataLakeTagName;
-
-	/**
-	 * Constructs the service class
-	 *
-	 * @param mongoDbBillingClient        mongodb client to retrieve all billable resources
-	 * @param sharedStorageAccountTagName shared storage account tag name
-	 * @param ssnStorageAccountTagName    ssn storage account tag name
-	 * @param azureDataLakeTagName        azure DataLake tag name
-	 */
-	public AzureBillableResourcesService(MongoDbBillingClient mongoDbBillingClient, String sharedStorageAccountTagName,
-										 String ssnStorageAccountTagName, String azureDataLakeTagName) {
-		this.mongoDbBillingClient = mongoDbBillingClient;
-
-		this.serviceBaseName = getConfigurationSettingValue(MongoKeyWords.SERVICE_BASE_NAME_KEY)
-				.replace('_', '-').toLowerCase();
-
-		this.sharedStorageAccountTagName = sharedStorageAccountTagName;
-		this.ssnStorageAccountTagName = ssnStorageAccountTagName;
-		this.azureDataLakeTagName = azureDataLakeTagName;
-	}
-
-
-	/**
-	 * Collects billable resources
-	 *
-	 * @return set of all billable resources that were created in scope by DLab from its installation to current time
-	 */
-	public Set<AzureDlabBillableResource> getBillableResources() {
-
-		Set<AzureDlabBillableResource> billableResources = new HashSet<>();
-
-		billableResources.addAll(getSsn());
-		billableResources.addAll(getDataLake());
-		billableResources.addAll(getEdgeAndStorageAccount());
-		billableResources.addAll(getNotebooksAndClusters());
-
-		List<AzureDlabBillableResource> list = new ArrayList<>(billableResources);
-		list.sort(Comparator.comparing(AzureDlabBillableResource::getId));
-
-		try {
-			log.debug("Billable resources is \n {}", objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString
-					(list));
-		} catch (JsonProcessingException e) {
-			log.debug("Error during pretty printing. Show simple list", e);
-			log.debug("Billable resources is {}", list);
-		}
-
-		return billableResources;
-	}
-
-	private String getConfigurationSettingValue(String key) {
-
-		Document document = mongoDbBillingClient.getDatabase().getCollection(MongoKeyWords.SETTINGS_COLLECTION)
-				.find(Filters.eq(MongoKeyWords.MONGO_ID, key)).first();
-
-		if (document != null) {
-			String value = document.getString("value");
-			if (StringUtils.isEmpty(value)) {
-				throw new IllegalStateException("Configuration " + key + " does not have value in settings");
-			}
-			log.info("Key {} has value {}", key, value);
-			return value;
-		} else {
-			throw new IllegalStateException("Configuration " + key + " is not present in settings");
-		}
-
-	}
-
-	private String getConfigurationSettingValueOrEmpty(String key) {
-		try {
-			return getConfigurationSettingValue(key);
-		} catch (IllegalStateException e) {
-			log.warn("key {} is not found", key, e);
-			return null;
-		}
-	}
-
-	private Set<AzureDlabBillableResource> getSsn() {
-
-		return Sets.newHashSet(
-				AzureDlabBillableResource.builder().id(serviceBaseName + "-ssn").type(DlabResourceType.SSN).build(),
-				AzureDlabBillableResource.builder().id(ssnStorageAccountTagName).type(DlabResourceType
-						.SSN_STORAGE_ACCOUNT).build(),
-				AzureDlabBillableResource.builder().id(sharedStorageAccountTagName).type(DlabResourceType
-						.COLLABORATION_STORAGE_ACCOUNT).build(),
-				AzureDlabBillableResource.builder().id(serviceBaseName + "-ssn-volume-primary")
-						.type(DlabResourceType.VOLUME).build()
-		);
-	}
-
-	private Set<AzureDlabBillableResource> getDataLake() {
-
-		if (azureDataLakeTagName != null) {
-			return Sets.newHashSet(AzureDlabBillableResource.builder().id(azureDataLakeTagName)
-					.type(DlabResourceType.DATA_LAKE_STORE).build());
-		}
-
-		return Sets.newHashSet();
-	}
-
-	private Set<AzureDlabBillableResource> getEdgeAndStorageAccount() {
-		Map<String, List<Document>> projectEndpoints = StreamSupport.stream(mongoDbBillingClient.getDatabase()
-				.getCollection("Projects").find().spliterator(), false)
-				.collect(Collectors.toMap(key -> key.getString("name").toLowerCase(),
-						value -> (List<Document>) value.get("endpoints")));
-
-		return projectEndpoints.entrySet()
-				.stream()
-				.flatMap(projectEndpoint -> getEdgeAndStoragePerProject(projectEndpoint.getKey(), projectEndpoint.getValue()))
-				.collect(Collectors.toSet());
-	}
-
-	private Stream<AzureDlabBillableResource> getEdgeAndStoragePerProject(String projectName, List<Document> endpoints) {
-		return endpoints
-				.stream()
-				.flatMap(endpoint -> {
-					try {
-						return getEdgeAndStorageAccount(projectName, objectMapper.readValue(
-								objectMapper.writeValueAsString(endpoint.get("edgeInfo")),
-								new com.fasterxml.jackson.core.type.TypeReference<EdgeInfoAzure>() {
-								})).stream();
-					} catch (IOException e) {
-						log.error("Error during preparation of billable resources", e);
-					}
-					return Stream.empty();
-				});
-	}
-
-	private Set<AzureDlabBillableResource> getEdgeAndStorageAccount(String projectName, EdgeInfoAzure edgeInfoAzure) {
-		Set<AzureDlabBillableResource> billableResources = new HashSet<>();
-
-		if (StringUtils.isNotEmpty(edgeInfoAzure.getUserContainerName())) {
-			billableResources.add(AzureDlabBillableResource.builder()
-					.id(edgeInfoAzure.getUserStorageAccountTagName())
-					.type(DlabResourceType.EDGE_STORAGE_ACCOUNT)
-					.user(SHARED_RESOURCE)
-					.project(projectName)
-					.build());
-		}
-
-		if (StringUtils.isNotEmpty(edgeInfoAzure.getInstanceId())) {
-			billableResources.add(AzureDlabBillableResource.builder()
-					.id(edgeInfoAzure.getInstanceId())
-					.type(DlabResourceType.EDGE)
-					.user(SHARED_RESOURCE)
-					.project(projectName)
-					.build());
-
-			billableResources.add(AzureDlabBillableResource.builder()
-					.id(edgeInfoAzure.getInstanceId() + "-volume-primary")
-					.type(DlabResourceType.VOLUME)
-					.user(SHARED_RESOURCE)
-					.project(projectName)
-					.build());
-		}
-
-		return billableResources;
-	}
-
-	private Set<AzureDlabBillableResource> getNotebooksAndClusters() {
-
-		Set<AzureDlabBillableResource> billableResources = new HashSet<>();
-
-		try {
-			final FindIterable<Document> userInstanceDocuments = mongoDbBillingClient.getDatabase()
-					.getCollection(MongoKeyWords.NOTEBOOK_COLLECTION)
-					.find()
-					.projection(fields(exclude(USER_INSTANCES_EXCLUDED_FIELDS)));
-			List<UserInstanceDTO> userInstanceDTOS = objectMapper.readValue(
-					objectMapper.writeValueAsString(userInstanceDocuments),
-					new com.fasterxml.jackson.core.type.TypeReference<List<UserInstanceDTO>>() {
-					});
-
-			if (userInstanceDTOS != null && !userInstanceDTOS.isEmpty()) {
-				userInstanceDTOS.forEach(e -> billableResources.addAll(getNotebookAndClusters(e)));
-			}
-
-		} catch (IOException e) {
-			log.error("Error during preparation of billable resources", e);
-		}
-
-		return billableResources;
-	}
-
-	private Set<AzureDlabBillableResource> getNotebookAndClusters(UserInstanceDTO userInstanceDTO) {
-		Set<AzureDlabBillableResource> notebookResources = new HashSet<>();
-
-		if (StringUtils.isNotEmpty(userInstanceDTO.getExploratoryId())) {
-			notebookResources.add(AzureDlabBillableResource.builder()
-					.id(userInstanceDTO.getExploratoryId())
-					.type(DlabResourceType.EXPLORATORY)
-					.user(userInstanceDTO.getUser())
-					.project(userInstanceDTO.getProject())
-					.notebookId(userInstanceDTO.getExploratoryId())
-					.resourceName(userInstanceDTO.getExploratoryName())
-					.build());
-			notebookResources.addAll(getVolumes(userInstanceDTO, userInstanceDTO.getExploratoryId(), "Volume primary",
-					"Volume secondary"));
-
-			if (userInstanceDTO.getResources() != null && !userInstanceDTO.getResources().isEmpty()) {
-				for (UserComputationalResource userComputationalResource : userInstanceDTO.getResources()) {
-					if (StringUtils.isNotEmpty(userComputationalResource.getComputationalId())) {
-
-						notebookResources.add(AzureDlabBillableResource.builder()
-								.id(userComputationalResource.getComputationalId())
-								.type(DlabResourceType.COMPUTATIONAL)
-								.user(userInstanceDTO.getUser())
-								.project(userInstanceDTO.getProject())
-								.notebookId(userInstanceDTO.getExploratoryId())
-								.resourceName(userComputationalResource.getComputationalName())
-								.build());
-						final List<AzureDlabBillableResource> volumes = getVolumes(userInstanceDTO,
-								userComputationalResource.getComputationalId(),
-								userComputationalResource.getComputationalName() + " volume primary",
-								userComputationalResource.getComputationalName() + " volume secondary");
-						notebookResources.addAll(volumes);
-
-					} else {
-						log.error("Computational with empty id {} is found in notebook {}. Skip it.",
-								userComputationalResource, userInstanceDTO);
-					}
-				}
-			}
-
-		} else {
-			log.error("Notebook {} with empty id id found. Skip it.", userInstanceDTO);
-		}
-
-		return notebookResources;
-	}
-
-	private List<AzureDlabBillableResource> getVolumes(UserInstanceDTO userInstanceDTO, String exploratoryId, String
-			primaryVolumeName, String secondaryVolumeName) {
-
-		return Arrays.asList(
-				AzureDlabBillableResource.builder()
-						.id(exploratoryId + "-volume-primary")
-						.type(DlabResourceType.VOLUME)
-						.user(userInstanceDTO.getUser())
-						.project(userInstanceDTO.getProject())
-						.notebookId(userInstanceDTO.getExploratoryId())
-						.resourceName(primaryVolumeName)
-						.build(),
-				AzureDlabBillableResource.builder()
-						.id(exploratoryId + "-volume-secondary")
-						.type(DlabResourceType.VOLUME)
-						.user(userInstanceDTO.getUser())
-						.project(userInstanceDTO.getProject())
-						.notebookId(userInstanceDTO.getExploratoryId())
-						.resourceName(secondaryVolumeName)
-						.build()
-		);
-	}
-}
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureBillingDetailsService.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureBillingDetailsService.java
deleted file mode 100644
index 6d84eb8..0000000
--- a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureBillingDetailsService.java
+++ /dev/null
@@ -1,194 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.azure;
-
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.billing.BillingCalculationUtils;
-import com.epam.dlab.billing.DlabResourceType;
-import com.google.common.collect.Lists;
-import com.mongodb.client.AggregateIterable;
-import com.mongodb.client.model.*;
-import com.mongodb.client.result.UpdateResult;
-import lombok.extern.slf4j.Slf4j;
-import org.bson.Document;
-import org.bson.conversions.Bson;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.function.Consumer;
-import java.util.stream.StreamSupport;
-
-@Slf4j
-public class AzureBillingDetailsService {
-	private MongoDbBillingClient mongoDbBillingClient;
-	private String currencyCode;
-
-	public AzureBillingDetailsService(MongoDbBillingClient mongoDbBillingClient, String currencyCode) {
-		this.mongoDbBillingClient = mongoDbBillingClient;
-		this.currencyCode = currencyCode;
-	}
-
-	public void updateBillingDetails() {
-		StreamSupport.stream(mongoDbBillingClient.getDatabase()
-				.getCollection(MongoKeyWords.NOTEBOOK_COLLECTION).find().spliterator(), false)
-				.map(a -> a.getString(MongoKeyWords.DLAB_USER))
-				.forEach(this::updateBillingDetails);
-	}
-
-	public void updateBillingDetails(String user) {
-		log.debug("Updating billing details for user {}", user);
-
-		try {
-			AggregateIterable<Document> aggregateIterable = mongoDbBillingClient.getDatabase()
-					.getCollection(MongoKeyWords.BILLING_DETAILS)
-					.aggregate(Lists.newArrayList(
-							Aggregates.match(
-									Filters.and(
-											Filters.eq(MongoKeyWords.DLAB_USER, user),
-											Filters.in(MongoKeyWords.RESOURCE_TYPE,
-													DlabResourceType.EXPLORATORY.toString(),
-													DlabResourceType.COMPUTATIONAL.toString(),
-													DlabResourceType.VOLUME.toString())
-									)
-							),
-
-							Aggregates.group(getGroupingFields(
-									MongoKeyWords.DLAB_ID,
-									MongoKeyWords.DLAB_USER,
-									MongoKeyWords.EXPLORATORY_ID,
-									MongoKeyWords.RESOURCE_TYPE,
-									MongoKeyWords.RESOURCE_NAME,
-									MongoKeyWords.COMPUTATIONAL_ID,
-									MongoKeyWords.METER_CATEGORY),
-									Accumulators.sum(MongoKeyWords.COST, MongoKeyWords.prepend$(MongoKeyWords.COST)),
-									Accumulators.min(MongoKeyWords.USAGE_FROM, MongoKeyWords.prepend$(MongoKeyWords
-											.USAGE_DAY)),
-									Accumulators.max(MongoKeyWords.USAGE_TO, MongoKeyWords.prepend$(MongoKeyWords
-											.USAGE_DAY))
-							),
-
-							Aggregates.sort(Sorts.ascending(
-									MongoKeyWords.prependId(MongoKeyWords.RESOURCE_NAME),
-									MongoKeyWords.prependId(MongoKeyWords.METER_CATEGORY)))
-							)
-					);
-
-			updateBillingDetails(user, mapToDetails(aggregateIterable));
-		} catch (RuntimeException e) {
-			log.error("Updating billing details for user {} is failed", user, e);
-		}
-	}
-
-	private List<Document> mapToDetails(AggregateIterable<Document> aggregateIterable) {
-		List<Document> billingDetails = new ArrayList<>();
-		for (Document document : aggregateIterable) {
-			Document oldRef = (Document) document.get(MongoKeyWords.MONGO_ID);
-			Document newDocument = new Document();
-
-			newDocument.append(MongoKeyWords.USAGE_FROM, document.getString(MongoKeyWords.USAGE_FROM));
-			newDocument.append(MongoKeyWords.USAGE_TO, document.getString(MongoKeyWords.USAGE_TO));
-			newDocument.append(MongoKeyWords.COST, document.getDouble(MongoKeyWords.COST));
-
-			newDocument.append(MongoKeyWords.METER_CATEGORY, oldRef.getString(MongoKeyWords.METER_CATEGORY));
-			newDocument.append(MongoKeyWords.RESOURCE_NAME, oldRef.getString(MongoKeyWords.RESOURCE_NAME));
-			newDocument.append(MongoKeyWords.EXPLORATORY_ID, oldRef.getString(MongoKeyWords.EXPLORATORY_ID));
-			newDocument.append(MongoKeyWords.RESOURCE_TYPE, oldRef.getString(MongoKeyWords.RESOURCE_TYPE));
-			newDocument.append(MongoKeyWords.CURRENCY_CODE, currencyCode);
-
-			billingDetails.add(newDocument);
-		}
-
-		return billingDetails;
-	}
-
-
-	private void updateBillingDetails(String user, List<Document> billingDetails) {
-		if (!billingDetails.isEmpty()) {
-			Map<String, List<Document>> info = new HashMap<>();
-
-			Consumer<Document> aggregator = e -> {
-
-				String notebookId = e.getString(MongoKeyWords.EXPLORATORY_ID);
-				List<Document> documents = info.get(notebookId);
-				if (documents == null) {
-					documents = new ArrayList<>();
-				}
-
-				documents.add(e);
-				info.put(notebookId, documents);
-			};
-
-			billingDetails.stream()
-					.filter(e -> DlabResourceType.EXPLORATORY.toString().equals(e.getString(MongoKeyWords
-							.RESOURCE_TYPE)))
-					.forEach(aggregator);
-
-			billingDetails.stream()
-					.filter(e -> DlabResourceType.COMPUTATIONAL.toString().equals(e.getString(MongoKeyWords
-							.RESOURCE_TYPE)))
-					.forEach(aggregator);
-
-			billingDetails.stream()
-					.filter(e -> DlabResourceType.VOLUME.toString().equals(e.getString(MongoKeyWords.RESOURCE_TYPE)))
-					.forEach(aggregator);
-
-
-			for (Map.Entry<String, List<Document>> entry : info.entrySet()) {
-				double sum = entry.getValue().stream().mapToDouble(e -> e.getDouble(MongoKeyWords.COST)).sum();
-
-				entry.getValue().forEach(e -> e.put(MongoKeyWords.COST_STRING,
-						BillingCalculationUtils.formatDouble(e.getDouble(MongoKeyWords.COST))));
-
-				log.debug("Update billing for notebook {}, cost is {} {}", entry.getKey(), sum, currencyCode);
-
-				Bson updates = Updates.combine(
-						Updates.set(MongoKeyWords.COST_STRING, BillingCalculationUtils.formatDouble(sum)),
-						Updates.set(MongoKeyWords.COST, sum),
-						Updates.set(MongoKeyWords.CURRENCY_CODE, currencyCode),
-						Updates.set(MongoKeyWords.BILLING_DETAILS, entry.getValue()));
-
-				UpdateResult updateResult = mongoDbBillingClient.getDatabase()
-						.getCollection(MongoKeyWords.NOTEBOOK_COLLECTION)
-						.updateOne(
-								Filters.and(
-										Filters.eq(MongoKeyWords.DLAB_USER, user),
-										Filters.eq(MongoKeyWords.EXPLORATORY_ID_OLD, entry.getKey())
-								),
-								updates
-						);
-
-				log.debug("Update result for {}/{} is {}", user, entry.getKey(), updateResult);
-			}
-		} else {
-			log.warn("No billing details found for notebooks for user {}", user);
-		}
-	}
-
-
-	private Document getGroupingFields(String... fieldNames) {
-		Document d = new Document();
-		for (String name : fieldNames) {
-			d.put(name, "$" + name);
-		}
-		return d;
-	}
-}
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureInvoiceCalculationService.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureInvoiceCalculationService.java
index 0fd6098..b3eec4f 100644
--- a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureInvoiceCalculationService.java
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/AzureInvoiceCalculationService.java
@@ -37,7 +37,10 @@
 
 import java.io.IOException;
 import java.net.URISyntaxException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;
 
 /**
@@ -61,13 +64,9 @@
 	 * Constructs service class
 	 *
 	 * @param billingConfigurationAzure contains <code>billing-azure</code> module configuration
-	 * @param billableResources         resources that invoices should be calculated for
 	 */
-	public AzureInvoiceCalculationService(BillingConfigurationAzure billingConfigurationAzure,
-										  Set<AzureDlabBillableResource> billableResources) {
+	public AzureInvoiceCalculationService(BillingConfigurationAzure billingConfigurationAzure) {
 		this.billingConfigurationAzure = billingConfigurationAzure;
-		this.billableResources = billableResources.stream().collect(Collectors.toMap(AzureDlabBillableResource::getId,
-                e -> e));
 	}
 
 	/**
@@ -135,8 +134,6 @@
 
 		if (usageAggregateRecordList != null && !usageAggregateRecordList.isEmpty()) {
 			log.info("Processing {} usage records", usageAggregateRecordList.size());
-
-
 			usageAggregateRecordList = usageAggregateRecordList.stream().filter(e ->
 					matchProperStructure(e) && isBillableDlabResource(e))
 					.collect(Collectors.toList());
@@ -164,7 +161,6 @@
 	}
 
 	private boolean matchProperStructure(UsageAggregateRecord record) {
-
 		if (record.getProperties() == null) {
 			return false;
 		}
@@ -181,13 +177,10 @@
 
 	private boolean isBillableDlabResource(UsageAggregateRecord record) {
 		String dlabId = record.getProperties().getParsedInstanceData().getMicrosoftResources().getTags().get("Name");
-		return dlabId != null && !dlabId.isEmpty() && billableResources.containsKey(dlabId);
+		return dlabId != null && !dlabId.isEmpty() && dlabId.startsWith(billingConfigurationAzure.getSbn());
 	}
 
-	private AzureDailyResourceInvoice calculateInvoice(Map<String, Meter> rates, UsageAggregateRecord record,
-													   String dlabId) {
-
-		AzureDlabBillableResource azureDlabBillableResource = billableResources.get(dlabId);
+	private AzureDailyResourceInvoice calculateInvoice(Map<String, Meter> rates, UsageAggregateRecord record, String dlabId) {
 		String meterId = record.getProperties().getMeterId();
 		Meter rateCard = rates.get(meterId);
 
@@ -196,34 +189,25 @@
 			if (meterRates != null) {
 				Double rate = meterRates.get(AzureRateCardClient.MAIN_RATE_KEY);
 				if (rate != null) {
-
-					AzureDailyResourceInvoice azureDailyResourceInvoice = new AzureDailyResourceInvoice
-                            (azureDlabBillableResource);
-					azureDailyResourceInvoice.setUsageStartDate(record.getProperties().getUsageStartTime());
-					azureDailyResourceInvoice.setUsageEndDate(record.getProperties().getUsageEndTime());
-					azureDailyResourceInvoice.setMeterCategory(record.getProperties().getMeterCategory());
-					azureDailyResourceInvoice.setCost(
-							BillingCalculationUtils.round(rate * record.getProperties().getQuantity(), 2));
-					azureDailyResourceInvoice.setDay(getDay(record.getProperties().getUsageStartTime()));
-					azureDailyResourceInvoice.setCurrencyCode(billingConfigurationAzure.getCurrency());
-
-					log.trace("Generated invoice for azure resource {}", azureDailyResourceInvoice);
-
-					return azureDailyResourceInvoice;
-
+					return AzureDailyResourceInvoice.builder()
+							.dlabId(dlabId)
+							.usageStartDate(getDay(record.getProperties().getUsageStartTime()))
+							.usageEndDate(getDay(record.getProperties().getUsageEndTime()))
+							.meterCategory(record.getProperties().getMeterCategory())
+							.cost(BillingCalculationUtils.round(rate * record.getProperties().getQuantity(), 3))
+							.day(getDay(record.getProperties().getUsageStartTime()))
+							.currencyCode(billingConfigurationAzure.getCurrency())
+							.build();
 				} else {
-					log.error("Rate Card {} has no rate for meter id {} and rate id {}. Skip record {}. Azure resource" +
-                                    " {}",
-							rateCard, meterId, AzureRateCardClient.MAIN_RATE_KEY, record, azureDlabBillableResource);
+					log.error("Rate Card {} has no rate for meter id {} and rate id {}. Skip record {}.",
+							rateCard, meterId, AzureRateCardClient.MAIN_RATE_KEY, record);
 				}
 			} else {
-				log.error("Rate Card {} has no meter rates fro meter id {}. Skip record {}. Azure resource {}",
-						rateCard, meterId, record, azureDlabBillableResource);
+				log.error("Rate Card {} has no meter rates fro meter id {}. Skip record {}",
+						rateCard, meterId, record);
 			}
 		} else {
-			log.error("Meter rate {} form usage aggregate is not found in rate card. Skip record {}.  Azure resource " +
-                            "{}",
-					meterId, record, azureDlabBillableResource);
+			log.error("Meter rate {} form usage aggregate is not found in rate card. Skip record {}.", meterId, record);
 		}
 
 		return null;
@@ -231,7 +215,6 @@
 
 	private String getNewToken() {
 		try {
-
 			log.info("Requesting authentication token ... ");
 			ApplicationTokenCredentials applicationTokenCredentials = new ApplicationTokenCredentials(
 					billingConfigurationAzure.getClientId(),
@@ -247,7 +230,6 @@
 	}
 
 	private String getDay(String dateTime) {
-
 		if (dateTime != null) {
 			String[] parts = dateTime.split("T");
 			if (parts.length == 2) {
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/BillingAzureApplication.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/BillingAzureApplication.java
new file mode 100644
index 0000000..1a40767
--- /dev/null
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/BillingAzureApplication.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.azure;
+
+import org.springframework.boot.SpringApplication;
+import org.springframework.boot.autoconfigure.SpringBootApplication;
+import org.springframework.boot.context.properties.EnableConfigurationProperties;
+import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
+
+@SpringBootApplication
+@EnableMongoRepositories
+@EnableConfigurationProperties
+public class BillingAzureApplication {
+
+    public static void main(String[] args) {
+        SpringApplication.run(BillingAzureApplication.class, args);
+    }
+
+}
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/BillingSchedulerAzure.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/BillingSchedulerAzure.java
deleted file mode 100644
index d0b4dba..0000000
--- a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/BillingSchedulerAzure.java
+++ /dev/null
@@ -1,314 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.azure;
-
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.billing.azure.config.AzureAuthFile;
-import com.epam.dlab.billing.azure.config.BillingConfigurationAzure;
-import com.epam.dlab.billing.azure.logging.AppenderConsole;
-import com.epam.dlab.billing.azure.logging.AppenderFile;
-import com.epam.dlab.billing.azure.model.AzureDailyResourceInvoice;
-import com.epam.dlab.billing.azure.model.AzureDlabBillableResource;
-import com.epam.dlab.billing.azure.model.BillingPeriod;
-import com.epam.dlab.exceptions.DlabException;
-import com.epam.dlab.exceptions.InitializationException;
-import com.epam.dlab.util.mongo.modules.IsoDateModule;
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
-import com.fasterxml.jackson.datatype.guava.GuavaModule;
-import com.mongodb.BasicDBObject;
-import com.mongodb.client.model.Filters;
-import com.mongodb.client.model.UpdateOptions;
-import com.mongodb.client.result.UpdateResult;
-import lombok.extern.slf4j.Slf4j;
-import org.bson.Document;
-import org.joda.time.DateTime;
-import org.joda.time.DateTimeZone;
-import org.joda.time.format.DateTimeFormat;
-import org.joda.time.format.DateTimeFormatter;
-
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.nio.file.Path;
-import java.nio.file.Paths;
-import java.util.List;
-import java.util.Set;
-import java.util.concurrent.Executors;
-import java.util.concurrent.ScheduledExecutorService;
-import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
-
-@Slf4j
-public class BillingSchedulerAzure {
-	private final ScheduledExecutorService executorService = Executors.newSingleThreadScheduledExecutor();
-	private BillingConfigurationAzure billingConfigurationAzure;
-	private MongoDbBillingClient mongoDbBillingClient;
-
-	public BillingSchedulerAzure(String filePath) throws IOException, InitializationException {
-		try (FileInputStream fin = new FileInputStream(filePath)) {
-			final ObjectMapper objectMapper = new ObjectMapper(new YAMLFactory()).registerModule(new GuavaModule());
-			objectMapper.registerSubtypes(AppenderFile.class, AppenderConsole.class);
-			this.billingConfigurationAzure = objectMapper.readValue(fin,
-							BillingConfigurationAzure.class);
-
-			Path path = Paths.get(billingConfigurationAzure.getAuthenticationFile());
-
-			if (path.toFile().exists()) {
-
-				log.info("Read and override configs using auth file");
-
-				try {
-					AzureAuthFile azureAuthFile = new ObjectMapper().readValue(path.toFile(), AzureAuthFile.class);
-					this.billingConfigurationAzure.setClientId(azureAuthFile.getClientId());
-					this.billingConfigurationAzure.setClientSecret(azureAuthFile.getClientSecret());
-					this.billingConfigurationAzure.setTenantId(azureAuthFile.getTenantId());
-					this.billingConfigurationAzure.setSubscriptionId(azureAuthFile.getSubscriptionId());
-				} catch (IOException e) {
-					log.error("Cannot read configuration file", e);
-					throw e;
-				}
-				log.info("Configs from auth file are used");
-			} else {
-				log.info("Configs from yml file are used");
-			}
-
-			this.mongoDbBillingClient = new MongoDbBillingClient
-					(billingConfigurationAzure.getAggregationOutputMongoDataSource().getHost(),
-							billingConfigurationAzure.getAggregationOutputMongoDataSource().getPort(),
-							billingConfigurationAzure.getAggregationOutputMongoDataSource().getDatabase(),
-							billingConfigurationAzure.getAggregationOutputMongoDataSource().getUsername(),
-							billingConfigurationAzure.getAggregationOutputMongoDataSource().getPassword());
-			this.billingConfigurationAzure.getLogging().configure();
-		}
-	}
-
-	public static void main(String[] args) throws Exception {
-		if (args != null && args.length == 2) {
-			BillingSchedulerAzure billingSchedulerAzure = new BillingSchedulerAzure(args[1]);
-			billingSchedulerAzure.start();
-
-		} else {
-			log.info("Wrong arguments. Please provide with path to billing configuration");
-		}
-	}
-
-	public void start() {
-		if (billingConfigurationAzure.isBillingEnabled()) {
-			executorService.scheduleWithFixedDelay(new CalculateBilling(billingConfigurationAzure,
-							mongoDbBillingClient), billingConfigurationAzure.getInitialDelay(),
-					billingConfigurationAzure.getPeriod(), TimeUnit.MINUTES);
-		} else {
-			log.info("======Billing is disabled======");
-		}
-	}
-
-	public void stop() {
-		try {
-			log.info("Stopping Azure billing scheduler");
-			if (!executorService.awaitTermination(30, TimeUnit.SECONDS)) {
-				log.error("Force shut down");
-				executorService.shutdownNow();
-			}
-			mongoDbBillingClient.getClient().close();
-		} catch (InterruptedException e) {
-			executorService.shutdownNow();
-			mongoDbBillingClient.getClient().close();
-			Thread.currentThread().interrupt();
-		}
-	}
-
-
-	@Slf4j
-	private static class CalculateBilling implements Runnable {
-		private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormat.forPattern
-				("yyyy-MM-dd'T'HH:mm:ss" +
-						".SSS'Z");
-		private static final String SCHEDULER_ID = "azureBillingScheduler";
-		private AzureBillingDetailsService azureBillingDetailsService;
-		private BillingConfigurationAzure billingConfigurationAzure;
-		private MongoDbBillingClient client;
-		private ObjectMapper objectMapper = new ObjectMapper().registerModule(new IsoDateModule());
-
-
-		public CalculateBilling(BillingConfigurationAzure billingConfigurationAzure, MongoDbBillingClient client) {
-			this.billingConfigurationAzure = billingConfigurationAzure;
-			this.client = client;
-			this.azureBillingDetailsService = new AzureBillingDetailsService(client,
-					billingConfigurationAzure.getCurrency());
-		}
-
-		@Override
-		public void run() {
-			try {
-				BillingPeriod billingPeriod = getBillingPeriod();
-				DateTime currentTime = new DateTime().withZone(DateTimeZone.UTC);
-				if (billingPeriod == null) {
-					saveBillingPeriod(initialSchedulerInfo(currentTime));
-				} else {
-					log.info("Billing period from db is {}", billingPeriod);
-
-					if (shouldTriggerJobByTime(currentTime, billingPeriod)) {
-
-						boolean hasNew = run(billingPeriod);
-						if (hasNew) {
-							log.info("Updating billing details");
-							azureBillingDetailsService.updateBillingDetails();
-						}
-
-
-						updateBillingPeriod(billingPeriod, currentTime, hasNew);
-					}
-				}
-			} catch (RuntimeException e) {
-				log.error("Cannot update billing information", e);
-			}
-		}
-
-		private BillingPeriod initialSchedulerInfo(DateTime currentTime) {
-
-			BillingPeriod initialBillingPeriod = new BillingPeriod();
-			initialBillingPeriod.setFrom(currentTime.minusDays(2).toDateMidnight().toDate());
-			initialBillingPeriod.setTo(currentTime.toDateMidnight().toDate());
-
-			log.info("Initial scheduler info {}", initialBillingPeriod);
-
-			return initialBillingPeriod;
-
-		}
-
-		private boolean shouldTriggerJobByTime(DateTime currentTime, BillingPeriod billingPeriod) {
-
-			DateTime dateTimeToFromBillingPeriod = new DateTime(billingPeriod.getTo()).withZone(DateTimeZone.UTC);
-
-			log.info("Comparing current time[{}, {}] and from scheduler info [{}, {}]", currentTime,
-					currentTime.toDateMidnight(),
-					dateTimeToFromBillingPeriod, dateTimeToFromBillingPeriod.toDateMidnight());
-
-			if (currentTime.toDateMidnight().isAfter(dateTimeToFromBillingPeriod.toDateMidnight())
-					|| currentTime.toDateMidnight().isEqual(dateTimeToFromBillingPeriod.toDateMidnight())) {
-				log.info("Should trigger the job by time");
-				return true;
-			}
-
-			log.info("Should not trigger the job by time");
-			return false;
-		}
-
-		private boolean run(BillingPeriod billingPeriod) {
-
-			AzureBillableResourcesService azureBillableResourcesService = new AzureBillableResourcesService(client,
-					billingConfigurationAzure.getSharedStorageAccountTagName(),
-					billingConfigurationAzure.getSsnStorageAccountTagName(),
-					billingConfigurationAzure.getDatalakeTagName());
-			Set<AzureDlabBillableResource> billableResources = azureBillableResourcesService.getBillableResources();
-
-			AzureInvoiceCalculationService azureInvoiceCalculationService
-					= new AzureInvoiceCalculationService(billingConfigurationAzure, billableResources);
-
-			List<AzureDailyResourceInvoice> dailyInvoices = azureInvoiceCalculationService.generateInvoiceData(
-					DATE_TIME_FORMATTER.print(new DateTime(billingPeriod.getFrom()).withZone(DateTimeZone.UTC)),
-					DATE_TIME_FORMATTER.print(new DateTime(billingPeriod.getTo()).withZone(DateTimeZone.UTC)));
-
-
-			if (!dailyInvoices.isEmpty()) {
-
-				client.getDatabase().getCollection(MongoKeyWords.BILLING_DETAILS)
-						.insertMany(dailyInvoices.stream().map(AzureDailyResourceInvoice::to)
-								.collect(Collectors.toList()));
-
-				return true;
-
-			} else {
-				log.warn("Daily invoices is empty for period {}", billingPeriod);
-
-				return false;
-			}
-		}
-
-		private void updateBillingPeriod(BillingPeriod billingPeriod, DateTime currentTime, boolean updates) {
-
-			try {
-				client.getDatabase().getCollection(MongoKeyWords.AZURE_BILLING_SCHEDULER_HISTORY).insertOne(
-						Document.parse(objectMapper.writeValueAsString(billingPeriod)).append("updates", updates));
-				log.debug("History of billing periods is updated with {}",
-						objectMapper.writeValueAsString(billingPeriod));
-			} catch (JsonProcessingException e) {
-				log.error("Cannot update history of billing periods", e);
-
-			}
-
-			billingPeriod.setFrom(billingPeriod.getTo());
-
-			if (new DateTime(billingPeriod.getFrom()).withZone(DateTimeZone.UTC).toDateMidnight()
-					.isEqual(currentTime.toDateMidnight())) {
-
-				log.info("Setting billing to one day later");
-				billingPeriod.setTo(currentTime.plusDays(1).toDateMidnight().toDate());
-
-			} else {
-				billingPeriod.setTo(currentTime.toDateMidnight().toDate());
-			}
-
-			saveBillingPeriod(billingPeriod);
-		}
-
-		private boolean saveBillingPeriod(BillingPeriod billingPeriod) {
-			log.debug("Saving billing period {}", billingPeriod);
-
-			try {
-				UpdateResult updateResult = client.getDatabase().getCollection(MongoKeyWords.AZURE_BILLING_SCHEDULER)
-						.updateMany(Filters.eq(MongoKeyWords.MONGO_ID, SCHEDULER_ID),
-								new BasicDBObject("$set",
-										Document.parse(objectMapper.writeValueAsString(billingPeriod))
-												.append(MongoKeyWords.MONGO_ID, SCHEDULER_ID))
-								, new UpdateOptions().upsert(true)
-						);
-
-				log.debug("Billing period save operation result is {}", updateResult);
-				return true;
-			} catch (JsonProcessingException e) {
-				log.error("Cannot save billing period", e);
-			}
-
-			return false;
-		}
-
-		private BillingPeriod getBillingPeriod() {
-			log.debug("Get billing period");
-
-			try {
-				Document document = client.getDatabase().getCollection(MongoKeyWords.AZURE_BILLING_SCHEDULER)
-						.find(Filters.eq(MongoKeyWords.MONGO_ID, SCHEDULER_ID)).first();
-
-				log.debug("Retrieved billing period document {}", document);
-				if (document != null) {
-					return objectMapper.readValue(document.toJson(), BillingPeriod.class);
-				}
-
-				return null;
-
-			} catch (IOException e) {
-				log.error("Cannot save billing period", e);
-				throw new DlabException("Cannot parse string", e);
-			}
-		}
-	}
-}
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/CalculateBillingService.java
similarity index 80%
copy from integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java
copy to services/billing-azure/src/main/java/com/epam/dlab/billing/azure/CalculateBillingService.java
index 1e49a60..d432337 100644
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/CalculateBillingService.java
@@ -17,7 +17,12 @@
  * under the License.
  */
 
-package com.epam.dlab.automation.docker;
+package com.epam.dlab.billing.azure;
 
-class Labels {
+import com.epam.dlab.dto.billing.BillingData;
+
+import java.util.List;
+
+public interface CalculateBillingService {
+    List<BillingData> getBillingData();
 }
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/CalculateBillingServiceImpl.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/CalculateBillingServiceImpl.java
new file mode 100644
index 0000000..3b3d60b
--- /dev/null
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/CalculateBillingServiceImpl.java
@@ -0,0 +1,245 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.azure;
+
+import com.epam.dlab.MongoKeyWords;
+import com.epam.dlab.billing.azure.config.AzureAuthFile;
+import com.epam.dlab.billing.azure.config.BillingConfigurationAzure;
+import com.epam.dlab.billing.azure.model.AzureDailyResourceInvoice;
+import com.epam.dlab.billing.azure.model.BillingPeriod;
+import com.epam.dlab.dto.billing.BillingData;
+import com.epam.dlab.exceptions.DlabException;
+import com.epam.dlab.util.mongo.modules.IsoDateModule;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.mongodb.BasicDBObject;
+import com.mongodb.client.model.Filters;
+import com.mongodb.client.model.UpdateOptions;
+import com.mongodb.client.result.UpdateResult;
+import lombok.extern.slf4j.Slf4j;
+import org.bson.Document;
+import org.joda.time.DateTime;
+import org.joda.time.DateTimeZone;
+import org.joda.time.format.DateTimeFormat;
+import org.joda.time.format.DateTimeFormatter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.io.IOException;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.time.LocalDate;
+import java.util.Collections;
+import java.util.List;
+import java.util.Optional;
+import java.util.stream.Collectors;
+
+@Slf4j
+@Service
+public class CalculateBillingServiceImpl implements CalculateBillingService {
+    private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z");
+    private static final String SCHEDULER_ID = "azureBillingScheduler";
+    private final BillingConfigurationAzure billingConfigurationAzure;
+    private final MongoDbBillingClient mongoDbBillingClient;
+    private ObjectMapper objectMapper;
+
+    @Autowired
+    public CalculateBillingServiceImpl(BillingConfigurationAzure configuration) throws IOException {
+        billingConfigurationAzure = configuration;
+        objectMapper = new ObjectMapper().registerModule(new IsoDateModule());
+        Path path = Paths.get(billingConfigurationAzure.getAuthenticationFile());
+
+        if (path.toFile().exists()) {
+            log.info("Read and override configs using auth file");
+            try {
+                AzureAuthFile azureAuthFile = new ObjectMapper().readValue(path.toFile(), AzureAuthFile.class);
+                this.billingConfigurationAzure.setClientId(azureAuthFile.getClientId());
+                this.billingConfigurationAzure.setClientSecret(azureAuthFile.getClientSecret());
+                this.billingConfigurationAzure.setTenantId(azureAuthFile.getTenantId());
+                this.billingConfigurationAzure.setSubscriptionId(azureAuthFile.getSubscriptionId());
+            } catch (IOException e) {
+                log.error("Cannot read configuration file", e);
+                throw e;
+            }
+            log.info("Configs from auth file are used");
+        } else {
+            log.info("Configs from yml file are used");
+        }
+
+        this.mongoDbBillingClient = new MongoDbBillingClient
+                (billingConfigurationAzure.getAggregationOutputMongoDataSource().getHost(),
+                        billingConfigurationAzure.getAggregationOutputMongoDataSource().getPort(),
+                        billingConfigurationAzure.getAggregationOutputMongoDataSource().getDatabase(),
+                        billingConfigurationAzure.getAggregationOutputMongoDataSource().getUsername(),
+                        billingConfigurationAzure.getAggregationOutputMongoDataSource().getPassword());
+    }
+
+    @Override
+    public List<BillingData> getBillingData() {
+        try {
+            BillingPeriod billingPeriod = getBillingPeriod();
+            DateTime currentTime = new DateTime().withZone(DateTimeZone.UTC);
+            if (billingPeriod == null) {
+                saveBillingPeriod(initialSchedulerInfo(currentTime));
+            } else {
+                log.info("Billing period from db is {}", billingPeriod);
+
+                if (shouldTriggerJobByTime(currentTime, billingPeriod)) {
+                    List<BillingData> billingData = getBillingData(billingPeriod);
+                    boolean hasNew = !billingData.isEmpty();
+                    updateBillingPeriod(billingPeriod, currentTime, hasNew);
+                    return billingData;
+                }
+            }
+        } catch (RuntimeException e) {
+            log.error("Cannot update billing information", e);
+        }
+        return Collections.emptyList();
+    }
+
+    private BillingPeriod initialSchedulerInfo(DateTime currentTime) {
+
+        BillingPeriod initialBillingPeriod = new BillingPeriod();
+        initialBillingPeriod.setFrom(currentTime.minusDays(2).toDateMidnight().toDate());
+        initialBillingPeriod.setTo(currentTime.toDateMidnight().toDate());
+
+        log.info("Initial scheduler info {}", initialBillingPeriod);
+
+        return initialBillingPeriod;
+
+    }
+
+    private boolean shouldTriggerJobByTime(DateTime currentTime, BillingPeriod billingPeriod) {
+
+        DateTime dateTimeToFromBillingPeriod = new DateTime(billingPeriod.getTo()).withZone(DateTimeZone.UTC);
+
+        log.info("Comparing current time[{}, {}] and from scheduler info [{}, {}]", currentTime,
+                currentTime.toDateMidnight(),
+                dateTimeToFromBillingPeriod, dateTimeToFromBillingPeriod.toDateMidnight());
+
+        if (currentTime.toDateMidnight().isAfter(dateTimeToFromBillingPeriod.toDateMidnight())
+                || currentTime.toDateMidnight().isEqual(dateTimeToFromBillingPeriod.toDateMidnight())) {
+            log.info("Should trigger the job by time");
+            return true;
+        }
+
+        log.info("Should not trigger the job by time");
+        return false;
+    }
+
+    private List<BillingData> getBillingData(BillingPeriod billingPeriod) {
+        AzureInvoiceCalculationService azureInvoiceCalculationService
+                = new AzureInvoiceCalculationService(billingConfigurationAzure);
+
+        List<AzureDailyResourceInvoice> dailyInvoices = azureInvoiceCalculationService.generateInvoiceData(
+                DATE_TIME_FORMATTER.print(new DateTime(billingPeriod.getFrom()).withZone(DateTimeZone.UTC)),
+                DATE_TIME_FORMATTER.print(new DateTime(billingPeriod.getTo()).withZone(DateTimeZone.UTC)));
+
+        if (!dailyInvoices.isEmpty()) {
+            return dailyInvoices
+                    .stream()
+                    .map(this::toBillingData)
+                    .collect(Collectors.toList());
+        } else {
+            log.warn("Daily invoices is empty for period {}", billingPeriod);
+            return Collections.emptyList();
+        }
+    }
+
+    private void updateBillingPeriod(BillingPeriod billingPeriod, DateTime currentTime, boolean updates) {
+
+        try {
+            mongoDbBillingClient.getDatabase().getCollection(MongoKeyWords.AZURE_BILLING_SCHEDULER_HISTORY).insertOne(
+                    Document.parse(objectMapper.writeValueAsString(billingPeriod)).append("updates", updates));
+            log.debug("History of billing periods is updated with {}",
+                    objectMapper.writeValueAsString(billingPeriod));
+        } catch (JsonProcessingException e) {
+            log.error("Cannot update history of billing periods", e);
+
+        }
+
+        billingPeriod.setFrom(billingPeriod.getTo());
+
+        if (new DateTime(billingPeriod.getFrom()).withZone(DateTimeZone.UTC).toDateMidnight()
+                .isEqual(currentTime.toDateMidnight())) {
+
+            log.info("Setting billing to one day later");
+            billingPeriod.setTo(currentTime.plusDays(1).toDateMidnight().toDate());
+
+        } else {
+            billingPeriod.setTo(currentTime.toDateMidnight().toDate());
+        }
+
+        saveBillingPeriod(billingPeriod);
+    }
+
+    private boolean saveBillingPeriod(BillingPeriod billingPeriod) {
+        log.debug("Saving billing period {}", billingPeriod);
+
+        try {
+            UpdateResult updateResult = mongoDbBillingClient.getDatabase().getCollection(MongoKeyWords.AZURE_BILLING_SCHEDULER)
+                    .updateMany(Filters.eq(MongoKeyWords.MONGO_ID, SCHEDULER_ID),
+                            new BasicDBObject("$set",
+                                    Document.parse(objectMapper.writeValueAsString(billingPeriod))
+                                            .append(MongoKeyWords.MONGO_ID, SCHEDULER_ID))
+                            , new UpdateOptions().upsert(true)
+                    );
+
+            log.debug("Billing period save operation result is {}", updateResult);
+            return true;
+        } catch (JsonProcessingException e) {
+            log.error("Cannot save billing period", e);
+        }
+
+        return false;
+    }
+
+    private BillingPeriod getBillingPeriod() {
+        log.debug("Get billing period");
+
+        try {
+            Document document = mongoDbBillingClient.getDatabase().getCollection(MongoKeyWords.AZURE_BILLING_SCHEDULER)
+                    .find(Filters.eq(MongoKeyWords.MONGO_ID, SCHEDULER_ID)).first();
+
+            log.debug("Retrieved billing period document {}", document);
+            if (document != null) {
+                return objectMapper.readValue(document.toJson(), BillingPeriod.class);
+            }
+
+            return null;
+
+        } catch (IOException e) {
+            log.error("Cannot save billing period", e);
+            throw new DlabException("Cannot parse string", e);
+        }
+    }
+
+    private BillingData toBillingData(AzureDailyResourceInvoice billingData) {
+        return BillingData.builder()
+                .tag(billingData.getDlabId().toLowerCase())
+                .usageDateFrom(Optional.ofNullable(billingData.getUsageStartDate()).map(LocalDate::parse).orElse(null))
+                .usageDateTo(Optional.ofNullable(billingData.getUsageEndDate()).map(LocalDate::parse).orElse(null))
+                .usageDate(billingData.getDay())
+                .product(billingData.getMeterCategory())
+                .cost(billingData.getCost())
+                .currency(billingData.getCurrencyCode())
+                .build();
+    }
+}
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/BillingConfigurationAzure.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/BillingConfigurationAzure.java
index 4bd69ce..0a28828 100644
--- a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/BillingConfigurationAzure.java
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/BillingConfigurationAzure.java
@@ -20,9 +20,14 @@
 package com.epam.dlab.billing.azure.config;
 
 import lombok.Data;
+import org.springframework.boot.context.properties.ConfigurationProperties;
+import org.springframework.context.annotation.Configuration;
 
+@Configuration
+@ConfigurationProperties("dlab")
 @Data
 public class BillingConfigurationAzure {
+    private String sbn;
     private long initialDelay;
     private long period;
 
@@ -43,6 +48,5 @@
     private String sharedStorageAccountTagName;
     private String datalakeTagName;
 
-    private LoggingConfigurationFactory logging;
     private AggregationOutputMongoDataSource aggregationOutputMongoDataSource;
 }
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/SecurityConfig.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/SecurityConfig.java
new file mode 100644
index 0000000..35e341c
--- /dev/null
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/config/SecurityConfig.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.azure.config;
+
+import org.keycloak.adapters.KeycloakConfigResolver;
+import org.keycloak.adapters.springboot.KeycloakSpringBootConfigResolver;
+import org.keycloak.adapters.springsecurity.KeycloakConfiguration;
+import org.keycloak.adapters.springsecurity.authentication.KeycloakAuthenticationProvider;
+import org.keycloak.adapters.springsecurity.config.KeycloakWebSecurityConfigurerAdapter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.core.authority.mapping.SimpleAuthorityMapper;
+import org.springframework.security.core.session.SessionRegistryImpl;
+import org.springframework.security.web.authentication.session.RegisterSessionAuthenticationStrategy;
+import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy;
+
+@KeycloakConfiguration
+class SecurityConfig extends KeycloakWebSecurityConfigurerAdapter {
+
+    @Autowired
+    public void configureGlobal(AuthenticationManagerBuilder auth) {
+        KeycloakAuthenticationProvider keycloakAuthenticationProvider = keycloakAuthenticationProvider();
+        keycloakAuthenticationProvider.setGrantedAuthoritiesMapper(new SimpleAuthorityMapper());
+        auth.authenticationProvider(keycloakAuthenticationProvider);
+    }
+
+    @Bean
+    public KeycloakConfigResolver KeycloakConfigResolver() {
+        return new KeycloakSpringBootConfigResolver();
+    }
+
+    @Bean
+    @Override
+    protected SessionAuthenticationStrategy sessionAuthenticationStrategy() {
+        return new RegisterSessionAuthenticationStrategy(new SessionRegistryImpl());
+    }
+
+    @Override
+    protected void configure(HttpSecurity http) throws Exception {
+        super.configure(http);
+        http
+                .anonymous().disable()
+                .authorizeRequests()
+                .anyRequest()
+                .authenticated();
+    }
+}
\ No newline at end of file
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/controller/BillingController.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/controller/BillingController.java
new file mode 100644
index 0000000..9018791
--- /dev/null
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/controller/BillingController.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.azure.controller;
+
+import com.epam.dlab.billing.azure.CalculateBillingService;
+import com.epam.dlab.dto.billing.BillingData;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.List;
+
+@RestController
+public class BillingController {
+
+    private final CalculateBillingService billingService;
+
+    public BillingController(CalculateBillingService billingService) {
+        this.billingService = billingService;
+    }
+
+    @GetMapping
+    public ResponseEntity<List<BillingData>> getBilling() {
+        return new ResponseEntity<>(billingService.getBillingData(), HttpStatus.OK);
+    }
+}
diff --git a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/model/AzureDailyResourceInvoice.java b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/model/AzureDailyResourceInvoice.java
index ff132a2..486ddd5 100644
--- a/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/model/AzureDailyResourceInvoice.java
+++ b/services/billing-azure/src/main/java/com/epam/dlab/billing/azure/model/AzureDailyResourceInvoice.java
@@ -19,35 +19,21 @@
 
 package com.epam.dlab.billing.azure.model;
 
-import com.epam.dlab.billing.DlabResourceType;
 import com.epam.dlab.billing.azure.MongoDocument;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Builder;
 import lombok.Data;
 import lombok.EqualsAndHashCode;
-import lombok.NoArgsConstructor;
 
 @Data
-@NoArgsConstructor
+@Builder
 @EqualsAndHashCode(callSuper = true)
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class AzureDailyResourceInvoice extends MongoDocument<AzureDailyResourceInvoice> {
 	@JsonProperty
 	private String dlabId;
 	@JsonProperty
-	private String user;
-	@JsonProperty
-	private String project;
-	@JsonProperty
-	private String exploratoryId;
-	@JsonProperty
-	private String computationalId;
-	@JsonProperty
-	private DlabResourceType resourceType;
-	@JsonProperty
-	private String resourceName;
-	@JsonProperty
 	private String meterCategory;
 	@JsonProperty
 	private String usageStartDate;
@@ -59,22 +45,4 @@
 	private double cost;
 	@JsonProperty
 	private String currencyCode;
-
-	@Builder
-	public AzureDailyResourceInvoice(AzureDlabBillableResource azureDlabBillableResource) {
-		this.dlabId = azureDlabBillableResource.getId();
-		this.user = azureDlabBillableResource.getUser();
-		this.project = azureDlabBillableResource.getProject();
-		this.resourceType = azureDlabBillableResource.getType();
-		this.resourceName = azureDlabBillableResource.getResourceName();
-
-		if (resourceType == DlabResourceType.EXPLORATORY) {
-			this.exploratoryId = azureDlabBillableResource.getId();
-		} else if (resourceType == DlabResourceType.COMPUTATIONAL) {
-			this.computationalId = azureDlabBillableResource.getId();
-			this.exploratoryId = azureDlabBillableResource.getNotebookId();
-		} else if (resourceType == DlabResourceType.VOLUME) {
-			this.exploratoryId = azureDlabBillableResource.getNotebookId();
-		}
-	}
 }
diff --git a/services/billing-azure/src/main/resources/application.yml b/services/billing-azure/src/main/resources/application.yml
new file mode 100644
index 0000000..482a78d
--- /dev/null
+++ b/services/billing-azure/src/main/resources/application.yml
@@ -0,0 +1,80 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+spring:
+  main:
+    allow-bean-definition-overriding: true
+  data:
+    mongodb:
+      username: admin
+      password: admin
+      database: dlabdb
+      port: 27017
+      host: localhost
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /Users/ofuks/keys/dlabcert/billing.jks
+server.ssl.key-store-password: KEYSTORE_PASSWORD
+server.ssl.key-alias: billing
+
+logging:
+  file: /var/opt/dlab/log/ssn/billing.log
+  level:
+    com:
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: DLAB_bhliva
+  resource: sss
+  credentials.secret: cf5a484b-039b-4161-8707-ad65c0f25962
+  ssl-required: none
+  auth-server-url: http://52.11.45.11:8080/auth
+
+dlab:
+  sbn: <CONF_SERVICE_BASE_NAME>
+  billingEnabled: true
+  clientId: <CLIENT_ID>
+  clientSecret: <CLIENT_SECRET>
+  tenantId: <TENANT_ID>
+  subscriptionId: <SUBSCRIPTION_ID>
+  authenticationFile: <AUTHENTICATION_FILE>
+  # Billing configuration for RateCard API. For more details please see https://msdn.microsoft.com/en-us/library/mt219004.aspx
+  offerNumber: <OFFER_NUMBER>
+  currency: <CURRENCY>
+  locale: <LOCALE>
+  regionInfo: <REGION_INFO>
+  initialDelay: 1
+  period: 60
+  aggregationOutputMongoDataSource:
+    host: localhost
+    port: 27017
+    username: admin
+    password: <MONGODB_PASSWORD>
+    database: dlabdb
+  ssnStorageAccountTagName: <AZURE_SSN_STORAGE_ACCOUNT_TAG>
+  sharedStorageAccountTagName: <AZURE_SHARED_STORAGE_ACCOUNT_TAG>
+  datalakeTagName: <AZURE_DATALAKE_TAG>
\ No newline at end of file
diff --git a/services/billing-gcp/billing.yml b/services/billing-gcp/billing.yml
index 522c7ad..ecdcc45 100644
--- a/services/billing-gcp/billing.yml
+++ b/services/billing-gcp/billing.yml
@@ -1,20 +1,59 @@
-server:
-  port: 8088
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 spring:
+  main:
+    allow-bean-definition-overriding: true
   data:
     mongodb:
       username: admin
-      password: <MONGO_PASSWORD>
+      password: MONGO_PASSWORD
       database: dlabdb
       port: 27017
-      host: localhost
+      host: MONGO_HOST
 dlab:
-  sbn: <CONF_SERVICE_BASE_NAME>
-  bigQueryDataset: <BILLING_DATASET_NAME>
-  cron: 0 0 */1 * * *
+  sbn: SERVICE_BASE_NAME
+  bigQueryDataset: DATASET_NAME
+  cron: 0 0 * * * *
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: /home/OS_USER/keys/ssn.keystore.jks
+server.ssl.key-store-password: KEY_STORE_PASSWORD
+server.ssl.key-alias: ssn
 
 logging:
   file: /var/opt/dlab/log/ssn/billing.log
   level:
     com:
-      epam: trace
\ No newline at end of file
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: dlab
+  resource: KEYCLOAK_CLIENT_ID
+  credentials.secret: KEYCLOAK_CLIENT_SECRET
+  ssl-required: none
+  auth-server-url: KEYCLOAK_AUTH_SERVER_URL
\ No newline at end of file
diff --git a/services/billing-gcp/pom.xml b/services/billing-gcp/pom.xml
index 114b25d..43dff3b 100644
--- a/services/billing-gcp/pom.xml
+++ b/services/billing-gcp/pom.xml
@@ -39,6 +39,13 @@
                 <type>pom</type>
                 <scope>import</scope>
             </dependency>
+            <dependency>
+                <groupId>org.keycloak.bom</groupId>
+                <artifactId>keycloak-adapter-bom</artifactId>
+                <version>4.8.3.Final</version>
+                <type>pom</type>
+                <scope>import</scope>
+            </dependency>
         </dependencies>
     </dependencyManagement>
 
@@ -57,6 +64,19 @@
             <artifactId>spring-boot-starter-web</artifactId>
         </dependency>
         <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-security</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.keycloak</groupId>
+            <artifactId>keycloak-spring-boot-starter</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.keycloak</groupId>
+            <artifactId>keycloak-spring-security-adapter</artifactId>
+            <version>4.8.3.Final</version>
+        </dependency>
+        <dependency>
             <groupId>org.springframework</groupId>
             <artifactId>spring-test</artifactId>
             <scope>test</scope>
@@ -73,6 +93,16 @@
             <version>${org.mockito.version}</version>
             <scope>test</scope>
         </dependency>
+        <dependency>
+            <groupId>com.epam.dlab</groupId>
+            <artifactId>dlab-model</artifactId>
+            <version>${project.parent.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>3.7</version>
+        </dependency>
     </dependencies>
 
     <build>
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/BillingGcpApplication.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/BillingGcpApplication.java
index 0e31323..c454038 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/BillingGcpApplication.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/BillingGcpApplication.java
@@ -23,10 +23,8 @@
 import org.springframework.boot.autoconfigure.SpringBootApplication;
 import org.springframework.boot.context.properties.EnableConfigurationProperties;
 import org.springframework.data.mongodb.repository.config.EnableMongoRepositories;
-import org.springframework.scheduling.annotation.EnableScheduling;
 
 @SpringBootApplication
-@EnableScheduling
 @EnableMongoRepositories
 @EnableConfigurationProperties
 public class BillingGcpApplication {
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/BillingApplicationConfiguration.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/BillingApplicationConfiguration.java
index f565c6f..79c1d9e 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/BillingApplicationConfiguration.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/BillingApplicationConfiguration.java
@@ -27,10 +27,8 @@
 @Configuration
 public class BillingApplicationConfiguration {
 
-
     @Bean
     public BigQuery bigQueryService() {
         return BigQueryOptions.getDefaultInstance().getService();
     }
-
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/SecurityConfig.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/SecurityConfig.java
new file mode 100644
index 0000000..ad960b0
--- /dev/null
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/conf/SecurityConfig.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.gcp.conf;
+
+import org.keycloak.adapters.KeycloakConfigResolver;
+import org.keycloak.adapters.springboot.KeycloakSpringBootConfigResolver;
+import org.keycloak.adapters.springsecurity.KeycloakConfiguration;
+import org.keycloak.adapters.springsecurity.authentication.KeycloakAuthenticationProvider;
+import org.keycloak.adapters.springsecurity.config.KeycloakWebSecurityConfigurerAdapter;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.context.annotation.Bean;
+import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
+import org.springframework.security.config.annotation.web.builders.HttpSecurity;
+import org.springframework.security.core.authority.mapping.SimpleAuthorityMapper;
+import org.springframework.security.core.session.SessionRegistryImpl;
+import org.springframework.security.web.authentication.session.RegisterSessionAuthenticationStrategy;
+import org.springframework.security.web.authentication.session.SessionAuthenticationStrategy;
+
+@KeycloakConfiguration
+class SecurityConfig extends KeycloakWebSecurityConfigurerAdapter {
+
+    @Autowired
+    public void configureGlobal(AuthenticationManagerBuilder auth) {
+        KeycloakAuthenticationProvider keycloakAuthenticationProvider = keycloakAuthenticationProvider();
+        keycloakAuthenticationProvider.setGrantedAuthoritiesMapper(new SimpleAuthorityMapper());
+        auth.authenticationProvider(keycloakAuthenticationProvider);
+    }
+
+    @Bean
+    public KeycloakConfigResolver KeycloakConfigResolver() {
+        return new KeycloakSpringBootConfigResolver();
+    }
+
+    @Bean
+    @Override
+    protected SessionAuthenticationStrategy sessionAuthenticationStrategy() {
+        return new RegisterSessionAuthenticationStrategy(new SessionRegistryImpl());
+    }
+
+    @Override
+    protected void configure(HttpSecurity http) throws Exception {
+        super.configure(http);
+        http
+                .anonymous().disable()
+                .authorizeRequests()
+                .anyRequest()
+                .authenticated();
+    }
+}
\ No newline at end of file
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/controller/BillingController.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/controller/BillingController.java
new file mode 100644
index 0000000..ea45d89
--- /dev/null
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/controller/BillingController.java
@@ -0,0 +1,44 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.gcp.controller;
+
+import com.epam.dlab.billing.gcp.service.BillingService;
+import com.epam.dlab.dto.billing.BillingData;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+import java.util.List;
+
+@RestController
+public class BillingController {
+
+    private final BillingService billingService;
+
+    public BillingController(BillingService billingService) {
+        this.billingService = billingService;
+    }
+
+    @GetMapping
+    public ResponseEntity<List<BillingData>> getBilling() {
+        return new ResponseEntity<>(billingService.getBillingData(), HttpStatus.OK);
+    }
+}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/BillingDAO.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/BillingDAO.java
index 7e6b0b7..7c791df 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/BillingDAO.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/BillingDAO.java
@@ -19,11 +19,10 @@
 
 package com.epam.dlab.billing.gcp.dao;
 
-import com.epam.dlab.billing.gcp.model.GcpBillingData;
+import com.epam.dlab.dto.billing.BillingData;
 
 import java.util.List;
 
 public interface BillingDAO {
-
-    List<GcpBillingData> getBillingData() throws InterruptedException;
+    List<BillingData> getBillingData() throws InterruptedException;
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/impl/BigQueryBillingDAO.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/impl/BigQueryBillingDAO.java
index b0ece02..061283d 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/impl/BigQueryBillingDAO.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/dao/impl/BigQueryBillingDAO.java
@@ -22,16 +22,23 @@
 import com.epam.dlab.billing.gcp.conf.DlabConfiguration;
 import com.epam.dlab.billing.gcp.dao.BillingDAO;
 import com.epam.dlab.billing.gcp.model.BillingHistory;
-import com.epam.dlab.billing.gcp.model.GcpBillingData;
 import com.epam.dlab.billing.gcp.repository.BillingHistoryRepository;
-import com.google.cloud.bigquery.*;
+import com.epam.dlab.dto.billing.BillingData;
+import com.google.cloud.bigquery.BigQuery;
+import com.google.cloud.bigquery.FieldValueList;
+import com.google.cloud.bigquery.QueryJobConfiguration;
+import com.google.cloud.bigquery.QueryParameterValue;
+import com.google.cloud.bigquery.Table;
+import com.google.cloud.bigquery.TableInfo;
 import lombok.extern.slf4j.Slf4j;
 import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.data.mongodb.core.MongoTemplate;
 import org.springframework.stereotype.Component;
 
 import java.time.Instant;
 import java.time.LocalDate;
 import java.time.ZoneId;
+import java.time.format.DateTimeFormatter;
 import java.util.List;
 import java.util.Map;
 import java.util.stream.Collectors;
@@ -41,10 +48,14 @@
 @Component
 @Slf4j
 public class BigQueryBillingDAO implements BillingDAO {
-
+	private static final String DATE_FORMAT = "yyyy-MM-dd";
 	private static final String SBN_PARAM = "sbn";
 	private static final String DATASET_PARAM = "dataset";
+
 	private final BillingHistoryRepository billingHistoryRepo;
+	private final MongoTemplate mongoTemplate;
+	private final BigQuery service;
+	private final String dataset;
 	private final String sbn;
 
 	private static final String GET_BILLING_DATA_QUERY = "SELECT b.sku.description usageType," +
@@ -55,19 +66,19 @@
 			"CROSS JOIN UNNEST(b.labels) as label\n" +
 			"where label.key = 'name' and cost != 0 and label.value like @sbn\n" +
 			"group by usageType, usage_date_from, usage_date_to, product, value, currency";
-	private final BigQuery service;
-	private final String dataset;
 
 	@Autowired
-	public BigQueryBillingDAO(DlabConfiguration conf, BigQuery service, BillingHistoryRepository billingHistoryRepo) {
+	public BigQueryBillingDAO(DlabConfiguration conf, BillingHistoryRepository billingHistoryRepo,
+							  BigQuery service, MongoTemplate mongoTemplate) {
 		dataset = conf.getBigQueryDataset();
-		sbn = conf.getSbn();
 		this.service = service;
 		this.billingHistoryRepo = billingHistoryRepo;
+		this.mongoTemplate = mongoTemplate;
+		sbn = conf.getSbn();
 	}
 
 	@Override
-	public List<GcpBillingData> getBillingData() {
+	public List<BillingData> getBillingData() {
 		final Map<String, Long> processedBillingTables = billingHistoryRepo.findAll()
 				.stream()
 				.collect(Collectors.toMap(BillingHistory::getTableName, BillingHistory::getLastModified));
@@ -82,7 +93,7 @@
 				.collect(Collectors.toList());
 	}
 
-	private Stream<? extends GcpBillingData> bigQueryResultSetStream(Table table) {
+	private Stream<? extends BillingData> bigQueryResultSetStream(Table table) {
 		try {
 			final String tableName = table.getTableId().getTable();
 			final String tableId = table.getTableId().getDataset() + "." + tableName;
@@ -91,9 +102,9 @@
 					.addNamedParameter(SBN_PARAM, QueryParameterValue.string(sbn + "%"))
 					.addNamedParameter(DATASET_PARAM, QueryParameterValue.string(tableId))
 					.build();
-			final Stream<GcpBillingData> gcpBillingDataStream =
+			final Stream<BillingData> gcpBillingDataStream =
 					StreamSupport.stream(service.query(queryConfig).getValues().spliterator(), false)
-							.map(this::toBillingData);
+							.map(this::toGcpBillingData);
 			billingHistoryRepo.save(new BillingHistory(tableName, table.getLastModifiedTime()));
 			return gcpBillingDataStream;
 		} catch (InterruptedException e) {
@@ -101,16 +112,17 @@
 		}
 	}
 
-	private GcpBillingData toBillingData(FieldValueList fields) {
-
-		return GcpBillingData.builder()
+	private BillingData toGcpBillingData(FieldValueList fields) {
+		return BillingData.builder()
 				.usageDateFrom(toLocalDate(fields, "usage_date_from"))
 				.usageDateTo(toLocalDate(fields, "usage_date_to"))
-				.cost(fields.get("cost").getNumericValue())
+				.cost(fields.get("cost").getNumericValue().doubleValue())
 				.product(fields.get("product").getStringValue())
 				.usageType(fields.get("usageType").getStringValue())
 				.currency(fields.get("currency").getStringValue())
-				.tag(fields.get("value").getStringValue()).build();
+				.tag(fields.get("value").getStringValue().toLowerCase())
+				.usageDate(toLocalDate(fields, "usage_date_from").format((DateTimeFormatter.ofPattern(DATE_FORMAT))))
+				.build();
 	}
 
 	private LocalDate toLocalDate(FieldValueList fieldValues, String timestampFieldName) {
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/UserInstance.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/UserInstance.java
deleted file mode 100644
index b5a61ba..0000000
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/UserInstance.java
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.gcp.documents;
-
-import com.epam.dlab.billing.gcp.model.BillingData;
-import lombok.Data;
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Document;
-import org.springframework.data.mongodb.core.mapping.Field;
-
-import java.util.List;
-
-@Document(collection = "userInstances")
-@Data
-public class UserInstance {
-
-    @Id
-    private String id;
-    @Field("user")
-    private String user;
-    @Field("exploratory_name")
-    private String exploratoryName;
-    @Field("exploratory_id")
-    private String exploratoryId;
-    private String project;
-    private List<BillingData> billing;
-    private String cost;
-    @Field("computational_resources")
-    private List<ComputationalResource> computationalResources;
-
-    @Data
-    public class ComputationalResource {
-        @Field("computational_name")
-        private String computationalName;
-        @Field("computational_id")
-        private String computationalId;
-    }
-}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingHistory.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingHistory.java
index 6abb2d9..a232ecc 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingHistory.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingHistory.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.billing.gcp.model;
 
 import lombok.AllArgsConstructor;
@@ -7,7 +26,7 @@
 @Data
 @AllArgsConstructor
 public class BillingHistory {
-	@Id
-	private String tableName;
-	private final long lastModified;
+    @Id
+    private String tableName;
+    private final long lastModified;
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/GcpBillingData.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/GcpBillingData.java
index a11dcce..a2bd12b 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/GcpBillingData.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/GcpBillingData.java
@@ -21,18 +21,24 @@
 
 import lombok.Builder;
 import lombok.Data;
+import org.springframework.data.mongodb.core.mapping.Document;
+import org.springframework.data.mongodb.core.mapping.Field;
 
-import java.math.BigDecimal;
 import java.time.LocalDate;
 
 @Data
 @Builder
+@Document(collection = "billing")
 public class GcpBillingData {
+    @Field("from")
     private final LocalDate usageDateFrom;
+    @Field("to")
     private final LocalDate usageDateTo;
     private final String product;
     private final String usageType;
-    private final BigDecimal cost;
+    private final Double cost;
     private final String currency;
+    @Field("dlabId")
     private final String tag;
+    private final String usageDate;
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingHistoryRepository.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingHistoryRepository.java
index c375904..957ced7 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingHistoryRepository.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingHistoryRepository.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.billing.gcp.repository;
 
 import com.epam.dlab.billing.gcp.model.BillingHistory;
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingRepository.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingRepository.java
index 9dbfe98..2d4c5c1 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingRepository.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/BillingRepository.java
@@ -19,9 +19,9 @@
 
 package com.epam.dlab.billing.gcp.repository;
 
-import com.epam.dlab.billing.gcp.model.BillingData;
+import com.epam.dlab.billing.gcp.model.GcpBillingData;
 import org.springframework.data.mongodb.repository.MongoRepository;
 
-public interface BillingRepository extends MongoRepository<BillingData, String> {
+public interface BillingRepository extends MongoRepository<GcpBillingData, String> {
 	void deleteByUsageDateRegex(String usageDateRegex);
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/ProjectRepository.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/ProjectRepository.java
deleted file mode 100644
index 955c6a2..0000000
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/ProjectRepository.java
+++ /dev/null
@@ -1,26 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.gcp.repository;
-
-import com.epam.dlab.billing.gcp.documents.Project;
-import org.springframework.data.mongodb.repository.MongoRepository;
-
-public interface ProjectRepository extends MongoRepository<Project, String> {
-}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/UserInstanceRepository.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/UserInstanceRepository.java
deleted file mode 100644
index a95d033..0000000
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/repository/UserInstanceRepository.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.gcp.repository;
-
-import com.epam.dlab.billing.gcp.documents.UserInstance;
-import org.springframework.data.mongodb.repository.MongoRepository;
-
-import java.util.Optional;
-
-public interface UserInstanceRepository extends MongoRepository<UserInstance, String> {
-
-    Optional<UserInstance> findByUserAndExploratoryName(String user, String exploratoryName);
-}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/scheduler/BillingScheduler.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/scheduler/BillingScheduler.java
deleted file mode 100644
index 9724d43..0000000
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/scheduler/BillingScheduler.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.gcp.scheduler;
-
-import com.epam.dlab.billing.gcp.service.BillingService;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Component;
-
-@Component
-public class BillingScheduler {
-
-	private final BillingService billingService;
-
-	@Autowired
-	public BillingScheduler(BillingService billingService) {
-		this.billingService = billingService;
-	}
-
-
-	@Scheduled(cron = "${dlab.cron}")
-	public void getBillingReport() {
-		billingService.updateBillingData();
-	}
-}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingService.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingService.java
index 71015aa..7bb3246 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingService.java
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingService.java
@@ -19,7 +19,10 @@
 
 package com.epam.dlab.billing.gcp.service;
 
-public interface BillingService {
+import com.epam.dlab.dto.billing.BillingData;
 
-    void updateBillingData();
+import java.util.List;
+
+public interface BillingService {
+    List<BillingData> getBillingData();
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingServiceImpl.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingServiceImpl.java
deleted file mode 100644
index 2a20206..0000000
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/BillingServiceImpl.java
+++ /dev/null
@@ -1,186 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.gcp.service;
-
-import com.epam.dlab.billing.gcp.dao.BillingDAO;
-import com.epam.dlab.billing.gcp.documents.Project;
-import com.epam.dlab.billing.gcp.documents.UserInstance;
-import com.epam.dlab.billing.gcp.model.BillingData;
-import com.epam.dlab.billing.gcp.model.GcpBillingData;
-import com.epam.dlab.billing.gcp.repository.BillingRepository;
-import com.epam.dlab.billing.gcp.repository.ProjectRepository;
-import com.epam.dlab.billing.gcp.repository.UserInstanceRepository;
-import com.epam.dlab.billing.gcp.util.BillingUtils;
-import lombok.extern.slf4j.Slf4j;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.data.mongodb.core.query.Query;
-import org.springframework.data.mongodb.core.query.Update;
-import org.springframework.stereotype.Service;
-
-import java.math.BigDecimal;
-import java.time.format.DateTimeFormatter;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;
-import java.util.stream.Collector;
-import java.util.stream.Collectors;
-import java.util.stream.Stream;
-
-import static com.epam.dlab.billing.gcp.util.BillingUtils.edgeBillingDataStream;
-import static org.springframework.data.mongodb.core.query.Criteria.where;
-
-@Service
-@Slf4j
-public class BillingServiceImpl implements BillingService {
-
-	private static final String DATE_FORMAT = "yyyy-MM-dd";
-	private static final String USAGE_DATE_FORMAT = "yyyy-MM";
-	private final BillingDAO billingDAO;
-	private final ProjectRepository projectRepository;
-	private final UserInstanceRepository userInstanceRepository;
-	private final BillingRepository billingRepository;
-	private final MongoTemplate mongoTemplate;
-	@Value("${dlab.sbn}")
-	private String sbn;
-
-	@Autowired
-	public BillingServiceImpl(BillingDAO billingDAO, ProjectRepository projectRepository,
-							  UserInstanceRepository userInstanceRepository, BillingRepository billingRepository,
-							  MongoTemplate mongoTemplate) {
-		this.billingDAO = billingDAO;
-		this.projectRepository = projectRepository;
-		this.userInstanceRepository = userInstanceRepository;
-		this.billingRepository = billingRepository;
-		this.mongoTemplate = mongoTemplate;
-	}
-
-	@Override
-	public void updateBillingData() {
-		try {
-
-			final Stream<BillingData> ssnBillingDataStream = BillingUtils.ssnBillingDataStream(sbn);
-			final Stream<BillingData> billableUserInstances = userInstanceRepository.findAll()
-					.stream()
-					.filter(userInstance -> userInstance.getExploratoryId() != null)
-					.flatMap(BillingUtils::exploratoryBillingDataStream);
-
-			final Stream<BillingData> billableEdges = projectRepository.findAll()
-					.stream()
-					.collect(Collectors.toMap(Project::getName, Project::getEndpoints))
-					.entrySet()
-					.stream()
-					.flatMap(e -> projectEdges(e.getKey(), e.getValue()));
-
-
-			final Map<String, BillingData> billableResources = Stream.of(billableUserInstances, billableEdges,
-					ssnBillingDataStream)
-					.flatMap(s -> s)
-					.filter(bd -> bd.getDlabId() != null)
-					.collect(Collectors.toMap(BillingData::getDlabId, b -> b));
-			log.info("Billable resources are: {}", billableResources);
-			final Map<String, List<BillingData>> billingDataMap = billingDAO.getBillingData()
-					.stream()
-					.map(bd -> toBillingData(bd, getOrDefault(billableResources, bd.getTag())))
-					.collect(Collectors.groupingBy(bd -> bd.getUsageDate().substring(0,
-							USAGE_DATE_FORMAT.length())));
-
-			billingDataMap.forEach((usageDate, billingDataList) -> {
-				log.info("Updating billing information for month {}", usageDate);
-				billingRepository.deleteByUsageDateRegex("^" + usageDate);
-				billingRepository.insert(billingDataList);
-				updateExploratoryCost(billingDataList);
-			});
-
-			log.info("Finished updating billing data");
-
-
-		} catch (Exception e) {
-			log.error("Can not update billing due to: {}", e.getMessage(), e);
-		}
-	}
-
-	private Stream<BillingData> projectEdges(String projectName, List<Project.Endpoint> endpoints) {
-		return endpoints
-				.stream()
-				.flatMap(endpoint -> edgeBillingDataStream(projectName, sbn, endpoint.getName()));
-	}
-
-	private BillingData getOrDefault(Map<String, BillingData> billableResources, String tag) {
-		return billableResources.getOrDefault(tag, BillingData.builder().dlabId(tag).build());
-	}
-
-	private void updateExploratoryCost(List<BillingData> billingDataList) {
-		billingDataList.stream()
-				.filter(this::userAndExploratoryNamePresent)
-				.collect(groupByUserNameExploratoryNameCollector())
-				.forEach(this::updateUserExploratoryBillingData);
-	}
-
-	private void updateUserExploratoryBillingData(String user,
-												  Map<String, List<BillingData>> billableExploratoriesMap) {
-		billableExploratoriesMap.forEach((exploratoryName, billingInfoList) ->
-				updateExploratoryBillingData(user, exploratoryName, billingInfoList)
-		);
-	}
-
-	private Collector<BillingData, ?, Map<String, Map<String, List<BillingData>>>> groupByUserNameExploratoryNameCollector() {
-		return Collectors.groupingBy(BillingData::getUser, Collectors.groupingBy(BillingData::getExploratoryName));
-	}
-
-	private boolean userAndExploratoryNamePresent(BillingData bd) {
-		return Objects.nonNull(bd.getUser()) && Objects.nonNull(bd.getExploratoryName());
-	}
-
-	private void updateExploratoryBillingData(String user, String exploratoryName, List<BillingData> billingInfoList) {
-		userInstanceRepository.findByUserAndExploratoryName(user, exploratoryName).ifPresent(userInstance ->
-				mongoTemplate.updateFirst(Query.query(where("user").is(user).and("exploratory_name").is(exploratoryName)),
-						Update.update("cost", getTotalCost(billingInfoList) + "$").set("billing", billingInfoList),
-						UserInstance.class));
-	}
-
-	private double getTotalCost(List<BillingData> billingInfoList) {
-		return new BigDecimal(billingInfoList.stream().mapToDouble(BillingData::getCost).sum())
-				.setScale(2, BigDecimal.ROUND_HALF_UP)
-				.doubleValue();
-
-	}
-
-	private BillingData toBillingData(GcpBillingData bd, BillingData billableResource) {
-
-		return BillingData.builder()
-				.displayName(billableResource.getDisplayName())
-				.cost(bd.getCost().setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue())
-				.currency(bd.getCurrency())
-				.product(bd.getProduct())
-				.project(billableResource.getProject())
-				.usageDateTo(bd.getUsageDateTo())
-				.usageDateFrom(bd.getUsageDateFrom())
-				.usageDate(bd.getUsageDateFrom().format((DateTimeFormatter.ofPattern(DATE_FORMAT))))
-				.usageType(bd.getUsageType())
-				.user(billableResource.getUser())
-				.exploratoryName(billableResource.getExploratoryName())
-				.computationalName(billableResource.getComputationalName())
-				.dlabId(bd.getTag())
-				.resourceType(billableResource.getResourceType())
-				.build();
-	}
-}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/impl/BillingServiceImpl.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/impl/BillingServiceImpl.java
new file mode 100644
index 0000000..5661dfb
--- /dev/null
+++ b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/service/impl/BillingServiceImpl.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.billing.gcp.service.impl;
+
+import com.epam.dlab.billing.gcp.dao.BillingDAO;
+import com.epam.dlab.billing.gcp.service.BillingService;
+import com.epam.dlab.dto.billing.BillingData;
+import lombok.extern.slf4j.Slf4j;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.stereotype.Service;
+
+import java.util.Collections;
+import java.util.List;
+
+@Service
+@Slf4j
+public class BillingServiceImpl implements BillingService {
+
+	private final BillingDAO billingDAO;
+
+	@Autowired
+	public BillingServiceImpl(BillingDAO billingDAO) {
+		this.billingDAO = billingDAO;
+	}
+
+	@Override
+	public List<BillingData> getBillingData() {
+		try {
+			return billingDAO.getBillingData();
+		} catch (Exception e) {
+			log.error("Can not update billing due to: {}", e.getMessage(), e);
+			return Collections.emptyList();
+		}
+	}
+}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/util/BillingUtils.java b/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/util/BillingUtils.java
deleted file mode 100644
index 2f26f10..0000000
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/util/BillingUtils.java
+++ /dev/null
@@ -1,95 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.billing.gcp.util;
-
-import com.epam.dlab.billing.gcp.documents.UserInstance;
-import com.epam.dlab.billing.gcp.model.BillingData;
-
-import java.util.stream.Stream;
-
-public class BillingUtils {
-
-	private static final String EDGE_FORMAT = "%s-%s-%s-edge";
-	private static final String EDGE_VOLUME_FORMAT = "%s-%s-%s-edge-volume-primary";
-	private static final String EDGE_BUCKET_FORMAT = "%s-%s-bucket";
-	private static final String VOLUME_PRIMARY_FORMAT = "%s-volume-primary";
-	private static final String VOLUME_SECONDARY_FORMAT = "%s-volume-secondary";
-	private static final String VOLUME_PRIMARY = "Volume primary";
-	private static final String VOLUME_SECONDARY = "Volume secondary";
-	private static final String SHARED_RESOURCE = "Shared resource";
-
-	public static Stream<BillingData> edgeBillingDataStream(String project, String sbn, String endpoint) {
-		final String userEdgeId = String.format(EDGE_FORMAT, sbn, project.toLowerCase(), endpoint);
-		final String edgeVolumeId = String.format(EDGE_VOLUME_FORMAT, sbn, project.toLowerCase(), endpoint);
-		final String edgeBucketId = String.format(EDGE_BUCKET_FORMAT, sbn, project.toLowerCase());
-		return Stream.of(
-				BillingData.builder().displayName("EDGE node").user(SHARED_RESOURCE).project(project).dlabId(userEdgeId).resourceType(BillingData.ResourceType.EDGE).build(),
-				BillingData.builder().displayName("EDGE volume").user(SHARED_RESOURCE).project(project).dlabId(edgeVolumeId).resourceType(BillingData.ResourceType.VOLUME).build(),
-				BillingData.builder().displayName("EDGE bucket").user(SHARED_RESOURCE).project(project).dlabId(edgeBucketId).resourceType(BillingData.ResourceType.EDGE_BUCKET).build()
-		);
-	}
-
-	public static Stream<BillingData> ssnBillingDataStream(String sbn) {
-		final String ssnId = sbn + "-ssn";
-		final String bucketName = sbn.replaceAll("_", "-");
-		return Stream.of(
-				BillingData.builder().user(SHARED_RESOURCE).displayName("SSN").dlabId(ssnId).resourceType(BillingData.ResourceType.SSN).build(),
-				BillingData.builder().user(SHARED_RESOURCE).displayName("SSN Volume").dlabId(String.format(VOLUME_PRIMARY_FORMAT, ssnId)).resourceType(BillingData.ResourceType.VOLUME).build(),
-				BillingData.builder().user(SHARED_RESOURCE).displayName("SSN bucket").dlabId(bucketName + "-ssn" +
-						"-bucket").resourceType(BillingData.ResourceType.SSN_BUCKET).build(),
-				BillingData.builder().user(SHARED_RESOURCE).displayName("Collaboration bucket").dlabId(bucketName +
-						"-shared-bucket").resourceType(BillingData.ResourceType.SHARED_BUCKET).build()
-		);
-	}
-
-	public static Stream<BillingData> exploratoryBillingDataStream(UserInstance userInstance) {
-		final Stream<BillingData> computationalStream = userInstance.getComputationalResources()
-				.stream()
-				.filter(cr -> cr.getComputationalId() != null)
-				.flatMap(cr -> Stream.of(computationalBillableResource(userInstance, cr),
-						withExploratoryName(userInstance).displayName(cr.getComputationalName() + ":" + VOLUME_PRIMARY).dlabId(String.format(VOLUME_PRIMARY_FORMAT, cr.getComputationalId()))
-								.resourceType(BillingData.ResourceType.VOLUME).computationalName(cr.getComputationalName()).build()));
-		final String exploratoryId = userInstance.getExploratoryId();
-		final String primaryVolumeId = String.format(VOLUME_PRIMARY_FORMAT, exploratoryId);
-		final String secondaryVolumeId = String.format(VOLUME_SECONDARY_FORMAT, exploratoryId);
-		final Stream<BillingData> exploratoryStream = Stream.of(
-				withExploratoryName(userInstance).displayName(userInstance.getExploratoryName()).dlabId(exploratoryId).resourceType(BillingData.ResourceType.EXPLORATORY).build(),
-				withExploratoryName(userInstance).displayName(VOLUME_PRIMARY).dlabId(primaryVolumeId).resourceType(BillingData.ResourceType.VOLUME).build(),
-				withExploratoryName(userInstance).displayName(VOLUME_SECONDARY).dlabId(secondaryVolumeId).resourceType(BillingData.ResourceType.VOLUME).build());
-		return Stream.concat(computationalStream, exploratoryStream);
-	}
-
-	private static BillingData computationalBillableResource(UserInstance userInstance,
-															 UserInstance.ComputationalResource cr) {
-		return withExploratoryName(userInstance)
-				.dlabId(cr.getComputationalId())
-				.displayName(cr.getComputationalName())
-				.resourceType(BillingData.ResourceType.COMPUTATIONAL)
-				.computationalName(cr.getComputationalName())
-				.project(userInstance.getProject())
-				.build();
-	}
-
-	private static BillingData.BillingDataBuilder withExploratoryName(UserInstance userInstance) {
-		return BillingData.builder().user(userInstance.getUser()).exploratoryName(userInstance.getExploratoryName())
-				.project(userInstance.getProject());
-	}
-
-}
diff --git a/services/billing-gcp/src/main/resources/application.yml b/services/billing-gcp/src/main/resources/application.yml
index f1f3ce6..45bab37 100644
--- a/services/billing-gcp/src/main/resources/application.yml
+++ b/services/billing-gcp/src/main/resources/application.yml
@@ -1,18 +1,59 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
 spring:
+  main:
+    allow-bean-definition-overriding: true
   data:
     mongodb:
       username: admin
       password: admin
-      database: <MONGO_PASSWORD>
+      database: dlabdb
       port: 27017
       host: localhost
 dlab:
   sbn: <CONF_SERVICE_BASE_NAME>
   bigQueryDataset: <DATASET_NAME>
-  cron: 0 * * * * *
+  cron: 0 0 * * * *
+
+server:
+  port: 8088
+  servlet:
+    contextPath: /api/billing
+
+server.ssl.key-store-type: JKS
+server.ssl.key-store: <KEY_STORE_PATH>
+server.ssl.key-store-password: KEYSTORE_PASSWORD
+server.ssl.key-alias: billing
 
 logging:
   file: /var/opt/dlab/log/ssn/billing.log
   level:
     com:
-      epam: trace
\ No newline at end of file
+      epam: trace
+
+keycloak:
+  bearer-only: true
+  realm: <KEYCLOAK_REALM_NAME>
+  resource: <KEYCLOAK_CLIENT_ID>
+  credentials.secret: <KEYCLOAK_CLIENT_SECRET>
+  ssl-required: none
+  auth-server-url: <KEYCLOAK_AUTH_SERVER_URL>
\ No newline at end of file
diff --git a/services/billing-gcp/src/test/java/com/epam/dlab/billing/gcp/service/BillingServiceImplTest.java b/services/billing-gcp/src/test/java/com/epam/dlab/billing/gcp/service/BillingServiceImplTest.java
deleted file mode 100644
index e002419..0000000
--- a/services/billing-gcp/src/test/java/com/epam/dlab/billing/gcp/service/BillingServiceImplTest.java
+++ /dev/null
@@ -1,87 +0,0 @@
-package com.epam.dlab.billing.gcp.service;
-
-import com.epam.dlab.billing.gcp.dao.BillingDAO;
-import com.epam.dlab.billing.gcp.documents.UserInstance;
-import com.epam.dlab.billing.gcp.model.GcpBillingData;
-import com.epam.dlab.billing.gcp.repository.BillingRepository;
-import com.epam.dlab.billing.gcp.repository.ProjectRepository;
-import com.epam.dlab.billing.gcp.repository.UserInstanceRepository;
-import org.junit.Before;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
-import org.springframework.data.mongodb.core.MongoTemplate;
-import org.springframework.test.util.ReflectionTestUtils;
-
-import java.math.BigDecimal;
-import java.time.LocalDate;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import static org.mockito.Mockito.*;
-
-
-@RunWith(MockitoJUnitRunner.class)
-public class BillingServiceImplTest {
-    @Mock
-    private BillingDAO billingDAO;
-    @Mock
-    private ProjectRepository projectRepository;
-    @Mock
-    private UserInstanceRepository userInstanceRepository;
-    @Mock
-    private BillingRepository billingRepository;
-    @Mock
-    private MongoTemplate mongoTemplate;
-    @InjectMocks
-    private BillingServiceImpl billingService;
-
-    @Before
-    public void setUp() {
-        ReflectionTestUtils.setField(billingService, "sbn", "CONF_SERVICE_BASE_NAME");
-    }
-
-    @Test
-    public void updateBillingData() throws InterruptedException {
-        when(userInstanceRepository.findAll()).thenReturn(getUserInstances());
-        when(billingDAO.getBillingData()).thenReturn(getBillingData());
-
-        billingService.updateBillingData();
-
-        verify(userInstanceRepository).findAll();
-        verify(userInstanceRepository, times(1)).findAll();
-        verify(billingDAO).getBillingData();
-        verify(billingDAO, times(1)).getBillingData();
-        verify(projectRepository, times(1)).findAll();
-        verify(billingRepository, times(1)).deleteByUsageDateRegex(anyString());
-        verify(billingRepository, times(1)).insert(anyCollection());
-
-        verifyNoMoreInteractions(billingDAO, userInstanceRepository, projectRepository);
-    }
-
-    private List<UserInstance> getUserInstances() {
-        UserInstance userInstance1 = new UserInstance();
-        userInstance1.setComputationalResources(Collections.emptyList());
-
-        UserInstance userInstance2 = new UserInstance();
-        userInstance2.setComputationalResources(Collections.emptyList());
-        userInstance2.setExploratoryId("exploratoryIId");
-
-        return Arrays.asList(userInstance1, userInstance1, userInstance2);
-    }
-
-    private List<GcpBillingData> getBillingData() {
-        return Collections.singletonList(GcpBillingData.builder()
-                .usageDateFrom(LocalDate.MIN)
-                .usageDateTo(LocalDate.MAX)
-                .product("product")
-                .usageType("usageType")
-                .cost(new BigDecimal(1))
-                .currency("USD")
-                .tag("exploratoryId")
-                .build());
-    }
-}
\ No newline at end of file
diff --git a/services/common/src/main/java/com/epam/dlab/billing/DlabResourceType.java b/services/common/src/main/java/com/epam/dlab/billing/DlabResourceType.java
index 54a590e..dfec0dc 100644
--- a/services/common/src/main/java/com/epam/dlab/billing/DlabResourceType.java
+++ b/services/common/src/main/java/com/epam/dlab/billing/DlabResourceType.java
@@ -19,10 +19,6 @@
 
 package com.epam.dlab.billing;
 
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.List;
-
 public enum DlabResourceType {
 	SSN,
 	SSN_BUCKET,
@@ -51,88 +47,6 @@
 		return null;
 	}
 
-	public static String getResourceTypeName(String id) {
-		DlabResourceType resourceTypeId = DlabResourceType.of(id);
-		if (resourceTypeId != null) {
-			switch (resourceTypeId) {
-				case COMPUTATIONAL:
-					return "Cluster";
-				case EXPLORATORY:
-					return "Notebook";
-				case EDGE:
-					return "Edge Node";
-				case VOLUME:
-					return "Volume";
-				case EDGE_BUCKET:
-				case SSN_BUCKET:
-				case COLLABORATION_BUCKET:
-					return "Bucket";
-				case EDGE_CONTAINER:
-				case SSN_CONTAINER:
-				case COLLABORATION_CONTAINER:
-					return "Container";
-				case SSN_STORAGE_ACCOUNT:
-				case EDGE_STORAGE_ACCOUNT:
-				case COLLABORATION_STORAGE_ACCOUNT:
-					return "Storage Account";
-				case SSN:
-					return "SSN";
-				case DATA_LAKE_STORE:
-					return "Data Lake Store Account";
-			}
-		}
-		return id;
-	}
-
-	public static List<String> getResourceTypeIds(List<String> names) {
-		if (names == null || names.isEmpty()) {
-			return Collections.emptyList();
-		}
-
-		List<String> list = new ArrayList<>();
-		names.forEach(e -> {
-			switch (e) {
-				case "Cluster":
-					list.add(DlabResourceType.COMPUTATIONAL.toString());
-					break;
-				case "Notebook":
-					list.add(DlabResourceType.EXPLORATORY.toString());
-					break;
-				case "Edge Node":
-					list.add(DlabResourceType.EDGE.toString());
-					break;
-				case "Bucket":
-					list.add(DlabResourceType.EDGE_BUCKET.toString());
-					list.add(DlabResourceType.SSN_BUCKET.toString());
-					list.add(DlabResourceType.COLLABORATION_BUCKET.toString());
-					break;
-				case "Container":
-					list.add(DlabResourceType.EDGE_CONTAINER.toString());
-					list.add(DlabResourceType.SSN_CONTAINER.toString());
-					list.add(DlabResourceType.COLLABORATION_CONTAINER.toString());
-					break;
-				case "SSN":
-					list.add(DlabResourceType.SSN.toString());
-					break;
-				case "Storage Account":
-					list.add(DlabResourceType.SSN_STORAGE_ACCOUNT.toString());
-					list.add(DlabResourceType.EDGE_STORAGE_ACCOUNT.toString());
-					list.add(DlabResourceType.COLLABORATION_STORAGE_ACCOUNT.toString());
-					break;
-				case "Data Lake Store Account":
-					list.add(DlabResourceType.DATA_LAKE_STORE.toString());
-					break;
-				case "Volume":
-					list.add(DlabResourceType.VOLUME.toString());
-					break;
-				default:
-					list.add(e);
-			}
-		});
-
-		return list;
-	}
-
 	@Override
 	public String toString() {
 		return super.toString().toUpperCase();
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/base/project/ProjectResult.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/base/project/ProjectResult.java
index 11a6db6..0c88022 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/base/project/ProjectResult.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/base/project/ProjectResult.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.dto.base.project;
 
 import com.epam.dlab.dto.StatusBaseDTO;
@@ -9,10 +28,10 @@
 @Data
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class ProjectResult extends StatusBaseDTO<ProjectResult> {
-	private EdgeInfo edgeInfo;
-	@JsonProperty("project_name")
-	private String projectName;
-	@JsonProperty("endpoint_name")
-	private String endpointName;
+    private EdgeInfo edgeInfo;
+    @JsonProperty("project_name")
+    private String projectName;
+    @JsonProperty("endpoint_name")
+    private String endpointName;
 
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/Project.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/billing/BillingData.java
similarity index 61%
copy from services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/Project.java
copy to services/dlab-model/src/main/java/com/epam/dlab/dto/billing/BillingData.java
index 0b40235..c95a02e 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/Project.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/billing/BillingData.java
@@ -17,25 +17,28 @@
  * under the License.
  */
 
-package com.epam.dlab.billing.gcp.documents;
+package com.epam.dlab.dto.billing;
 
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Builder;
 import lombok.Data;
-import org.springframework.data.mongodb.core.mapping.Document;
 
-import java.util.List;
+import java.time.LocalDate;
 
-@Document(collection = "Projects")
 @Data
-public class Project {
-
-	@JsonProperty("name")
-	private String name;
-	private List<Endpoint> endpoints;
-
-
-	@Data
-	public class Endpoint {
-		private final String name;
-	}
+@Builder
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class BillingData {
+    private final String tag;
+    private String application;
+    @JsonProperty("from")
+    private LocalDate usageDateFrom;
+    @JsonProperty("to")
+    private LocalDate usageDateTo;
+    private String product;
+    private String usageType;
+    private Double cost;
+    private String currency;
+    private final String usageDate;
 }
diff --git a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/billing/BillingResourceType.java
similarity index 81%
copy from integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java
copy to services/dlab-model/src/main/java/com/epam/dlab/dto/billing/BillingResourceType.java
index 1e49a60..7ad1082 100644
--- a/integration-tests/src/main/java/com/epam/dlab/automation/docker/Labels.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/billing/BillingResourceType.java
@@ -17,7 +17,15 @@
  * under the License.
  */
 
-package com.epam.dlab.automation.docker;
+package com.epam.dlab.dto.billing;
 
-class Labels {
+public enum BillingResourceType {
+    EDGE,
+    SSN,
+    ENDPOINT,
+    BUCKET,
+    VOLUME,
+    EXPLORATORY,
+    COMPUTATIONAL,
+    IMAGE
 }
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/UserComputationalResource.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/UserComputationalResource.java
index 678025f..9f8c021 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/UserComputationalResource.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/UserComputationalResource.java
@@ -59,17 +59,21 @@
 	private LocalDateTime lastActivity;
 	@JsonProperty("master_node_shape")
 	private String masterNodeShape;
+	@JsonProperty("slave_node_shape")
+	private String slaveNodeShape;
 	@JsonProperty("dataengine_instance_shape")
 	private String dataengineShape;
+	@JsonProperty("dataengine_instance_count")
+	private int dataengineInstanceCount;
 	@JsonProperty("instance_id")
 	private String instanceId;
 	protected List<ClusterConfig> config;
-	private Map<String,String> tags;
+	private Map<String, String> tags;
 
 	public UserComputationalResource(String computationalName, String computationalId, String imageName,
 									 String templateName, String status, Date uptime, SchedulerJobDTO schedulerData,
 									 boolean reuploadKeyRequired, List<ResourceURL> resourceUrl,
-									 LocalDateTime lastActivity, Map<String,String> tags) {
+									 LocalDateTime lastActivity, Map<String, String> tags) {
 		this.computationalName = computationalName;
 		this.computationalId = computationalId;
 		this.imageName = imageName;
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryImageDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryImageDTO.java
index 0dad8e4..b41f432 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryImageDTO.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryImageDTO.java
@@ -35,6 +35,8 @@
 	private Map<String, String> tags;
 	@JsonProperty("endpoint_name")
 	private String endpoint;
+	@JsonProperty("conf_shared_image_enabled")
+	private String sharedImageEnabled;
 
 	public ExploratoryImageDTO withImageName(String imageName) {
 		this.imageName = imageName;
@@ -51,6 +53,11 @@
 		return this;
 	}
 
+	public ExploratoryImageDTO withSharedImageEnabled(String sharedImageEnabled) {
+		this.sharedImageEnabled = sharedImageEnabled;
+		return this;
+	}
+
 	@Override
 	public MoreObjects.ToStringHelper toStringHelper(Object self) {
 		return super.toStringHelper(self)
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectActionDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectActionDTO.java
index 23039be..93b955e 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectActionDTO.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectActionDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.dto.project;
 
 import com.epam.dlab.dto.ResourceBaseDTO;
@@ -8,8 +27,8 @@
 @Data
 @AllArgsConstructor
 public class ProjectActionDTO extends ResourceBaseDTO<ProjectActionDTO> {
-	@JsonProperty("project_name")
-	private final String name;
-	@JsonProperty("endpoint_name")
-	private final String endpoint;
+    @JsonProperty("project_name")
+    private final String name;
+    @JsonProperty("endpoint_name")
+    private final String endpoint;
 }
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectCreateDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectCreateDTO.java
index c64c505..47b49b2 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectCreateDTO.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/project/ProjectCreateDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.dto.project;
 
 import com.epam.dlab.dto.ResourceBaseDTO;
@@ -8,11 +27,11 @@
 @Data
 @Builder
 public class ProjectCreateDTO extends ResourceBaseDTO<ProjectCreateDTO> {
-	private final String key;
-	@JsonProperty("project_name")
-	private final String name;
-	@JsonProperty("project_tag")
-	private final String tag;
-	@JsonProperty("endpoint_name")
-	private final String endpoint;
+    private final String key;
+    @JsonProperty("project_name")
+    private final String name;
+    @JsonProperty("project_tag")
+    private final String tag;
+    @JsonProperty("endpoint_name")
+    private final String endpoint;
 }
diff --git a/services/dlab-webapp-common/src/main/java/com/epam/dlab/ServiceConfiguration.java b/services/dlab-webapp-common/src/main/java/com/epam/dlab/ServiceConfiguration.java
index 85d4318..3297abd 100644
--- a/services/dlab-webapp-common/src/main/java/com/epam/dlab/ServiceConfiguration.java
+++ b/services/dlab-webapp-common/src/main/java/com/epam/dlab/ServiceConfiguration.java
@@ -54,6 +54,11 @@
 
     @Valid
     @NotNull
+    @JsonProperty(ServiceConsts.BILLING_SERVICE_NAME)
+    private RESTServiceFactory billingFactory = new RESTServiceFactory();
+
+    @Valid
+    @NotNull
     @JsonProperty(ServiceConsts.SECURITY_SERVICE_NAME)
     private RESTServiceFactory securityFactory;
 
@@ -85,6 +90,10 @@
         return provisioningFactory;
     }
 
+    public RESTServiceFactory getBillingFactory() {
+        return billingFactory;
+    }
+
     public RESTServiceFactory getSecurityFactory() {
         return securityFactory;
     }
diff --git a/services/dlab-webapp-common/src/main/java/com/epam/dlab/constants/ServiceConsts.java b/services/dlab-webapp-common/src/main/java/com/epam/dlab/constants/ServiceConsts.java
index d376665..e1bcf23 100644
--- a/services/dlab-webapp-common/src/main/java/com/epam/dlab/constants/ServiceConsts.java
+++ b/services/dlab-webapp-common/src/main/java/com/epam/dlab/constants/ServiceConsts.java
@@ -20,13 +20,14 @@
 package com.epam.dlab.constants;
 
 public final class ServiceConsts {
-	public static final String MONGO_NAME = "mongo";
-	public static final String PROVISIONING_SERVICE_NAME = "provisioningService";
-	public static final String MAVEN_SEARCH_API = "mavenSearchService";
-	public static final String SECURITY_SERVICE_NAME = "securityService";
-	public static final String SELF_SERVICE_NAME = "selfService";
-	public static final String PROVISIONING_USER_AGENT = "provisioning-service";
+    public static final String MONGO_NAME = "mongo";
+    public static final String PROVISIONING_SERVICE_NAME = "provisioningService";
+    public static final String BILLING_SERVICE_NAME = "billingService";
+    public static final String MAVEN_SEARCH_API = "mavenSearchService";
+    public static final String SECURITY_SERVICE_NAME = "securityService";
+    public static final String SELF_SERVICE_NAME = "selfService";
+    public static final String PROVISIONING_USER_AGENT = "provisioning-service";
 
-	private ServiceConsts() {
-	}
+    private ServiceConsts() {
+    }
 }
diff --git a/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/client/RESTService.java b/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/client/RESTService.java
index ab1d29e..5b4dbba 100644
--- a/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/client/RESTService.java
+++ b/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/client/RESTService.java
@@ -19,17 +19,15 @@
 
 package com.epam.dlab.rest.client;
 
-import com.epam.dlab.exceptions.DlabException;
 import lombok.extern.slf4j.Slf4j;
 
-import javax.ws.rs.ProcessingException;
 import javax.ws.rs.client.Client;
 import javax.ws.rs.client.Entity;
 import javax.ws.rs.client.Invocation;
 import javax.ws.rs.client.WebTarget;
+import javax.ws.rs.core.GenericType;
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.MediaType;
-import java.net.ConnectException;
 import java.net.URI;
 import java.util.Collections;
 import java.util.Map;
@@ -71,6 +69,20 @@
 		return builder.get(clazz);
 	}
 
+	public <T> T get(String path, GenericType<T> genericType) {
+		return get(path, null, genericType);
+	}
+
+	public <T> T get(String path, String accessToken, GenericType<T> genericType) {
+		return get(path, accessToken, genericType, Collections.emptyMap());
+	}
+
+	public <T> T get(String path, String accessToken, GenericType<T> genericType, Map<String, Object> queryParams) {
+		Invocation.Builder builder = getBuilder(path, accessToken, queryParams);
+		log.debug("REST get secured {} {}", path, accessToken);
+		return builder.get(genericType);
+	}
+
 	public <T> T post(String path, Object parameter, Class<T> clazz) {
 		return post(path, null, parameter, clazz);
 	}
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/DropwizardBearerTokenFilterImpl.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/DropwizardBearerTokenFilterImpl.java
index 9be4251..fc2659c 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/DropwizardBearerTokenFilterImpl.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/DropwizardBearerTokenFilterImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi;
 
 import org.keycloak.adapters.AdapterDeploymentContext;
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/ProjectCallbackHandler.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/ProjectCallbackHandler.java
index 688edb8..d25a2e1 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/ProjectCallbackHandler.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/ProjectCallbackHandler.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.core.response.handlers;
 
 import com.epam.dlab.backendapi.core.commands.DockerAction;
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java
index 005ad9f..2113e8c 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java
index 65d4b70..1840fbb 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
@@ -6,11 +25,11 @@
 
 public interface ProjectService {
 
-	String create(UserInfo userInfo, ProjectCreateDTO projectCreateDTO);
+    String create(UserInfo userInfo, ProjectCreateDTO projectCreateDTO);
 
-	String terminate(UserInfo userInfo, ProjectActionDTO dto);
+    String terminate(UserInfo userInfo, ProjectActionDTO dto);
 
-	String start(UserInfo userInfo, ProjectActionDTO dto);
+    String start(UserInfo userInfo, ProjectActionDTO dto);
 
-	String stop(UserInfo userInfo, ProjectActionDTO dto);
+    String stop(UserInfo userInfo, ProjectActionDTO dto);
 }
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java
index 1daf93f..229e21c 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service.impl;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/entrypoint.sh b/services/self-service/entrypoint.sh
index f436c05..f2d7149 100644
--- a/services/self-service/entrypoint.sh
+++ b/services/self-service/entrypoint.sh
@@ -1,4 +1,24 @@
 #!/bin/sh
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
 
 checkfile () {
 if [ -s /root/step-certs/ca.crt ]
diff --git a/services/self-service/pom.xml b/services/self-service/pom.xml
index 25dbf42..382dc7c 100644
--- a/services/self-service/pom.xml
+++ b/services/self-service/pom.xml
@@ -184,6 +184,12 @@
             <artifactId>guacamole-common</artifactId>
             <version>1.0.0</version>
         </dependency>
+
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-collections4</artifactId>
+            <version>4.4</version>
+        </dependency>
     </dependencies>
 
     <build>
diff --git a/services/self-service/self-service.yml b/services/self-service/self-service.yml
index 53bd131..df92c25 100644
--- a/services/self-service/self-service.yml
+++ b/services/self-service/self-service.yml
@@ -140,16 +140,19 @@
     cron: "*/20 * * ? * * *"
   checkQuoteScheduler:
     enabled: true
-    cron: "0 0 * ? * * *"
+    cron: "0 2/15 * ? * *"
   checkUserQuoteScheduler:
     enabled: false
     cron: "0 0 * ? * * *"
   checkProjectQuoteScheduler:
     enabled: true
-    cron: "0 * * ? * * *"
+    cron: "0 4/15 * ? * *"
   checkEndpointStatusScheduler:
     enabled: true
-    cron: "0 */15 * ? * *"
+    cron: "0 6/15 * ? * *"
+  billingScheduler:
+    enabled: true
+    cron: "0 0/15 * ? * *"
 
 
 guacamole:
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/Project.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/Project.java
index 5a1e5d4..0ce3414 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/Project.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/Project.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.annotation;
 
 import java.lang.annotation.ElementType;
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestDescription.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/ProjectAdmin.java
similarity index 87%
rename from integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestDescription.java
rename to services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/ProjectAdmin.java
index 5c156b4..2fca3cd 100644
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestDescription.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/ProjectAdmin.java
@@ -17,15 +17,14 @@
  * under the License.
  */
 
-package com.epam.dlab.automation.test.libs;
+package com.epam.dlab.backendapi.annotation;
 
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
+@Target(ElementType.METHOD)
 @Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.TYPE)
-public @interface TestDescription {
-    String value() default "";
+public @interface ProjectAdmin {
 }
diff --git a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestDescription.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/User.java
similarity index 87%
copy from integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestDescription.java
copy to services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/User.java
index 5c156b4..b56dd20 100644
--- a/integration-tests/src/test/java/com/epam/dlab/automation/test/libs/TestDescription.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/annotation/User.java
@@ -17,15 +17,14 @@
  * under the License.
  */
 
-package com.epam.dlab.automation.test.libs;
+package com.epam.dlab.backendapi.annotation;
 
 import java.lang.annotation.ElementType;
 import java.lang.annotation.Retention;
 import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 
+@Target(ElementType.PARAMETER)
 @Retention(RetentionPolicy.RUNTIME)
-@Target(ElementType.TYPE)
-public @interface TestDescription {
-    String value() default "";
+public @interface User {
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/KeycloakAuthenticator.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/KeycloakAuthenticator.java
index 5c24010..2c2e0d1 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/KeycloakAuthenticator.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/KeycloakAuthenticator.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.auth;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/filters/DropwizardBearerTokenFilterImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/filters/DropwizardBearerTokenFilterImpl.java
index 8c72669..df6f5fa 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/filters/DropwizardBearerTokenFilterImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/auth/filters/DropwizardBearerTokenFilterImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.auth.filters;
 
 import org.keycloak.adapters.AdapterDeploymentContext;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/CloudConfiguration.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/CloudConfiguration.java
index bdc6ba5..c86c8ae 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/CloudConfiguration.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/CloudConfiguration.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.conf;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/KeycloakConfiguration.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/KeycloakConfiguration.java
index b252de9..212d565 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/KeycloakConfiguration.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/conf/KeycloakConfiguration.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.conf;
 
 import lombok.Data;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseBillingDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseBillingDAO.java
index 9edfaa8..28a6c64 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseBillingDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseBillingDAO.java
@@ -19,89 +19,62 @@
 
 package com.epam.dlab.backendapi.dao;
 
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.domain.BaseShape;
-import com.epam.dlab.backendapi.domain.DataEngineServiceShape;
-import com.epam.dlab.backendapi.domain.DataEngineShape;
-import com.epam.dlab.backendapi.domain.EndpointShape;
-import com.epam.dlab.backendapi.domain.ExploratoryShape;
-import com.epam.dlab.backendapi.domain.SsnShape;
+import com.epam.dlab.backendapi.domain.BillingReportLine;
 import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.backendapi.roles.RoleType;
-import com.epam.dlab.backendapi.roles.UserRoles;
-import com.epam.dlab.billing.BillingCalculationUtils;
-import com.epam.dlab.billing.DlabResourceType;
-import com.epam.dlab.dto.UserInstanceStatus;
-import com.epam.dlab.dto.base.DataEngineType;
-import com.epam.dlab.model.aws.ReportLine;
-import com.google.common.collect.Lists;
+import com.epam.dlab.dto.billing.BillingResourceType;
 import com.google.inject.Inject;
-import com.mongodb.client.AggregateIterable;
-import com.mongodb.client.FindIterable;
 import com.mongodb.client.model.Aggregates;
 import com.mongodb.client.model.Filters;
 import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections4.CollectionUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.bson.Document;
 import org.bson.conversions.Bson;
 
 import java.math.BigDecimal;
+import java.time.ZoneId;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 import java.util.Optional;
 import java.util.function.Supplier;
+import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
 
-import static com.epam.dlab.backendapi.dao.ComputationalDAO.COMPUTATIONAL_ID;
-import static com.epam.dlab.backendapi.dao.ExploratoryDAO.COMPUTATIONAL_RESOURCES;
-import static com.epam.dlab.backendapi.dao.ExploratoryDAO.EXPLORATORY_ID;
 import static com.epam.dlab.backendapi.dao.MongoCollections.BILLING;
-import static com.epam.dlab.backendapi.dao.MongoCollections.USER_INSTANCES;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_RESOURCE_TYPE;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_USAGE_DATE;
+import static com.mongodb.client.model.Accumulators.max;
+import static com.mongodb.client.model.Accumulators.min;
 import static com.mongodb.client.model.Accumulators.sum;
 import static com.mongodb.client.model.Aggregates.group;
 import static com.mongodb.client.model.Aggregates.match;
+import static com.mongodb.client.model.Filters.and;
 import static com.mongodb.client.model.Filters.eq;
 import static com.mongodb.client.model.Filters.gte;
 import static com.mongodb.client.model.Filters.in;
 import static com.mongodb.client.model.Filters.lte;
 import static com.mongodb.client.model.Filters.regex;
-import static com.mongodb.client.model.Projections.excludeId;
-import static com.mongodb.client.model.Projections.fields;
-import static com.mongodb.client.model.Projections.include;
 import static java.util.Collections.singletonList;
 
 @Slf4j
-public abstract class BaseBillingDAO extends BaseDAO implements BillingDAO {
-
-	public static final String SHAPE = "shape";
-	public static final String SERVICE_BASE_NAME = "service_base_name";
-	public static final String ITEMS = "lines";
-	public static final String COST_TOTAL = "cost_total";
-	public static final String FULL_REPORT = "full_report";
-
-	private static final String PROJECT = "project";
-	private static final String MASTER_NODE_SHAPE = "master_node_shape";
-	private static final String SLAVE_NODE_SHAPE = "slave_node_shape";
-	private static final String TOTAL_INSTANCE_NUMBER = "total_instance_number";
-
-	private static final String DATAENGINE_SHAPE = "dataengine_instance_shape";
-	private static final String DATAENGINE_INSTANCE_COUNT = "dataengine_instance_count";
-
-	private static final String DATAENGINE_DOCKER_IMAGE = "image";
+public class BaseBillingDAO extends BaseDAO implements BillingDAO {
 	private static final int ONE_HUNDRED = 100;
-	private static final String TOTAL_FIELD_NAME = "total";
 	private static final String COST_FIELD = "$cost";
-	public static final String SHARED_RESOURCE_NAME = "Shared resource";
-	protected static final String FIELD_PROJECT = "project";
-	private static final String EDGE_FORMAT = "%s-%s-%s-edge";
-	private static final String PROJECT_COLLECTION = "Projects";
-	private static final String TAGS = "tags";
+	private static final String TOTAL_FIELD_NAME = "total";
+	private static final String PROJECT = "project";
+	private static final String APPLICATION = "application";
+	private static final String USAGE_DATE = "usageDate";
+	private static final String USER = "user";
+	private static final String RESOURCE_TYPE = "resource_type";
+	private static final String DLAB_ID = "dlabId";
+	private static final String FROM = "from";
+	private static final String TO = "to";
+	private static final String PRODUCT = "product";
+	private static final String CURRENCY = "currency";
+	private static final String COST = "cost";
+	private static final String RESOURCE_NAME = "resource_name";
+	private static final String ENDPOINT = "endpoint";
+	private static final String SHAPE = "shape";
+	private static final String EXPLORATORY = "exploratoryName";
 
 	@Inject
 	protected SettingsDAO settings;
@@ -111,160 +84,6 @@
 	private ProjectDAO projectDAO;
 
 	@Override
-	public Document getReport(UserInfo userInfo, BillingFilter filter) {
-		boolean isFullReport = UserRoles.checkAccess(userInfo, RoleType.PAGE, "/api/infrastructure_provision/billing",
-				userInfo.getRoles());
-		setUserFilter(userInfo, filter, isFullReport);
-		List<Bson> matchCriteria = matchCriteria(filter);
-		List<Bson> pipeline = new ArrayList<>();
-		if (!matchCriteria.isEmpty()) {
-			pipeline.add(Aggregates.match(Filters.and(matchCriteria)));
-		}
-		pipeline.add(groupCriteria());
-		pipeline.add(sortCriteria());
-		final Map<String, BaseShape> shapes = getShapes(filter.getShape());
-		return prepareReport(filter.getStatuses(), !filter.getShape().isEmpty(),
-				getCollection(BILLING).aggregate(pipeline), shapes, isFullReport);
-	}
-
-	private Document prepareReport(List<UserInstanceStatus> statuses, boolean filterByShape,
-								   AggregateIterable<Document> agg,
-								   Map<String, BaseShape> shapes, boolean fullReport) {
-
-		List<Document> reportItems = new ArrayList<>();
-
-		String usageDateStart = null;
-		String usageDateEnd = null;
-		double costTotal = 0D;
-
-		for (Document d : agg) {
-			Document id = (Document) d.get(MongoKeyWords.MONGO_ID);
-			String resourceId = id.getString(dlabIdFieldName());
-			BaseShape shape = shapes.get(resourceId);
-			final UserInstanceStatus status = Optional.ofNullable(shape).map(BaseShape::getStatus).orElse(null);
-			if ((filterByShape && shape == null) ||
-					(!statuses.isEmpty() && statuses.stream().noneMatch(s -> s.equals(status)))) {
-				continue;
-			}
-
-
-			String dateStart = d.getString(MongoKeyWords.USAGE_FROM);
-			if (StringUtils.compare(usageDateStart, dateStart, false) > 0) {
-				usageDateStart = dateStart;
-			}
-			String dateEnd = d.getString(MongoKeyWords.USAGE_TO);
-			if (StringUtils.compare(usageDateEnd, dateEnd) < 0) {
-				usageDateEnd = dateEnd;
-			}
-
-
-			costTotal += d.getDouble(MongoKeyWords.COST);
-
-			final String dlabResourceType = id.getString("dlab_resource_type");
-			final String statusString = Optional
-					.ofNullable(status)
-					.map(UserInstanceStatus::toString)
-					.orElse(StringUtils.EMPTY);
-
-			Document item = new Document()
-					.append(MongoKeyWords.DLAB_USER, getOrDefault(id.getString(USER)))
-					.append(dlabIdFieldName(), resourceId)
-					.append(shapeFieldName(), Optional.ofNullable(shape).map(BaseShape::format)
-							.orElse(StringUtils.EMPTY))
-					.append("dlab_resource_type", DlabResourceType
-							.getResourceTypeName(dlabResourceType)) //todo check on azure!!!
-					.append(STATUS, statusString)
-					.append(FIELD_RESOURCE_TYPE, resourceType(id))
-					.append(productFieldName(), id.getString(productFieldName()))
-					.append(PROJECT, getOrDefault(id.getString(PROJECT)))
-					.append(MongoKeyWords.COST, d.getDouble(MongoKeyWords.COST))
-					.append(costFieldName(), BillingCalculationUtils.formatDouble(d.getDouble(MongoKeyWords
-							.COST)))
-					.append(currencyCodeFieldName(), id.getString(currencyCodeFieldName()))
-					.append(usageDateFromFieldName(), dateStart)
-					.append(usageDateToFieldName(), dateEnd);
-
-			reportItems.add(item);
-		}
-
-		return new Document()
-				.append(SERVICE_BASE_NAME, settings.getServiceBaseName())
-				.append(usageDateFromFieldName(), usageDateStart)
-				.append(usageDateToFieldName(), usageDateEnd)
-				.append(ITEMS, reportItems)
-				.append(COST_TOTAL, BillingCalculationUtils.formatDouble(BillingCalculationUtils.round
-						(costTotal, 2)))
-				.append(currencyCodeFieldName(), (reportItems.isEmpty() ? null :
-						reportItems.get(0).getString(currencyCodeFieldName())))
-				.append(FULL_REPORT, fullReport);
-
-	}
-
-	protected String resourceType(Document id) {
-		return id.getString(FIELD_RESOURCE_TYPE);
-	}
-
-	protected String currencyCodeFieldName() {
-		return "currency_code";
-	}
-
-	protected String usageDateToFieldName() {
-		return MongoKeyWords.USAGE_TO;
-	}
-
-	protected String costFieldName() {
-		return MongoKeyWords.COST;
-	}
-
-	protected String productFieldName() {
-		return ReportLine.FIELD_PRODUCT;
-	}
-
-	protected String usageDateFromFieldName() {
-		return MongoKeyWords.USAGE_FROM;
-	}
-
-	protected String dlabIdFieldName() {
-		return ReportLine.FIELD_DLAB_ID;
-	}
-
-	protected String shapeFieldName() {
-		return SHAPE;
-	}
-
-	protected abstract Bson sortCriteria();
-
-	protected abstract Bson groupCriteria();
-
-	private Map<String, BaseShape> getShapes(List<String> shapeNames) {
-		FindIterable<Document> userInstances = getUserInstances();
-		final Map<String, BaseShape> shapes = new HashMap<>();
-
-		for (Document d : userInstances) {
-			getExploratoryShape(shapeNames, d)
-					.ifPresent(shape -> shapes.put(d.getString(EXPLORATORY_ID), shape));
-			@SuppressWarnings("unchecked")
-			List<Document> comp = (List<Document>) d.get(COMPUTATIONAL_RESOURCES);
-			comp.forEach(c -> (isDataEngine(c.getString(DATAENGINE_DOCKER_IMAGE)) ? getDataEngineShape(shapeNames, c) :
-					getDataEngineServiceShape(shapeNames, c))
-					.ifPresent(shape -> shapes.put(c.getString(COMPUTATIONAL_ID), shape)));
-		}
-
-		StreamSupport.stream(getCollection(PROJECT_COLLECTION).find().spliterator(), false)
-				.forEach(d -> ((List<Document>) d.get("endpoints"))
-						.forEach(endpoint -> getEndpointShape(shapeNames, endpoint)
-								.ifPresent(shape -> shapes.put(String.format(EDGE_FORMAT, getServiceBaseName(),
-										d.getString("name").toLowerCase(),
-										endpoint.getString("name")), shape))));
-
-		getSsnShape(shapeNames)
-				.ifPresent(shape -> shapes.put(getServiceBaseName() + "-ssn", shape));
-
-		log.trace("Loaded shapes is {}", shapes);
-		return shapes;
-	}
-
-	@Override
 	public Double getTotalCost() {
 		return aggregateBillingData(singletonList(group(null, sum(TOTAL_FIELD_NAME, COST_FIELD))));
 	}
@@ -306,7 +125,6 @@
 				.isPresent();
 	}
 
-
 	@Override
 	public boolean isProjectQuoteReached(String project) {
 		final Double projectCost = getProjectCost(project);
@@ -316,12 +134,42 @@
 	}
 
 	@Override
+	public List<BillingReportLine> findBillingData(String project, String endpoint, List<String> resourceNames) {
+		return find(BILLING, and(eq(PROJECT, project), eq(ENDPOINT, endpoint), in(RESOURCE_NAME, resourceNames)), BillingReportLine.class);
+	}
+
+	@Override
 	public int getBillingProjectQuoteUsed(String project) {
 		return toPercentage(() -> projectDAO.getAllowedBudget(project), getProjectCost(project));
 	}
 
-	private String getOrDefault(String value) {
-		return StringUtils.isNotBlank(value) ? value : SHARED_RESOURCE_NAME;
+	public List<BillingReportLine> aggregateBillingData(BillingFilter filter) {
+		List<Bson> pipeline = new ArrayList<>();
+		List<Bson> matchCriteria = matchCriteria(filter);
+		if (!matchCriteria.isEmpty()) {
+			pipeline.add(Aggregates.match(Filters.and(matchCriteria)));
+		}
+		pipeline.add(groupCriteria());
+		return StreamSupport.stream(getCollection(BILLING).aggregate(pipeline).spliterator(), false)
+				.map(this::toBillingReport)
+				.collect(Collectors.toList());
+	}
+
+	@Override
+	public void deleteByUsageDate(String application, String usageDate) {
+		deleteMany(BILLING, and(eq(APPLICATION, application), eq(USAGE_DATE, usageDate)));
+	}
+
+	@Override
+	public void deleteByUsageDateRegex(String application, String usageDate) {
+		deleteMany(BILLING, and(eq(APPLICATION, application), regex(USAGE_DATE, "^" + usageDate)));
+	}
+
+	@Override
+	public void save(List<BillingReportLine> billingData) {
+		if (CollectionUtils.isNotEmpty(billingData)) {
+			insertMany(BILLING, new ArrayList<>(billingData));
+		}
 	}
 
 	private Integer toPercentage(Supplier<Optional<Integer>> allowedBudget, Double totalCost) {
@@ -331,152 +179,65 @@
 				.orElse(BigDecimal.ZERO.intValue());
 	}
 
-	private List<Bson> matchCriteria(BillingFilter filter) {
-
-		List<Bson> searchCriteria = new ArrayList<>();
-
-		if (filter.getUser() != null && !filter.getUser().isEmpty()) {
-			searchCriteria.add(Filters.in(MongoKeyWords.DLAB_USER, filter.getUser()));
-		}
-
-		if (filter.getResourceType() != null && !filter.getResourceType().isEmpty()) {
-			searchCriteria.add(Filters.in("dlab_resource_type",
-					DlabResourceType.getResourceTypeIds(filter.getResourceType())));
-		}
-
-		if (filter.getDlabId() != null && !filter.getDlabId().isEmpty()) {
-			searchCriteria.add(regex(dlabIdFieldName(), filter.getDlabId(), "i"));
-		}
-
-		if (filter.getDateStart() != null && !filter.getDateStart().isEmpty()) {
-			searchCriteria.add(gte(FIELD_USAGE_DATE, filter.getDateStart()));
-		}
-		if (filter.getDateEnd() != null && !filter.getDateEnd().isEmpty()) {
-			searchCriteria.add(lte(FIELD_USAGE_DATE, filter.getDateEnd()));
-		}
-		if (filter.getProjects() != null && !filter.getProjects().isEmpty()) {
-			searchCriteria.add(in(PROJECT, filter.getProjects()));
-		}
-
-		searchCriteria.addAll(cloudMatchCriteria(filter));
-		return searchCriteria;
-	}
-
-	protected abstract List<Bson> cloudMatchCriteria(BillingFilter filter);
-
 	private Double aggregateBillingData(List<Bson> pipeline) {
 		return Optional.ofNullable(aggregate(BILLING, pipeline).first())
 				.map(d -> d.getDouble(TOTAL_FIELD_NAME))
 				.orElse(BigDecimal.ZERO.doubleValue());
 	}
 
-	private FindIterable<Document> getUserInstances() {
-		return getCollection(USER_INSTANCES)
-				.find()
-				.projection(
-						fields(excludeId(),
-								include(SHAPE, EXPLORATORY_ID, STATUS, TAGS,
-										COMPUTATIONAL_RESOURCES + "." + COMPUTATIONAL_ID,
-										COMPUTATIONAL_RESOURCES + "." + MASTER_NODE_SHAPE,
-										COMPUTATIONAL_RESOURCES + "." + SLAVE_NODE_SHAPE,
-										COMPUTATIONAL_RESOURCES + "." + TOTAL_INSTANCE_NUMBER,
-										COMPUTATIONAL_RESOURCES + "." + DATAENGINE_SHAPE,
-										COMPUTATIONAL_RESOURCES + "." + DATAENGINE_INSTANCE_COUNT,
-										COMPUTATIONAL_RESOURCES + "." + DATAENGINE_DOCKER_IMAGE,
-										COMPUTATIONAL_RESOURCES + "." + STATUS,
-										COMPUTATIONAL_RESOURCES + "." + TAGS
-								)));
+	private Bson groupCriteria() {
+		return group(getGroupingFields(USER, DLAB_ID, RESOURCE_TYPE, RESOURCE_NAME, PROJECT, PRODUCT, CURRENCY, SHAPE, EXPLORATORY),
+				sum(COST, "$" + COST),
+				min(FROM, "$" + FROM),
+				max(TO, "$" + TO));
 	}
 
-	private Optional<ExploratoryShape> getExploratoryShape(List<String> shapeNames, Document d) {
-		final String shape = d.getString(SHAPE);
-		if (isShapeAcceptable(shapeNames, shape)) {
-			return Optional.of(ExploratoryShape.builder()
-					.shape(shape)
-					.status(UserInstanceStatus.of(d.getString(STATUS)))
-					.tags((Map<String, String>) d.get(TAGS))
-					.build());
+	private List<Bson> matchCriteria(BillingFilter filter) {
+		List<Bson> searchCriteria = new ArrayList<>();
+
+		if (CollectionUtils.isNotEmpty(filter.getUsers())) {
+			searchCriteria.add(in(USER, filter.getUsers()));
 		}
-		return Optional.empty();
-	}
-
-	private Optional<DataEngineServiceShape> getDataEngineServiceShape(List<String> shapeNames, Document c) {
-		final String desMasterShape = c.getString(MASTER_NODE_SHAPE);
-		final String desSlaveShape = c.getString(SLAVE_NODE_SHAPE);
-		if (isShapeAcceptable(shapeNames, desMasterShape, desSlaveShape)) {
-			return Optional.of(DataEngineServiceShape.builder()
-					.shape(desMasterShape)
-					.status(UserInstanceStatus.of(c.getString(STATUS)))
-					.slaveCount(c.getString(TOTAL_INSTANCE_NUMBER))
-					.slaveShape(desSlaveShape)
-					.tags((Map<String, String>) c.get(TAGS))
-					.build());
+		if (CollectionUtils.isNotEmpty(filter.getResourceTypes())) {
+			searchCriteria.add(in(RESOURCE_TYPE, filter.getResourceTypes()));
 		}
-		return Optional.empty();
-	}
-
-	private Optional<DataEngineShape> getDataEngineShape(List<String> shapeNames, Document c) {
-		final String shape = c.getString(DATAENGINE_SHAPE);
-		if ((isShapeAcceptable(shapeNames, shape)) && StringUtils.isNotEmpty(c.getString(COMPUTATIONAL_ID))) {
-
-			return Optional.of(DataEngineShape.builder()
-					.shape(shape)
-					.status(UserInstanceStatus.of(c.getString(STATUS)))
-					.slaveCount(c.getString(DATAENGINE_INSTANCE_COUNT))
-					.tags((Map<String, String>) c.get(TAGS))
-					.build());
+		if (StringUtils.isNotEmpty(filter.getDlabId())) {
+			searchCriteria.add(regex(DLAB_ID, filter.getDlabId(), "i"));
 		}
-		return Optional.empty();
-	}
-
-	private Optional<SsnShape> getSsnShape(List<String> shapeNames) {
-		final String shape = getSsnShape();
-		if (isShapeAcceptable(shapeNames, shape)) {
-			return Optional.of(SsnShape.builder()
-					.shape(shape)
-					.status(UserInstanceStatus.RUNNING)
-					.build());
+		if (StringUtils.isNotEmpty(filter.getDateStart())) {
+			searchCriteria.add(gte(USAGE_DATE, filter.getDateStart()));
 		}
-		return Optional.empty();
-	}
-
-	private Optional<EndpointShape> getEndpointShape(List<String> shapeNames, Document endpoint) {
-		if (isShapeAcceptable(shapeNames, getSsnShape())) {
-			return Optional.of(EndpointShape.builder()
-					.shape(StringUtils.EMPTY)
-					.status(UserInstanceStatus.of(endpoint.getString("status")))
-					.build());
+		if (StringUtils.isNotEmpty(filter.getDateEnd())) {
+			searchCriteria.add(lte(USAGE_DATE, filter.getDateEnd()));
 		}
-		return Optional.empty();
-	}
-
-	private boolean isDataEngine(String dockerImage) {
-		return DataEngineType.fromDockerImageName(dockerImage) == DataEngineType.SPARK_STANDALONE;
-	}
-
-	private boolean isShapeAcceptable(List<String> shapeNames, String... shapes) {
-		return shapeNames == null || shapeNames.isEmpty() || Arrays.stream(shapes).anyMatch(shapeNames::contains);
-	}
-
-	protected String getServiceBaseName() {
-		return settings.getServiceBaseName();
-	}
-
-	protected String getSsnShape() {
-		return settings.getSsnInstanceSize();
-	}
-
-	protected void usersToLowerCase(List<String> users) {
-		if (users != null) {
-			users.replaceAll(u -> u != null ? u.toLowerCase() : null);
+		if (CollectionUtils.isNotEmpty(filter.getProjects())) {
+			searchCriteria.add(in(PROJECT, filter.getProjects()));
 		}
+		if (CollectionUtils.isNotEmpty(filter.getProducts())) {
+			searchCriteria.add(in(PRODUCT, filter.getProducts()));
+		}
+		if (CollectionUtils.isNotEmpty(filter.getShapes())) {
+			searchCriteria.add(regex(SHAPE, "(" + String.join("|", filter.getShapes()) + ")"));
+		}
+
+		return searchCriteria;
 	}
 
-	protected void setUserFilter(UserInfo userInfo, BillingFilter filter, boolean isFullReport) {
-		if (isFullReport) {
-			usersToLowerCase(filter.getUser());
-		} else {
-			filter.setUser(Lists.newArrayList(userInfo.getName().toLowerCase()));
-		}
+	private BillingReportLine toBillingReport(Document d) {
+		Document id = (Document) d.get("_id");
+		return BillingReportLine.builder()
+				.dlabId(id.getString(DLAB_ID))
+				.project(id.getString(PROJECT))
+				.resourceName(id.getString(RESOURCE_NAME))
+				.exploratoryName(id.getString(EXPLORATORY))
+				.shape(id.getString(SHAPE))
+				.user(id.getString(USER))
+				.product(id.getString(PRODUCT))
+				.resourceType(Optional.ofNullable(id.getString(RESOURCE_TYPE)).map(BillingResourceType::valueOf).orElse(null))
+				.usageDateFrom(d.getDate(FROM).toInstant().atZone(ZoneId.systemDefault()).toLocalDate())
+				.usageDateTo(d.getDate(TO).toInstant().atZone(ZoneId.systemDefault()).toLocalDate())
+				.cost(BigDecimal.valueOf(d.getDouble(COST)).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue())
+				.currency(id.getString(CURRENCY))
+				.build();
 	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseDAO.java
index 034011a..c2ff69b 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BaseDAO.java
@@ -31,7 +31,11 @@
 import com.google.inject.Inject;
 import com.mongodb.BasicDBObject;
 import com.mongodb.MongoException;
-import com.mongodb.client.*;
+import com.mongodb.client.AggregateIterable;
+import com.mongodb.client.FindIterable;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.MongoCursor;
+import com.mongodb.client.MongoIterable;
 import com.mongodb.client.model.UpdateOptions;
 import com.mongodb.client.result.DeleteResult;
 import com.mongodb.client.result.UpdateResult;
@@ -41,13 +45,21 @@
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Date;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+import java.util.UUID;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 import java.util.stream.StreamSupport;
 
-import static com.mongodb.client.model.Filters.*;
+import static com.mongodb.client.model.Filters.and;
+import static com.mongodb.client.model.Filters.exists;
+import static com.mongodb.client.model.Filters.ne;
 
 /**
  * Implements the base API for Mongo database.
@@ -158,6 +170,29 @@
 	}
 
 	/**
+	 * Serializes objects and inserts into the collection.
+	 *
+	 * @param collection collection name.
+	 * @param object     for inserting to collection.
+	 */
+	protected void insertMany(String collection, List<Object> object) {
+		try {
+			mongoService.getCollection(collection)
+					.insertMany(convertToBson(object)
+							.stream()
+							.peek(o -> {
+								o.append(ID, generateUUID());
+								o.append(TIMESTAMP, new Date());
+							})
+							.collect(Collectors.toList())
+					);
+		} catch (MongoException e) {
+			LOGGER.warn("Insert to Mongo DB fails: {}", e.getLocalizedMessage(), e);
+			throw new DlabException("Insert to Mongo DB fails: " + e.getLocalizedMessage(), e);
+		}
+	}
+
+	/**
 	 * Updates single document in the collection by condition.
 	 *
 	 * @param collection collection name.
@@ -230,6 +265,22 @@
 	}
 
 	/**
+	 * Removes many documents in the collection by condition.
+	 *
+	 * @param collection collection name.
+	 * @param condition  condition for search documents in collection.
+	 */
+	protected DeleteResult deleteMany(String collection, Bson condition) {
+		try {
+			return mongoService.getCollection(collection)
+					.deleteMany(condition);
+		} catch (MongoException e) {
+			LOGGER.warn("Removing document from Mongo DB fails: {}", e.getLocalizedMessage(), e);
+			throw new DlabException("Removing document from Mongo DB fails: " + e.getLocalizedMessage(), e);
+		}
+	}
+
+	/**
 	 * Finds and returns all documents from the collection.
 	 *
 	 * @param collection collection name.
@@ -362,6 +413,13 @@
 		}
 	}
 
+	List<Document> convertToBson(List<Object> objects) {
+		return objects
+				.stream()
+				.map(this::convertToBson)
+				.collect(Collectors.toList());
+	}
+
 	/**
 	 * Finds and returns one object as given class from the collection by condition.
 	 *
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BillingDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BillingDAO.java
index 1ea06b8..67630cd 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BillingDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/BillingDAO.java
@@ -18,9 +18,10 @@
  */
 package com.epam.dlab.backendapi.dao;
 
-import com.epam.dlab.auth.UserInfo;
+import com.epam.dlab.backendapi.domain.BillingReportLine;
 import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import org.bson.Document;
+
+import java.util.List;
 
 public interface BillingDAO {
 	Double getTotalCost();
@@ -41,5 +42,13 @@
 
 	boolean isProjectQuoteReached(String project);
 
-	Document getReport(UserInfo userInfo, BillingFilter filter);
+	List<BillingReportLine> findBillingData(String project, String endpoint, List<String> resourceNames);
+
+	List<BillingReportLine> aggregateBillingData(BillingFilter filter);
+
+	void deleteByUsageDate(String application, String usageDate);
+
+	void deleteByUsageDateRegex(String application, String usageDate);
+
+	void save(List<BillingReportLine> billingData);
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java
index ebacc51..f554873 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java
@@ -23,8 +23,6 @@
 import com.epam.dlab.backendapi.SelfServiceApplication;
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
 import com.epam.dlab.backendapi.resources.aws.ComputationalResourceAws;
-import com.epam.dlab.backendapi.resources.dto.HealthStatusEnum;
-import com.epam.dlab.backendapi.resources.dto.HealthStatusPageDTO;
 import com.epam.dlab.dto.UserInstanceDTO;
 import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.dto.base.DataEngineType;
@@ -166,18 +164,6 @@
 	}
 
 	/**
-	 * @param user       the name of user.
-	 * @param fullReport return full report if <b>true</b> otherwise common status only.
-	 * @throws DlabException in case of any exception
-	 */
-	public HealthStatusPageDTO getHealthStatusPageDTO(String user, boolean fullReport) {
-		return new HealthStatusPageDTO()
-				.withStatus(HealthStatusEnum.OK)
-				.withListResources(Collections.emptyList());
-	}
-
-
-	/**
 	 * Updates the status of exploratory and computational for user.
 	 *
 	 * @param user    the name of user.
@@ -211,11 +197,6 @@
 				.collect(Collectors.toSet());
 	}
 
-	public Set<String> fetchAllUsers() {
-		return stream(find(USER_EDGE)).map(d -> d.getString(ID))
-				.collect(Collectors.toSet());
-	}
-
 	@SuppressWarnings("unchecked")
 	private void updateUserResourceStatuses(String user, String project, EnvResourceList list, Document exp) {
 		final String exploratoryName = exp.getString(EXPLORATORY_NAME);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
index ebe81c5..fc44569 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
@@ -144,6 +144,25 @@
 		return getUserInstances(and(eq(PROJECT, project)), true);
 	}
 
+	public List<UserInstanceDTO> fetchExploratoryFieldsForProjectWithComp(List<String> projects) {
+		return getUserInstances(and(in(PROJECT, projects)), true);
+	}
+
+	public List<UserInstanceDTO> findExploratories(String project, String endpoint, String user) {
+		return getUserInstances(and(eq(PROJECT, project), eq(ENDPOINT, endpoint), eq(USER, user)), true);
+	}
+
+	public List<UserInstanceDTO> fetchUserExploratoriesWhereStatusIn(String user, boolean computationalFieldsRequired,
+																	 UserInstanceStatus... statuses) {
+		final List<String> statusList = statusList(statuses);
+		return getUserInstances(
+				and(
+						eq(USER, user),
+						in(STATUS, statusList)
+				),
+				computationalFieldsRequired);
+	}
+
 	/**
 	 * Finds and returns the info of all user's notebooks whose status or status of affiliated computational resource
 	 * is present among predefined ones.
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAO.java
index 245df5b..c94daae 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.dao;
 
 import com.epam.dlab.auth.UserInfo;
@@ -12,8 +31,6 @@
 public interface ProjectDAO {
 	List<ProjectDTO> getProjects();
 
-	List<ProjectDTO> getProjectsWithStatus(ProjectDTO.Status status);
-
 	List<ProjectDTO> getProjectsWithEndpointStatusNotIn(UserInstanceStatus... statuses);
 
 	List<ProjectDTO> getUserProjects(UserInfo userInfo, boolean active);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAOImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAOImpl.java
index 7128c51..fc79656 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAOImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ProjectDAOImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.dao;
 
 import com.epam.dlab.auth.UserInfo;
@@ -18,7 +37,11 @@
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
-import static com.mongodb.client.model.Filters.*;
+import static com.mongodb.client.model.Filters.and;
+import static com.mongodb.client.model.Filters.elemMatch;
+import static com.mongodb.client.model.Filters.eq;
+import static com.mongodb.client.model.Filters.in;
+import static com.mongodb.client.model.Filters.not;
 
 public class ProjectDAOImpl extends BaseDAO implements ProjectDAO {
 
@@ -46,11 +69,6 @@
 	}
 
 	@Override
-	public List<ProjectDTO> getProjectsWithStatus(ProjectDTO.Status status) {
-		return find(PROJECTS_COLLECTION, eq(STATUS_FIELD, status.toString()), ProjectDTO.class);
-	}
-
-	@Override
 	public List<ProjectDTO> getProjectsWithEndpointStatusNotIn(UserInstanceStatus... statuses) {
 		final List<String> statusList =
 				Arrays.stream(statuses).map(UserInstanceStatus::name).collect(Collectors.toList());
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDao.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDao.java
index 0a4dde5..ae221f1 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDao.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDao.java
@@ -25,8 +25,6 @@
 
 	void updateUsers(String group, Set<String> users);
 
-	void removeUser(String group, String user);
-
 	void removeGroup(String groupId);
 
 	Set<String> getUserGroups(String user);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDaoImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDaoImpl.java
index 03a6f51..cc0da31 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDaoImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserGroupDaoImpl.java
@@ -44,11 +44,6 @@
 	}
 
 	@Override
-	public void removeUser(String group, String user) {
-		updateOne(USER_GROUPS, eq(ID, group), pull(USERS_FIELD, user));
-	}
-
-	@Override
 	public void removeGroup(String groupId) {
 		deleteOne(USER_GROUPS, eq(ID, groupId));
 	}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDao.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDao.java
index c2a401b..48abb54 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDao.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDao.java
@@ -39,8 +39,6 @@
 
 	boolean addGroupToRole(Set<String> groups, Set<String> roleIds);
 
-	boolean removeGroupFromRole(Set<String> groups, Set<String> roleIds);
-
 	void removeGroupWhenRoleNotIn(String group, Set<String> roleIds);
 
 	void removeUnnecessaryRoles(CloudProvider cloudProviderToBeRemoved, List<CloudProvider> remainingProviders);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDaoImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDaoImpl.java
index fffc70b..5bc845a 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDaoImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/UserRoleDaoImpl.java
@@ -21,6 +21,7 @@
 import com.epam.dlab.backendapi.resources.dto.UserGroupDto;
 import com.epam.dlab.backendapi.resources.dto.UserRoleDto;
 import com.epam.dlab.cloud.CloudProvider;
+import com.epam.dlab.exceptions.DlabException;
 import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.inject.Singleton;
@@ -33,9 +34,12 @@
 import java.io.InputStream;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collections;
 import java.util.Date;
 import java.util.List;
 import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 import static com.epam.dlab.backendapi.dao.MongoCollections.USER_GROUPS;
 import static com.mongodb.client.model.Aggregates.group;
@@ -92,11 +96,20 @@
 
 	@Override
 	public void updateMissingRoles(CloudProvider cloudProvider) {
-		getUserRoleFromFile(cloudProvider).stream()
-				.filter(u -> findAll().stream()
+		getUserRoleFromFile(cloudProvider)
+				.stream()
+				.peek(u -> u.setGroups(Collections.emptySet()))
+				.filter(u -> findAll()
+						.stream()
 						.map(UserRoleDto::getId)
 						.noneMatch(id -> id.equals(u.getId())))
 				.forEach(this::insert);
+
+		addGroupToRole(aggregateRolesByGroup()
+						.stream()
+						.map(UserGroupDto::getGroup)
+						.collect(Collectors.toSet()),
+				getDefaultShapes(cloudProvider));
 	}
 
 	@Override
@@ -106,11 +119,6 @@
 	}
 
 	@Override
-	public boolean removeGroupFromRole(Set<String> groups, Set<String> roleIds) {
-		return conditionMatched(updateMany(MongoCollections.ROLES, in(ID, roleIds), pullAll(GROUPS_FIELD, groups)));
-	}
-
-	@Override
 	public void removeGroupWhenRoleNotIn(String group, Set<String> roleIds) {
 		updateMany(MongoCollections.ROLES, not(in(ID, roleIds)), pull(GROUPS_FIELD, group));
 	}
@@ -168,6 +176,21 @@
 		}
 	}
 
+	private Set<String> getDefaultShapes(CloudProvider cloudProvider) {
+		if (cloudProvider == CloudProvider.AWS) {
+			return Stream.of("nbShapes_t2.medium_fetching", "compShapes_c4.xlarge_fetching")
+					.collect(Collectors.toSet());
+		} else if (cloudProvider == CloudProvider.GCP) {
+			return Stream.of("compShapes_n1-standard-2_fetching", "nbShapes_n1-standard-2_fetching")
+					.collect(Collectors.toSet());
+		} else if (cloudProvider == CloudProvider.AZURE) {
+			return Stream.of("nbShapes_Standard_E4s_v3_fetching", "compShapes_Standard_E4s_v3_fetching")
+					.collect(Collectors.toSet());
+		} else {
+			throw new DlabException("Unsupported cloud provider " + cloudProvider);
+		}
+	}
+
 	private Document roleDocument() {
 		return new Document().append(ID, "$" + ID)
 				.append(DESCRIPTION, "$" + DESCRIPTION)
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/aws/AwsBillingDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/aws/AwsBillingDAO.java
deleted file mode 100644
index fde1d8f..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/aws/AwsBillingDAO.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.dao.aws;
-
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.backendapi.dao.BaseBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import org.bson.Document;
-import org.bson.conversions.Bson;
-
-import java.util.Collections;
-import java.util.List;
-
-import static com.epam.dlab.model.aws.ReportLine.FIELD_COST;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_CURRENCY_CODE;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_DLAB_ID;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_PRODUCT;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_RESOURCE_TYPE;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_USAGE_DATE;
-import static com.mongodb.client.model.Accumulators.max;
-import static com.mongodb.client.model.Accumulators.min;
-import static com.mongodb.client.model.Accumulators.sum;
-import static com.mongodb.client.model.Aggregates.group;
-import static com.mongodb.client.model.Aggregates.sort;
-
-/**
- * DAO for user billing.
- */
-public class AwsBillingDAO extends BaseBillingDAO {
-
-    public static final String DLAB_RESOURCE_TYPE = "dlab_resource_type";
-    public static final String USAGE_DATE_START = "from";
-    public static final String USAGE_DATE_END = "to";
-    public static final String TAG_RESOURCE_ID = "tag_resource_id";
-
-    @Override
-    protected Bson sortCriteria() {
-        return sort(new Document(ID + "." + USER, 1)
-                .append(ID + "." + FIELD_DLAB_ID, 1)
-                .append(ID + "." + DLAB_RESOURCE_TYPE, 1)
-                .append(ID + "." + FIELD_PRODUCT, 1));
-    }
-
-    @Override
-    protected Bson groupCriteria() {
-        return group(getGroupingFields(USER, FIELD_DLAB_ID, DLAB_RESOURCE_TYPE, FIELD_PRODUCT, FIELD_RESOURCE_TYPE,
-                FIELD_CURRENCY_CODE, FIELD_PROJECT),
-                sum(FIELD_COST, "$" + FIELD_COST),
-                min(MongoKeyWords.USAGE_FROM, "$" + FIELD_USAGE_DATE),
-                max(MongoKeyWords.USAGE_TO, "$" + FIELD_USAGE_DATE));
-    }
-
-    @Override
-    protected List<Bson> cloudMatchCriteria(BillingFilter filter) {
-        return Collections.emptyList();
-    }
-}
\ No newline at end of file
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/azure/AzureBillingDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/azure/AzureBillingDAO.java
deleted file mode 100644
index 04c5e6d..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/azure/AzureBillingDAO.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.dao.azure;
-
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.backendapi.dao.BaseBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.billing.DlabResourceType;
-import com.google.inject.Singleton;
-import com.mongodb.client.model.Accumulators;
-import com.mongodb.client.model.Aggregates;
-import com.mongodb.client.model.Filters;
-import com.mongodb.client.model.Sorts;
-import lombok.extern.slf4j.Slf4j;
-import org.bson.Document;
-import org.bson.conversions.Bson;
-
-import java.util.Collections;
-import java.util.List;
-
-@Singleton
-@Slf4j
-public class AzureBillingDAO extends BaseBillingDAO {
-	public static final String SIZE = "size";
-
-	@Override
-	protected List<Bson> cloudMatchCriteria(BillingFilter filter) {
-		if (!filter.getService().isEmpty()) {
-			return Collections.singletonList(Filters.in(MongoKeyWords.METER_CATEGORY, filter.getService()));
-		} else {
-			return Collections.emptyList();
-		}
-	}
-
-	@Override
-	protected Bson groupCriteria() {
-		return Aggregates.group(getGroupingFields(
-				MongoKeyWords.DLAB_USER,
-				MongoKeyWords.DLAB_ID,
-				MongoKeyWords.RESOURCE_TYPE,
-				MongoKeyWords.METER_CATEGORY,
-				MongoKeyWords.CURRENCY_CODE,
-				FIELD_PROJECT),
-				Accumulators.sum(MongoKeyWords.COST, MongoKeyWords.prepend$(MongoKeyWords.COST)),
-				Accumulators.min(MongoKeyWords.USAGE_FROM, MongoKeyWords.prepend$(MongoKeyWords.USAGE_DAY)),
-				Accumulators.max(MongoKeyWords.USAGE_TO, MongoKeyWords.prepend$(MongoKeyWords.USAGE_DAY))
-		);
-	}
-
-	@Override
-	protected Bson sortCriteria() {
-		return Aggregates.sort(Sorts.ascending(
-				MongoKeyWords.prependId(MongoKeyWords.DLAB_USER),
-				MongoKeyWords.prependId(MongoKeyWords.DLAB_ID),
-				MongoKeyWords.prependId(MongoKeyWords.RESOURCE_TYPE),
-				MongoKeyWords.prependId(MongoKeyWords.METER_CATEGORY)));
-	}
-
-	@Override
-	protected String getServiceBaseName() {
-		return settings.getServiceBaseName().replace("_", "-").toLowerCase();
-	}
-
-	@Override
-	protected String shapeFieldName() {
-		return SIZE;
-	}
-
-	@Override
-	protected String dlabIdFieldName() {
-		return MongoKeyWords.DLAB_ID;
-	}
-
-	@Override
-	protected String productFieldName() {
-		return MongoKeyWords.METER_CATEGORY;
-	}
-
-	@Override
-	protected String costFieldName() {
-		return MongoKeyWords.COST_STRING;
-	}
-
-	@Override
-	protected String usageDateFromFieldName() {
-		return MongoKeyWords.USAGE_FROM;
-	}
-
-	@Override
-	protected String usageDateToFieldName() {
-		return MongoKeyWords.USAGE_TO;
-	}
-
-	@Override
-	protected String currencyCodeFieldName() {
-		return MongoKeyWords.CURRENCY_CODE;
-	}
-
-	@Override
-	protected String resourceType(Document id) {
-		return DlabResourceType.getResourceTypeName(id.getString(MongoKeyWords.RESOURCE_TYPE));
-	}
-
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/gcp/GcpBillingDao.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/gcp/GcpBillingDao.java
deleted file mode 100644
index 1105066..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/gcp/GcpBillingDao.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.dao.gcp;
-
-import com.epam.dlab.backendapi.dao.BaseBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import org.bson.Document;
-import org.bson.conversions.Bson;
-
-import java.util.Collections;
-import java.util.List;
-
-import static com.epam.dlab.MongoKeyWords.USAGE_FROM;
-import static com.epam.dlab.MongoKeyWords.USAGE_TO;
-import static com.epam.dlab.backendapi.dao.aws.AwsBillingDAO.DLAB_RESOURCE_TYPE;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_COST;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_DLAB_ID;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_PRODUCT;
-import static com.epam.dlab.model.aws.ReportLine.FIELD_USAGE_DATE;
-import static com.mongodb.client.model.Accumulators.max;
-import static com.mongodb.client.model.Accumulators.min;
-import static com.mongodb.client.model.Accumulators.sum;
-import static com.mongodb.client.model.Aggregates.group;
-import static com.mongodb.client.model.Aggregates.sort;
-
-public class GcpBillingDao extends BaseBillingDAO {
-    @Override
-    protected Bson sortCriteria() {
-        return sort(new Document(ID + "." + USER, 1)
-                .append(ID + "." + FIELD_DLAB_ID, 1)
-                .append(ID + "." + FIELD_PRODUCT, 1));
-    }
-
-    @Override
-    protected Bson groupCriteria() {
-        return group(getGroupingFields(USER, FIELD_DLAB_ID, DLAB_RESOURCE_TYPE, FIELD_PRODUCT,
-                currencyCodeFieldName(), FIELD_PROJECT),
-                sum(FIELD_COST, "$" + FIELD_COST),
-                min(USAGE_FROM, "$" + FIELD_USAGE_DATE),
-                max(USAGE_TO, "$" + FIELD_USAGE_DATE)
-        );
-    }
-
-    @Override
-    protected List<Bson> cloudMatchCriteria(BillingFilter filter) {
-        return Collections.emptyList();
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BaseShape.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BaseShape.java
deleted file mode 100644
index 4a56034..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BaseShape.java
+++ /dev/null
@@ -1,23 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.epam.dlab.backendapi.service.ShapeFormat;
-import com.epam.dlab.dto.UserInstanceStatus;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-import lombok.NoArgsConstructor;
-
-import java.util.Map;
-
-@Data
-@AllArgsConstructor
-@NoArgsConstructor
-public class BaseShape implements ShapeFormat {
-    protected String shape;
-    protected UserInstanceStatus status;
-    protected Map<String, String> tags;
-
-    @Override
-    public String format() {
-        return shape;
-    }
-}
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/Project.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BillingReport.java
similarity index 62%
rename from services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/Project.java
rename to services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BillingReport.java
index 0b40235..2bb2062 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/documents/Project.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BillingReport.java
@@ -17,25 +17,29 @@
  * under the License.
  */
 
-package com.epam.dlab.billing.gcp.documents;
+package com.epam.dlab.backendapi.domain;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
+import lombok.Builder;
 import lombok.Data;
-import org.springframework.data.mongodb.core.mapping.Document;
 
+import java.time.LocalDate;
 import java.util.List;
 
-@Document(collection = "Projects")
 @Data
-public class Project {
-
-	@JsonProperty("name")
-	private String name;
-	private List<Endpoint> endpoints;
-
-
-	@Data
-	public class Endpoint {
-		private final String name;
-	}
+@Builder
+public class BillingReport {
+    private String sbn;
+    private String name;
+    @JsonProperty("report_lines")
+    private List<BillingReportLine> reportLines;
+    @JsonProperty("from")
+    private LocalDate usageDateFrom;
+    @JsonProperty("to")
+    private LocalDate usageDateTo;
+    @JsonProperty("total_cost")
+    private double totalCost;
+    private String currency;
+    @JsonProperty("is_full")
+    private boolean isFull;
 }
diff --git a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingData.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BillingReportLine.java
similarity index 61%
rename from services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingData.java
rename to services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BillingReportLine.java
index 32a98ed..a9cdd12 100644
--- a/services/billing-gcp/src/main/java/com/epam/dlab/billing/gcp/model/BillingData.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/BillingReportLine.java
@@ -17,54 +17,40 @@
  * under the License.
  */
 
-package com.epam.dlab.billing.gcp.model;
+package com.epam.dlab.backendapi.domain;
 
+import com.epam.dlab.dto.UserInstanceStatus;
+import com.epam.dlab.dto.billing.BillingResourceType;
+import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
+import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Builder;
 import lombok.Data;
-import org.springframework.data.annotation.Id;
-import org.springframework.data.mongodb.core.mapping.Document;
-import org.springframework.data.mongodb.core.mapping.Field;
 
 import java.time.LocalDate;
 
 @Data
 @Builder
-@Document(collection = "billing")
-public class BillingData {
-    @Id
-    private String id;
-    private String user;
-    @Field("resource_name")
-    private String displayName;
+@JsonIgnoreProperties(ignoreUnknown = true)
+public class BillingReportLine {
+    private String dlabId;
+    private String application;
+    @JsonProperty("resource_name")
     private String resourceName;
-    @Field("from")
+    private String project;
+    private String endpoint;
+    private String user;
+    @JsonProperty("from")
     private LocalDate usageDateFrom;
-    @Field("to")
+    @JsonProperty("to")
     private LocalDate usageDateTo;
-    @Field("usage_date")
     private String usageDate;
     private String product;
     private String usageType;
     private Double cost;
-    @Field("currency_code")
     private String currency;
-    private String project;
+    @JsonProperty("resource_type")
+    private BillingResourceType resourceType;
+    private UserInstanceStatus status;
+    private String shape;
     private String exploratoryName;
-    private String computationalName;
-    @Field("dlab_id")
-    private String dlabId;
-    @Field("dlab_resource_type")
-    private ResourceType resourceType;
-
-
-    public enum ResourceType {
-        EDGE,
-        SSN,
-        SHARED_BUCKET,
-        SSN_BUCKET,
-        EDGE_BUCKET,
-        VOLUME,
-        EXPLORATORY,
-        COMPUTATIONAL
-    }
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/CreateProjectDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/CreateProjectDTO.java
index bfee5b3..44f8eef 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/CreateProjectDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/CreateProjectDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.domain;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/DataEngineServiceShape.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/DataEngineServiceShape.java
deleted file mode 100644
index 73c0193..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/DataEngineServiceShape.java
+++ /dev/null
@@ -1,37 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.epam.dlab.backendapi.service.ShapeFormat;
-import com.epam.dlab.dto.UserInstanceStatus;
-import lombok.Builder;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.Map;
-
-
-@Slf4j
-public class DataEngineServiceShape extends BaseShape implements ShapeFormat {
-    private static final String DES_NAME_FORMAT = "Master: %s%sSlave:  %d x %s";
-    private String slaveCount;
-    private String slaveShape;
-
-    @Builder
-    public DataEngineServiceShape(String shape, UserInstanceStatus status, String slaveCount, String slaveShape,
-                                  Map<String, String> tags) {
-        super(shape, status, tags);
-        this.slaveCount = slaveCount;
-        this.slaveShape = slaveShape;
-    }
-
-    @Override
-    public String format() {
-        Integer count;
-        try {
-            count = Integer.valueOf(slaveCount);
-        } catch (NumberFormatException e) {
-            log.error("Cannot parse string {} to integer", slaveCount);
-            return StringUtils.EMPTY;
-        }
-        return String.format(DES_NAME_FORMAT, shape, System.lineSeparator(), count - 1, slaveShape);
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/DataEngineShape.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/DataEngineShape.java
deleted file mode 100644
index 8d4c003..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/DataEngineShape.java
+++ /dev/null
@@ -1,34 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.epam.dlab.backendapi.service.ShapeFormat;
-import com.epam.dlab.dto.UserInstanceStatus;
-import lombok.Builder;
-import lombok.extern.slf4j.Slf4j;
-import org.apache.commons.lang3.StringUtils;
-
-import java.util.Map;
-
-@Slf4j
-public class DataEngineShape extends BaseShape implements ShapeFormat {
-    private static final String DE_NAME_FORMAT = "%d x %s";
-    private String slaveCount;
-
-
-    @Builder
-    public DataEngineShape(String shape, UserInstanceStatus status, String slaveCount, Map<String, String> tags) {
-        super(shape, status, tags);
-        this.slaveCount = slaveCount;
-    }
-
-    @Override
-    public String format() {
-        Integer count;
-        try {
-            count = Integer.valueOf(slaveCount);
-        } catch (NumberFormatException e) {
-            log.error("Cannot parse string {} to integer", slaveCount);
-            return StringUtils.EMPTY;
-        }
-        return String.format(DE_NAME_FORMAT, count, shape);
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/EndpointShape.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/EndpointShape.java
deleted file mode 100644
index 5f41cad..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/EndpointShape.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.epam.dlab.dto.UserInstanceStatus;
-import lombok.Builder;
-
-import java.util.Collections;
-
-public class EndpointShape extends BaseShape {
-
-    @Builder
-    public EndpointShape(String shape, UserInstanceStatus status) {
-        super(shape, status, Collections.emptyMap());
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ExploratoryShape.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ExploratoryShape.java
deleted file mode 100644
index 74ceab0..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ExploratoryShape.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.epam.dlab.dto.UserInstanceStatus;
-import lombok.Builder;
-
-import java.util.Map;
-
-public class ExploratoryShape extends BaseShape {
-
-    @Builder
-    public ExploratoryShape(String shape, UserInstanceStatus status, Map<String, String> tags) {
-        super(shape, status, tags);
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectDTO.java
index 9cd0a35..72d6697 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.domain;
 
 import com.epam.dlab.dto.UserInstanceStatus;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectEndpointDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectEndpointDTO.java
index 5a2f2ba..66b1dac 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectEndpointDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectEndpointDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.domain;
 
 import com.epam.dlab.dto.UserInstanceStatus;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectManagingDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectManagingDTO.java
deleted file mode 100644
index 167128e..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/ProjectManagingDTO.java
+++ /dev/null
@@ -1,16 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
-import lombok.AllArgsConstructor;
-import lombok.Data;
-
-
-@Data
-@JsonIgnoreProperties(ignoreUnknown = true)
-@AllArgsConstructor
-public class ProjectManagingDTO {
-    private String name;
-    private final Integer budget;
-    private boolean canBeStopped;
-    private boolean canBeTerminated;
-}
\ No newline at end of file
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/SsnShape.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/SsnShape.java
deleted file mode 100644
index a38a99e..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/SsnShape.java
+++ /dev/null
@@ -1,14 +0,0 @@
-package com.epam.dlab.backendapi.domain;
-
-import com.epam.dlab.dto.UserInstanceStatus;
-import lombok.Builder;
-
-import java.util.Collections;
-
-public class SsnShape extends BaseShape {
-
-    @Builder
-    public SsnShape(String shape, UserInstanceStatus status) {
-        super(shape, status, Collections.emptyMap());
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectBudgetDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectBudgetDTO.java
index 61ec137..f686978 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectBudgetDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectBudgetDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.domain;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectDTO.java
index e4070bb..4622ac5 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/domain/UpdateProjectDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.domain;
 
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/bundles/DlabKeycloakBundle.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/bundles/DlabKeycloakBundle.java
index 5089c1c..d15d6e8 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/bundles/DlabKeycloakBundle.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/bundles/DlabKeycloakBundle.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.dropwizard.bundles;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/listeners/MongoStartupListener.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/listeners/MongoStartupListener.java
index d25e2dd..9d9c9f7 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/listeners/MongoStartupListener.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dropwizard/listeners/MongoStartupListener.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.dropwizard.listeners;
 
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/interceptor/ProjectAdminInterceptor.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/interceptor/ProjectAdminInterceptor.java
new file mode 100644
index 0000000..a536dab
--- /dev/null
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/interceptor/ProjectAdminInterceptor.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.backendapi.interceptor;
+
+import com.epam.dlab.auth.UserInfo;
+import com.epam.dlab.backendapi.annotation.Project;
+import com.epam.dlab.backendapi.annotation.User;
+import com.epam.dlab.backendapi.roles.UserRoles;
+import com.epam.dlab.backendapi.service.ProjectService;
+import com.epam.dlab.exceptions.DlabException;
+import com.epam.dlab.exceptions.ResourceQuoteReachedException;
+import com.google.inject.Inject;
+import lombok.extern.slf4j.Slf4j;
+import org.aopalliance.intercept.MethodInterceptor;
+import org.aopalliance.intercept.MethodInvocation;
+
+import java.lang.reflect.Method;
+import java.lang.reflect.Parameter;
+import java.util.Objects;
+import java.util.stream.IntStream;
+
+@Slf4j
+public class ProjectAdminInterceptor implements MethodInterceptor {
+    @Inject
+    private ProjectService projectService;
+
+    @Override
+    public Object invoke(MethodInvocation mi) throws Throwable {
+        if (grantAccess(mi)) {
+            return mi.proceed();
+        } else {
+            final Method method = mi.getMethod();
+            log.warn("Execution of method {} failed because user doesn't have appropriate permission", method.getName());
+            throw new ResourceQuoteReachedException("Operation can not be finished. User doesn't have appropriate permission");
+        }
+    }
+
+    private boolean grantAccess(MethodInvocation mi) {
+        final Parameter[] parameters = mi.getMethod().getParameters();
+        String project = IntStream.range(0, parameters.length)
+                .filter(i -> Objects.nonNull(parameters[i].getAnnotation(Project.class)))
+                .mapToObj(i -> (String) mi.getArguments()[i])
+                .findAny()
+                .orElseThrow(() -> new DlabException("Project parameter wanted!"));
+        UserInfo userInfo = IntStream.range(0, parameters.length)
+                .filter(i -> Objects.nonNull(parameters[i].getAnnotation(User.class)))
+                .mapToObj(i -> (UserInfo) mi.getArguments()[i])
+                .findAny()
+                .orElseThrow(() -> new DlabException("UserInfo parameter wanted!"));
+
+        return checkPermission(userInfo, project);
+    }
+
+    private boolean checkPermission(UserInfo userInfo, String project) {
+        return UserRoles.isAdmin(userInfo) || UserRoles.isProjectAdmin(userInfo, projectService.get(project).getGroups());
+    }
+}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/AwsSelfServiceModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/AwsSelfServiceModule.java
deleted file mode 100644
index 0fd45de..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/AwsSelfServiceModule.java
+++ /dev/null
@@ -1,83 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.modules;
-
-import com.epam.dlab.backendapi.SelfServiceApplication;
-import com.epam.dlab.backendapi.annotation.BudgetLimited;
-import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
-import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.dao.aws.AwsBillingDAO;
-import com.epam.dlab.backendapi.interceptor.BudgetLimitInterceptor;
-import com.epam.dlab.backendapi.resources.aws.ComputationalResourceAws;
-import com.epam.dlab.backendapi.service.BillingService;
-import com.epam.dlab.backendapi.service.aws.AwsBillingService;
-import com.epam.dlab.cloud.CloudModule;
-import com.epam.dlab.mongo.MongoServiceFactory;
-import com.fiestacabin.dropwizard.quartz.SchedulerConfiguration;
-import com.google.inject.Injector;
-import com.google.inject.Provides;
-import com.google.inject.Singleton;
-import io.dropwizard.setup.Environment;
-import org.quartz.Scheduler;
-import org.quartz.SchedulerException;
-import org.quartz.impl.StdSchedulerFactory;
-
-import static com.google.inject.matcher.Matchers.annotatedWith;
-import static com.google.inject.matcher.Matchers.any;
-
-public class AwsSelfServiceModule extends CloudModule {
-
-	private static final String MONGO_URI_FORMAT = "mongodb://%s:%s@%s:%d/%s";
-	private static final String QUARTZ_MONGO_URI_PROPERTY = "org.quartz.jobStore.mongoUri";
-	private static final String QUARTZ_DB_NAME = "org.quartz.jobStore.dbName";
-
-	@Override
-	protected void configure() {
-		bind(BillingService.class).to(AwsBillingService.class);
-		bind(SchedulerConfiguration.class).toInstance(
-				new SchedulerConfiguration(SelfServiceApplication.class.getPackage().getName()));
-		bind(BillingDAO.class).to(AwsBillingDAO.class);
-		final BudgetLimitInterceptor budgetLimitInterceptor = new BudgetLimitInterceptor();
-		requestInjection(budgetLimitInterceptor);
-		bindInterceptor(any(), annotatedWith(BudgetLimited.class), budgetLimitInterceptor);
-	}
-
-	@Override
-	public void init(Environment environment, Injector injector) {
-		environment.jersey().register(injector.getInstance(ComputationalResourceAws.class));
-//
-
-		/*injector.getInstance(SecurityFactory.class).configure(injector, environment,
-				SelfServiceSecurityAuthenticator.class, injector.getInstance(Authorizer.class));*/
-	}
-
-
-	@Provides
-	@Singleton
-	Scheduler provideScheduler(SelfServiceApplicationConfiguration configuration) throws SchedulerException {
-		final MongoServiceFactory mongoFactory = configuration.getMongoFactory();
-		final String database = mongoFactory.getDatabase();
-		final String mongoUri = String.format(MONGO_URI_FORMAT, mongoFactory.getUsername(), mongoFactory.getPassword(),
-				mongoFactory.getHost(), mongoFactory.getPort(), database);
-		System.setProperty(QUARTZ_MONGO_URI_PROPERTY, mongoUri);
-		System.setProperty(QUARTZ_DB_NAME, database);
-		return StdSchedulerFactory.getDefaultScheduler();
-	}
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/AzureSelfServiceModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/AzureSelfServiceModule.java
deleted file mode 100644
index ee04041..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/AzureSelfServiceModule.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.modules;
-
-import com.epam.dlab.backendapi.SelfServiceApplication;
-import com.epam.dlab.backendapi.annotation.BudgetLimited;
-import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
-import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.dao.azure.AzureBillingDAO;
-import com.epam.dlab.backendapi.interceptor.BudgetLimitInterceptor;
-import com.epam.dlab.backendapi.resources.azure.ComputationalResourceAzure;
-import com.epam.dlab.backendapi.service.BillingService;
-import com.epam.dlab.backendapi.service.azure.AzureBillingService;
-import com.epam.dlab.cloud.CloudModule;
-import com.epam.dlab.mongo.MongoServiceFactory;
-import com.fiestacabin.dropwizard.quartz.SchedulerConfiguration;
-import com.google.inject.Injector;
-import com.google.inject.Provides;
-import com.google.inject.Singleton;
-import io.dropwizard.setup.Environment;
-import lombok.extern.slf4j.Slf4j;
-import org.quartz.Scheduler;
-import org.quartz.SchedulerException;
-import org.quartz.impl.StdSchedulerFactory;
-
-import static com.google.inject.matcher.Matchers.annotatedWith;
-import static com.google.inject.matcher.Matchers.any;
-
-@Slf4j
-public class AzureSelfServiceModule extends CloudModule {
-
-	private static final String MONGO_URI_FORMAT = "mongodb://%s:%s@%s:%d/%s";
-	private static final String QUARTZ_MONGO_URI_PROPERTY = "org.quartz.jobStore.mongoUri";
-	private static final String QUARTZ_DB_NAME = "org.quartz.jobStore.dbName";
-
-	@Override
-	protected void configure() {
-		bind(BillingService.class).to(AzureBillingService.class);
-		bind(SchedulerConfiguration.class).toInstance(
-				new SchedulerConfiguration(SelfServiceApplication.class.getPackage().getName()));
-		bind(BillingDAO.class).to(AzureBillingDAO.class);
-		final BudgetLimitInterceptor budgetLimitInterceptor = new BudgetLimitInterceptor();
-		requestInjection(budgetLimitInterceptor);
-		bindInterceptor(any(), annotatedWith(BudgetLimited.class), budgetLimitInterceptor);
-	}
-
-	@Override
-	public void init(Environment environment, Injector injector) {
-		environment.jersey().register(injector.getInstance(ComputationalResourceAzure.class));
-
-	}
-
-	@Provides
-	@Singleton
-	Scheduler provideScheduler(SelfServiceApplicationConfiguration configuration) throws SchedulerException {
-		final MongoServiceFactory mongoFactory = configuration.getMongoFactory();
-		final String database = mongoFactory.getDatabase();
-		final String mongoUri = String.format(MONGO_URI_FORMAT, mongoFactory.getUsername(), mongoFactory.getPassword(),
-				mongoFactory.getHost(), mongoFactory.getPort(), database);
-		System.setProperty(QUARTZ_MONGO_URI_PROPERTY, mongoUri);
-		System.setProperty(QUARTZ_DB_NAME, database);
-		return StdSchedulerFactory.getDefaultScheduler();
-	}
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/CloudProviderModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/CloudProviderModule.java
index 7ea2739..8b41baf 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/CloudProviderModule.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/CloudProviderModule.java
@@ -21,12 +21,10 @@
 
 import com.epam.dlab.backendapi.SelfServiceApplication;
 import com.epam.dlab.backendapi.annotation.BudgetLimited;
+import com.epam.dlab.backendapi.annotation.ProjectAdmin;
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
-import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.dao.aws.AwsBillingDAO;
-import com.epam.dlab.backendapi.dao.azure.AzureBillingDAO;
-import com.epam.dlab.backendapi.dao.gcp.GcpBillingDao;
 import com.epam.dlab.backendapi.interceptor.BudgetLimitInterceptor;
+import com.epam.dlab.backendapi.interceptor.ProjectAdminInterceptor;
 import com.epam.dlab.backendapi.resources.BillingResource;
 import com.epam.dlab.backendapi.resources.aws.ComputationalResourceAws;
 import com.epam.dlab.backendapi.resources.azure.ComputationalResourceAzure;
@@ -35,9 +33,7 @@
 import com.epam.dlab.backendapi.service.BillingService;
 import com.epam.dlab.backendapi.service.InfrastructureInfoService;
 import com.epam.dlab.backendapi.service.InfrastructureTemplateService;
-import com.epam.dlab.backendapi.service.aws.AwsBillingService;
-import com.epam.dlab.backendapi.service.azure.AzureBillingService;
-import com.epam.dlab.backendapi.service.gcp.GcpBillingService;
+import com.epam.dlab.backendapi.service.impl.BillingServiceImpl;
 import com.epam.dlab.backendapi.service.impl.InfrastructureInfoServiceImpl;
 import com.epam.dlab.backendapi.service.impl.InfrastructureTemplateServiceImpl;
 import com.epam.dlab.cloud.CloudModule;
@@ -68,15 +64,18 @@
 
     @Override
     protected void configure() {
-        bindBilling();
+        bind(BillingService.class).to(BillingServiceImpl.class);
         bind(InfrastructureInfoService.class).to(InfrastructureInfoServiceImpl.class);
         bind(InfrastructureTemplateService.class).to(InfrastructureTemplateServiceImpl.class);
         bind(SchedulerConfiguration.class).toInstance(
                 new SchedulerConfiguration(SelfServiceApplication.class.getPackage().getName()));
 
         final BudgetLimitInterceptor budgetLimitInterceptor = new BudgetLimitInterceptor();
+        final ProjectAdminInterceptor projectAdminInterceptor = new ProjectAdminInterceptor();
         requestInjection(budgetLimitInterceptor);
+        requestInjection(projectAdminInterceptor);
         bindInterceptor(any(), annotatedWith(BudgetLimited.class), budgetLimitInterceptor);
+        bindInterceptor(any(), annotatedWith(ProjectAdmin.class), projectAdminInterceptor);
     }
 
     @Override
@@ -101,23 +100,4 @@
         System.setProperty(QUARTZ_DB_NAME, database);
         return StdSchedulerFactory.getDefaultScheduler();
     }
-
-    private void bindBilling() {
-        switch (configuration.getCloudProvider()) {
-            case AWS:
-                bind(BillingService.class).to(AwsBillingService.class);
-                bind(BillingDAO.class).to(AwsBillingDAO.class);
-                break;
-            case AZURE:
-                bind(BillingService.class).to(AzureBillingService.class);
-                bind(BillingDAO.class).to(AzureBillingDAO.class);
-                break;
-            case GCP:
-                bind(BillingService.class).to(GcpBillingService.class);
-                bind(BillingDAO.class).to(GcpBillingDao.class);
-                break;
-            default:
-                throw new UnsupportedOperationException("Unsupported cloud provider " + configuration.getCloudProvider());
-        }
-    }
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java
index cf08d12..9275319 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java
@@ -23,9 +23,66 @@
 import com.epam.dlab.auth.contract.SecurityAPI;
 import com.epam.dlab.backendapi.auth.SelfServiceSecurityAuthorizer;
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
-import com.epam.dlab.backendapi.dao.*;
-import com.epam.dlab.backendapi.service.*;
-import com.epam.dlab.backendapi.service.impl.*;
+import com.epam.dlab.backendapi.dao.BackupDao;
+import com.epam.dlab.backendapi.dao.BackupDaoImpl;
+import com.epam.dlab.backendapi.dao.BaseBillingDAO;
+import com.epam.dlab.backendapi.dao.BillingDAO;
+import com.epam.dlab.backendapi.dao.EndpointDAO;
+import com.epam.dlab.backendapi.dao.EndpointDAOImpl;
+import com.epam.dlab.backendapi.dao.ImageExploratoryDao;
+import com.epam.dlab.backendapi.dao.ImageExploratoryDaoImpl;
+import com.epam.dlab.backendapi.dao.ProjectDAO;
+import com.epam.dlab.backendapi.dao.ProjectDAOImpl;
+import com.epam.dlab.backendapi.dao.UserGroupDao;
+import com.epam.dlab.backendapi.dao.UserGroupDaoImpl;
+import com.epam.dlab.backendapi.dao.UserRoleDao;
+import com.epam.dlab.backendapi.dao.UserRoleDaoImpl;
+import com.epam.dlab.backendapi.service.AccessKeyService;
+import com.epam.dlab.backendapi.service.ApplicationSettingService;
+import com.epam.dlab.backendapi.service.ApplicationSettingServiceImpl;
+import com.epam.dlab.backendapi.service.BackupService;
+import com.epam.dlab.backendapi.service.ComputationalService;
+import com.epam.dlab.backendapi.service.EndpointService;
+import com.epam.dlab.backendapi.service.EnvironmentService;
+import com.epam.dlab.backendapi.service.ExploratoryService;
+import com.epam.dlab.backendapi.service.ExternalLibraryService;
+import com.epam.dlab.backendapi.service.GitCredentialService;
+import com.epam.dlab.backendapi.service.GuacamoleService;
+import com.epam.dlab.backendapi.service.ImageExploratoryService;
+import com.epam.dlab.backendapi.service.InactivityService;
+import com.epam.dlab.backendapi.service.KeycloakService;
+import com.epam.dlab.backendapi.service.KeycloakServiceImpl;
+import com.epam.dlab.backendapi.service.LibraryService;
+import com.epam.dlab.backendapi.service.ProjectService;
+import com.epam.dlab.backendapi.service.ReuploadKeyService;
+import com.epam.dlab.backendapi.service.SchedulerJobService;
+import com.epam.dlab.backendapi.service.SecurityService;
+import com.epam.dlab.backendapi.service.SecurityServiceImpl;
+import com.epam.dlab.backendapi.service.SystemInfoService;
+import com.epam.dlab.backendapi.service.TagService;
+import com.epam.dlab.backendapi.service.TagServiceImpl;
+import com.epam.dlab.backendapi.service.UserGroupService;
+import com.epam.dlab.backendapi.service.UserRoleService;
+import com.epam.dlab.backendapi.service.UserRoleServiceImpl;
+import com.epam.dlab.backendapi.service.UserSettingService;
+import com.epam.dlab.backendapi.service.UserSettingServiceImpl;
+import com.epam.dlab.backendapi.service.impl.AccessKeyServiceImpl;
+import com.epam.dlab.backendapi.service.impl.BackupServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ComputationalServiceImpl;
+import com.epam.dlab.backendapi.service.impl.EndpointServiceImpl;
+import com.epam.dlab.backendapi.service.impl.EnvironmentServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ExploratoryServiceImpl;
+import com.epam.dlab.backendapi.service.impl.GitCredentialServiceImpl;
+import com.epam.dlab.backendapi.service.impl.GuacamoleServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ImageExploratoryServiceImpl;
+import com.epam.dlab.backendapi.service.impl.InactivityServiceImpl;
+import com.epam.dlab.backendapi.service.impl.LibraryServiceImpl;
+import com.epam.dlab.backendapi.service.impl.MavenCentralLibraryService;
+import com.epam.dlab.backendapi.service.impl.ProjectServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ReuploadKeyServiceImpl;
+import com.epam.dlab.backendapi.service.impl.SchedulerJobServiceImpl;
+import com.epam.dlab.backendapi.service.impl.SystemInfoServiceImpl;
+import com.epam.dlab.backendapi.service.impl.UserGroupServiceImpl;
 import com.epam.dlab.constants.ServiceConsts;
 import com.epam.dlab.mongo.MongoService;
 import com.epam.dlab.rest.client.RESTService;
@@ -75,6 +132,9 @@
 		bind(RESTService.class).annotatedWith(Names.named(ServiceConsts.PROVISIONING_SERVICE_NAME))
 				.toInstance(configuration.getProvisioningFactory()
 						.build(environment, ServiceConsts.PROVISIONING_SERVICE_NAME));
+		bind(RESTService.class).annotatedWith(Names.named(ServiceConsts.BILLING_SERVICE_NAME))
+				.toInstance(configuration.getBillingFactory()
+						.build(environment, ServiceConsts.BILLING_SERVICE_NAME));
 		bind(ImageExploratoryService.class).to(ImageExploratoryServiceImpl.class);
 		bind(ImageExploratoryDao.class).to(ImageExploratoryDaoImpl.class);
 		bind(BackupService.class).to(BackupServiceImpl.class);
@@ -106,6 +166,7 @@
 		bind(EndpointDAO.class).to(EndpointDAOImpl.class);
 		bind(ProjectService.class).to(ProjectServiceImpl.class);
 		bind(ProjectDAO.class).to(ProjectDAOImpl.class);
+		bind(BillingDAO.class).to(BaseBillingDAO.class);
 	}
 
 	private void configureCors(Environment environment) {
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/GcpSelfServiceModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/GcpSelfServiceModule.java
deleted file mode 100644
index 276238e..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/GcpSelfServiceModule.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.modules;
-
-import com.epam.dlab.backendapi.SelfServiceApplication;
-import com.epam.dlab.backendapi.annotation.BudgetLimited;
-import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
-import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.dao.gcp.GcpBillingDao;
-import com.epam.dlab.backendapi.interceptor.BudgetLimitInterceptor;
-import com.epam.dlab.backendapi.resources.gcp.ComputationalResourceGcp;
-import com.epam.dlab.backendapi.resources.gcp.GcpOauthResource;
-import com.epam.dlab.backendapi.service.BillingService;
-import com.epam.dlab.backendapi.service.gcp.GcpBillingService;
-import com.epam.dlab.cloud.CloudModule;
-import com.epam.dlab.mongo.MongoServiceFactory;
-import com.fiestacabin.dropwizard.quartz.SchedulerConfiguration;
-import com.google.inject.Injector;
-import com.google.inject.Provides;
-import com.google.inject.Singleton;
-import io.dropwizard.setup.Environment;
-import org.quartz.Scheduler;
-import org.quartz.SchedulerException;
-import org.quartz.impl.StdSchedulerFactory;
-
-import static com.google.inject.matcher.Matchers.annotatedWith;
-import static com.google.inject.matcher.Matchers.any;
-
-public class GcpSelfServiceModule extends CloudModule {
-
-    private static final String MONGO_URI_FORMAT = "mongodb://%s:%s@%s:%d/%s";
-    private static final String QUARTZ_MONGO_URI_PROPERTY = "org.quartz.jobStore.mongoUri";
-    private static final String QUARTZ_DB_NAME = "org.quartz.jobStore.dbName";
-
-    @Override
-    @SuppressWarnings("unchecked")
-    public void init(Environment environment, Injector injector) {
-		environment.jersey().register(injector.getInstance(ComputationalResourceGcp.class));
-		if (injector.getInstance(SelfServiceApplicationConfiguration.class).isGcpOuauth2AuthenticationEnabled()) {
-			environment.jersey().register(injector.getInstance(GcpOauthResource.class));
-		}
-
-    }
-
-    @Override
-    protected void configure() {
-        bind(BillingService.class).to(GcpBillingService.class);
-        bind(BillingDAO.class).to(GcpBillingDao.class);
-        bind(SchedulerConfiguration.class).toInstance(
-                new SchedulerConfiguration(SelfServiceApplication.class.getPackage().getName()));
-        final BudgetLimitInterceptor budgetLimitInterceptor = new BudgetLimitInterceptor();
-        requestInjection(budgetLimitInterceptor);
-        bindInterceptor(any(), annotatedWith(BudgetLimited.class), budgetLimitInterceptor);
-    }
-
-    @Provides
-    @Singleton
-    Scheduler provideScheduler(SelfServiceApplicationConfiguration configuration) throws SchedulerException {
-        final MongoServiceFactory mongoFactory = configuration.getMongoFactory();
-        final String database = mongoFactory.getDatabase();
-        final String mongoUri = String.format(MONGO_URI_FORMAT, mongoFactory.getUsername(), mongoFactory.getPassword(),
-                mongoFactory.getHost(), mongoFactory.getPort(), database);
-        System.setProperty(QUARTZ_MONGO_URI_PROPERTY, mongoUri);
-        System.setProperty(QUARTZ_DB_NAME, database);
-        return StdSchedulerFactory.getDefaultScheduler();
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ModuleFactory.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ModuleFactory.java
index 1480fe7..eb8d3bc 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ModuleFactory.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ModuleFactory.java
@@ -48,17 +48,4 @@
 	public static CloudModule getCloudProviderModule(SelfServiceApplicationConfiguration configuration) {
 		return new CloudProviderModule(configuration);
 	}
-
-	private static CloudModule getCloudModule(SelfServiceApplicationConfiguration configuration) {
-		switch (configuration.getCloudProvider()) {
-			case AWS:
-				return new AwsSelfServiceModule();
-			case AZURE:
-				return new AzureSelfServiceModule();
-			case GCP:
-				return new GcpSelfServiceModule();
-			default:
-				throw new UnsupportedOperationException("Unsupported cloud provider " + configuration.getCloudProvider());
-		}
-	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java
index 0d0ae1d..d20adbf 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java
@@ -20,11 +20,68 @@
 package com.epam.dlab.backendapi.modules;
 
 import com.epam.dlab.ModuleBase;
-import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
 import com.epam.dlab.backendapi.auth.SelfServiceSecurityAuthorizer;
-import com.epam.dlab.backendapi.dao.*;
-import com.epam.dlab.backendapi.service.*;
-import com.epam.dlab.backendapi.service.impl.*;
+import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
+import com.epam.dlab.backendapi.dao.BackupDao;
+import com.epam.dlab.backendapi.dao.BackupDaoImpl;
+import com.epam.dlab.backendapi.dao.BaseBillingDAO;
+import com.epam.dlab.backendapi.dao.BillingDAO;
+import com.epam.dlab.backendapi.dao.EndpointDAO;
+import com.epam.dlab.backendapi.dao.EndpointDAOImpl;
+import com.epam.dlab.backendapi.dao.ImageExploratoryDao;
+import com.epam.dlab.backendapi.dao.ImageExploratoryDaoImpl;
+import com.epam.dlab.backendapi.dao.ProjectDAO;
+import com.epam.dlab.backendapi.dao.ProjectDAOImpl;
+import com.epam.dlab.backendapi.dao.UserGroupDao;
+import com.epam.dlab.backendapi.dao.UserGroupDaoImpl;
+import com.epam.dlab.backendapi.dao.UserRoleDao;
+import com.epam.dlab.backendapi.dao.UserRoleDaoImpl;
+import com.epam.dlab.backendapi.service.AccessKeyService;
+import com.epam.dlab.backendapi.service.ApplicationSettingService;
+import com.epam.dlab.backendapi.service.ApplicationSettingServiceImpl;
+import com.epam.dlab.backendapi.service.BackupService;
+import com.epam.dlab.backendapi.service.ComputationalService;
+import com.epam.dlab.backendapi.service.EndpointService;
+import com.epam.dlab.backendapi.service.EnvironmentService;
+import com.epam.dlab.backendapi.service.ExploratoryService;
+import com.epam.dlab.backendapi.service.ExternalLibraryService;
+import com.epam.dlab.backendapi.service.GitCredentialService;
+import com.epam.dlab.backendapi.service.GuacamoleService;
+import com.epam.dlab.backendapi.service.ImageExploratoryService;
+import com.epam.dlab.backendapi.service.InactivityService;
+import com.epam.dlab.backendapi.service.KeycloakService;
+import com.epam.dlab.backendapi.service.KeycloakServiceImpl;
+import com.epam.dlab.backendapi.service.LibraryService;
+import com.epam.dlab.backendapi.service.ProjectService;
+import com.epam.dlab.backendapi.service.ReuploadKeyService;
+import com.epam.dlab.backendapi.service.SchedulerJobService;
+import com.epam.dlab.backendapi.service.SecurityService;
+import com.epam.dlab.backendapi.service.SecurityServiceImpl;
+import com.epam.dlab.backendapi.service.SystemInfoService;
+import com.epam.dlab.backendapi.service.TagService;
+import com.epam.dlab.backendapi.service.TagServiceImpl;
+import com.epam.dlab.backendapi.service.UserGroupService;
+import com.epam.dlab.backendapi.service.UserRoleService;
+import com.epam.dlab.backendapi.service.UserRoleServiceImpl;
+import com.epam.dlab.backendapi.service.UserSettingService;
+import com.epam.dlab.backendapi.service.UserSettingServiceImpl;
+import com.epam.dlab.backendapi.service.impl.AccessKeyServiceImpl;
+import com.epam.dlab.backendapi.service.impl.BackupServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ComputationalServiceImpl;
+import com.epam.dlab.backendapi.service.impl.EndpointServiceImpl;
+import com.epam.dlab.backendapi.service.impl.EnvironmentServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ExploratoryServiceImpl;
+import com.epam.dlab.backendapi.service.impl.GitCredentialServiceImpl;
+import com.epam.dlab.backendapi.service.impl.GuacamoleServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ImageExploratoryServiceImpl;
+import com.epam.dlab.backendapi.service.impl.InactivityServiceImpl;
+import com.epam.dlab.backendapi.service.impl.LibraryServiceImpl;
+import com.epam.dlab.backendapi.service.impl.MavenCentralLibraryService;
+import com.epam.dlab.backendapi.service.impl.ProjectServiceImpl;
+import com.epam.dlab.backendapi.service.impl.ReuploadKeyServiceImpl;
+import com.epam.dlab.backendapi.service.impl.SchedulerJobServiceImpl;
+import com.epam.dlab.backendapi.service.impl.SystemInfoServiceImpl;
+import com.epam.dlab.backendapi.service.impl.UserGroupServiceImpl;
 import com.epam.dlab.constants.ServiceConsts;
 import com.epam.dlab.mongo.MongoService;
 import com.epam.dlab.rest.client.RESTService;
@@ -66,6 +123,9 @@
 		bind(RESTService.class).annotatedWith(Names.named(ServiceConsts.PROVISIONING_SERVICE_NAME))
 				.toInstance(configuration.getProvisioningFactory().build(environment, ServiceConsts
 						.PROVISIONING_SERVICE_NAME));
+		bind(RESTService.class).annotatedWith(Names.named(ServiceConsts.BILLING_SERVICE_NAME))
+				.toInstance(configuration.getBillingFactory()
+						.build(environment, ServiceConsts.BILLING_SERVICE_NAME));
 		bind(ImageExploratoryService.class).to(ImageExploratoryServiceImpl.class);
 		bind(ImageExploratoryDao.class).to(ImageExploratoryDaoImpl.class);
 		bind(BackupService.class).to(BackupServiceImpl.class);
@@ -95,6 +155,7 @@
 		bind(EndpointDAO.class).to(EndpointDAOImpl.class);
 		bind(ProjectService.class).to(ProjectServiceImpl.class);
 		bind(ProjectDAO.class).to(ProjectDAOImpl.class);
+		bind(BillingDAO.class).to(BaseBillingDAO.class);
 		bind(TagService.class).to(TagServiceImpl.class);
 		bind(SecurityService.class).to(SecurityServiceImpl.class);
 		bind(KeycloakService.class).to(KeycloakServiceImpl.class);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/BillingResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/BillingResource.java
index 0d27fad..1916a38 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/BillingResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/BillingResource.java
@@ -24,7 +24,6 @@
 import com.epam.dlab.backendapi.service.BillingService;
 import com.google.inject.Inject;
 import io.dropwizard.auth.Auth;
-import org.bson.Document;
 
 import javax.validation.Valid;
 import javax.validation.constraints.NotNull;
@@ -50,17 +49,16 @@
     @POST
     @Path("/report")
     @Produces(MediaType.APPLICATION_JSON)
-    public Document getBillingReport(@Auth UserInfo userInfo, @Valid @NotNull BillingFilter formDTO) {
-        return billingService.getBillingReport(userInfo, formDTO);
+    public Response getBillingReport(@Auth UserInfo userInfo, @Valid @NotNull BillingFilter filter) {
+        return Response.ok(billingService.getBillingReport(userInfo, filter)).build();
     }
 
     @POST
     @Path("/report/download")
     @Produces(MediaType.APPLICATION_OCTET_STREAM)
-    public Response downloadBillingReport(@Auth UserInfo userInfo, @Valid @NotNull BillingFilter formDTO) {
-        return Response.ok(billingService.downloadReport(userInfo, formDTO))
-                .header(HttpHeaders.CONTENT_DISPOSITION,
-                        "attachment; filename=\"" + billingService.getReportFileName(userInfo, formDTO) + "\"")
+    public Response downloadBillingReport(@Auth UserInfo userInfo, @Valid @NotNull BillingFilter filter) {
+        return Response.ok(billingService.downloadReport(userInfo, filter))
+                .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"billing-report.csv\"")
                 .build();
     }
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/EnvironmentResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/EnvironmentResource.java
index ae60e39..3553ff4 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/EnvironmentResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/EnvironmentResource.java
@@ -49,29 +49,11 @@
 	}
 
 	@GET
-	@Path("user")
-	@Produces(MediaType.APPLICATION_JSON)
-	public Response getUsersWithActiveEnv(@Auth UserInfo userInfo) {
-		log.debug("User {} requested information about active environments", userInfo.getName());
-		return Response.ok(environmentService.getUsers()).build();
-	}
-
-	@GET
 	@Path("all")
 	@Produces(MediaType.APPLICATION_JSON)
 	public Response getAllEnv(@Auth UserInfo userInfo) {
 		log.debug("Admin {} requested information about all user's environment", userInfo.getName());
-		return Response.ok(environmentService.getAllEnv()).build();
-	}
-
-	@POST
-	@Consumes(MediaType.TEXT_PLAIN)
-	@Produces(MediaType.APPLICATION_JSON)
-	@Path("stop/{projectName}")
-	public Response stopEnv(@Auth UserInfo userInfo, @NotEmpty String user, @PathParam("projectName") String projectName) {
-		log.info("User {} is stopping {} environment", userInfo.getName(), user);
-		environmentService.stopEnvironment(userInfo, user, projectName);
-		return Response.ok().build();
+		return Response.ok(environmentService.getAllEnv(userInfo)).build();
 	}
 
 	@POST
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResource.java
index db8197f..c8952f3 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResource.java
@@ -22,13 +22,17 @@
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.resources.dto.HealthStatusPageDTO;
 import com.epam.dlab.backendapi.resources.dto.ProjectInfrastructureInfo;
-import com.epam.dlab.backendapi.roles.UserRoles;
 import com.epam.dlab.backendapi.service.InfrastructureInfoService;
 import com.google.inject.Inject;
 import io.dropwizard.auth.Auth;
 import lombok.extern.slf4j.Slf4j;
 
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.util.List;
@@ -66,8 +70,7 @@
 	@Path("/status")
 	public HealthStatusPageDTO status(@Auth UserInfo userInfo,
 									  @QueryParam("full") @DefaultValue("0") int fullReport) {
-		return infrastructureInfoService
-				.getHeathStatus(userInfo, fullReport != 0, UserRoles.isAdmin(userInfo));
+		return infrastructureInfoService.getHeathStatus(userInfo, fullReport != 0);
 	}
 
 	/**
@@ -78,7 +81,7 @@
 	@GET
 	@Path("/info")
 	public List<ProjectInfrastructureInfo> getUserResources(@Auth UserInfo userInfo) {
-		return infrastructureInfoService.getUserResources(userInfo.getName());
+		return infrastructureInfoService.getUserResources(userInfo);
 
 	}
 
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/KeycloakResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/KeycloakResource.java
index 818dfe1..836597b 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/KeycloakResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/KeycloakResource.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java
index 93e42bb..6af8729 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/ProjectResource.java
@@ -1,7 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.domain.*;
+import com.epam.dlab.backendapi.domain.CreateProjectDTO;
+import com.epam.dlab.backendapi.domain.ProjectDTO;
+import com.epam.dlab.backendapi.domain.ProjectEndpointDTO;
+import com.epam.dlab.backendapi.domain.UpdateProjectBudgetDTO;
+import com.epam.dlab.backendapi.domain.UpdateProjectDTO;
 import com.epam.dlab.backendapi.resources.dto.ProjectActionFormDTO;
 import com.epam.dlab.backendapi.service.AccessKeyService;
 import com.epam.dlab.backendapi.service.ProjectService;
@@ -20,7 +43,15 @@
 import javax.annotation.security.RolesAllowed;
 import javax.validation.Valid;
 import javax.validation.constraints.NotNull;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DefaultValue;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.Context;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
@@ -58,7 +89,7 @@
 	})
 	@POST
 	@Consumes(MediaType.APPLICATION_JSON)
-	@RolesAllowed("/api/project")
+	@RolesAllowed("/api/project/create")
 	public Response createProject(@Parameter(hidden = true) @Auth UserInfo userInfo,
 								  @Valid CreateProjectDTO projectDTO) {
 		projectService.create(userInfo, new ProjectDTO(projectDTO.getName(), projectDTO.getGroups(),
@@ -140,27 +171,9 @@
 	@GET
 	@Produces(MediaType.APPLICATION_JSON)
 	@RolesAllowed("/api/project")
-	public Response getProjects(@Parameter(hidden = true) @Auth UserInfo userInfo,
-								@Parameter(description = "Project name")
-								@PathParam("name") String name) {
+	public Response getProjects(@Parameter(hidden = true) @Auth UserInfo userInfo) {
 		return Response
-				.ok(projectService.getProjects())
-				.build();
-	}
-
-	@Operation(summary = "Get available projects for managing", tags = "project")
-	@ApiResponses({
-			@ApiResponse(responseCode = "200", description = "Return information about projects",
-					content = @Content(mediaType = MediaType.APPLICATION_JSON, schema =
-					@Schema(implementation = ProjectManagingDTO.class))),
-	})
-	@GET
-	@Path("managing")
-	@Produces(MediaType.APPLICATION_JSON)
-	@RolesAllowed("/api/project")
-	public Response getProjectsForManaging(@Parameter(hidden = true) @Auth UserInfo userInfo) {
-		return Response
-				.ok(projectService.getProjectsForManaging())
+				.ok(projectService.getProjects(userInfo))
 				.build();
 	}
 
@@ -193,7 +206,7 @@
 	@PUT
 	@RolesAllowed("/api/project")
 	public Response updateProject(@Parameter(hidden = true) @Auth UserInfo userInfo, UpdateProjectDTO projectDTO) {
-		projectService.update(userInfo, projectDTO);
+		projectService.update(userInfo, projectDTO, projectDTO.getName());
 		return Response.ok().build();
 	}
 
@@ -213,16 +226,6 @@
 		return Response.ok().build();
 	}
 
-	@DELETE
-	@Path("{name}")
-	@RolesAllowed("/api/project")
-	public Response removeProject(
-			@Parameter(hidden = true) @Auth UserInfo userInfo,
-			@PathParam("name") String name) {
-		projectService.terminateProject(userInfo, name);
-		return Response.ok().build();
-	}
-
 	@Operation(summary = "Updates project budget", tags = "project")
 	@ApiResponses({
 			@ApiResponse(responseCode = "200", description = "Project budget is successfully updated"),
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserGroupResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserGroupResource.java
index 154ddc2..8cd3381 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserGroupResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserGroupResource.java
@@ -20,24 +20,26 @@
 
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.resources.dto.GroupDTO;
-import com.epam.dlab.backendapi.resources.dto.UpdateRoleGroupDto;
-import com.epam.dlab.backendapi.resources.dto.UpdateUserGroupDto;
 import com.epam.dlab.backendapi.service.UserGroupService;
 import com.google.inject.Inject;
 import io.dropwizard.auth.Auth;
 import lombok.extern.slf4j.Slf4j;
-import org.hibernate.validator.constraints.NotEmpty;
 
 import javax.annotation.security.RolesAllowed;
 import javax.validation.Valid;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.DELETE;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.PUT;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.util.Set;
 
 @Slf4j
 @Path("group")
-@RolesAllowed("/roleManagement")
 @Consumes(MediaType.APPLICATION_JSON)
 @Produces(MediaType.APPLICATION_JSON)
 public class UserGroupResource {
@@ -51,71 +53,34 @@
 
 
 	@POST
-	public Response createGroup(@Auth UserInfo userInfo,
-								@Valid GroupDTO dto) {
+	@RolesAllowed("/roleManagement/create")
+	public Response createGroup(@Auth UserInfo userInfo, @Valid GroupDTO dto) {
 		log.debug("Creating new group {}", dto.getName());
 		userGroupService.createGroup(dto.getName(), dto.getRoleIds(), dto.getUsers());
 		return Response.ok().build();
 	}
 
 	@PUT
+	@RolesAllowed("/roleManagement")
 	public Response updateGroup(@Auth UserInfo userInfo, @Valid GroupDTO dto) {
 		log.debug("Updating group {}", dto.getName());
-		userGroupService.updateGroup(dto.getName(), dto.getRoleIds(), dto.getUsers());
+		userGroupService.updateGroup(userInfo, dto.getName(), dto.getRoleIds(), dto.getUsers());
 		return Response.ok().build();
 	}
 
 	@GET
+	@RolesAllowed("/roleManagement")
 	public Response getGroups(@Auth UserInfo userInfo) {
 		log.debug("Getting all groups for admin {}...", userInfo.getName());
-		return Response.ok(userGroupService.getAggregatedRolesByGroup()).build();
-	}
-
-	@PUT
-	@Path("role")
-	public Response updateRolesForGroup(@Auth UserInfo userInfo, @Valid UpdateRoleGroupDto updateRoleGroupDto) {
-		log.info("Admin {} is trying to add new group {} to roles {}", userInfo.getName(),
-				updateRoleGroupDto.getGroup(), updateRoleGroupDto.getRoleIds());
-		userGroupService.updateRolesForGroup(updateRoleGroupDto.getGroup(), updateRoleGroupDto.getRoleIds());
-		return Response.ok().build();
-	}
-
-	@DELETE
-	@Path("role")
-	public Response deleteGroupFromRole(@Auth UserInfo userInfo,
-										@QueryParam("group") @NotEmpty Set<String> groups,
-										@QueryParam("roleId") @NotEmpty Set<String> roleIds) {
-		log.info("Admin {} is trying to delete groups {} from roles {}", userInfo.getName(), groups, roleIds);
-		userGroupService.removeGroupFromRole(groups, roleIds);
-		return Response.ok().build();
+		return Response.ok(userGroupService.getAggregatedRolesByGroup(userInfo)).build();
 	}
 
 	@DELETE
 	@Path("{id}")
-	public Response deleteGroup(@Auth UserInfo userInfo,
-								@PathParam("id") String group) {
+	@RolesAllowed("/roleManagement/delete")
+	public Response deleteGroup(@Auth UserInfo userInfo, @PathParam("id") String group) {
 		log.info("Admin {} is trying to delete group {} from application", userInfo.getName(), group);
 		userGroupService.removeGroup(group);
 		return Response.ok().build();
 	}
-
-	@PUT
-	@Path("user")
-	public Response addUserToGroup(@Auth UserInfo userInfo,
-								   @Valid UpdateUserGroupDto updateUserGroupDto) {
-		log.info("Admin {} is trying to add new users {} to group {}", userInfo.getName(),
-				updateUserGroupDto.getUsers(), updateUserGroupDto.getGroup());
-		userGroupService.addUsersToGroup(updateUserGroupDto.getGroup(), updateUserGroupDto.getUsers());
-		return Response.ok().build();
-	}
-
-	@DELETE
-	@Path("user")
-	public Response deleteUserFromGroup(@Auth UserInfo userInfo,
-										@QueryParam("user") @NotEmpty String user,
-										@QueryParam("group") @NotEmpty String group) {
-		log.info("Admin {} is trying to delete user {} from group {}", userInfo.getName(), user, group);
-		userGroupService.removeUserFromGroup(group, user);
-		return Response.ok().build();
-	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserRoleResource.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserRoleResource.java
index b9d0619..52ad739 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserRoleResource.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/UserRoleResource.java
@@ -26,7 +26,11 @@
 import lombok.extern.slf4j.Slf4j;
 
 import javax.annotation.security.RolesAllowed;
-import javax.ws.rs.*;
+import javax.ws.rs.Consumes;
+import javax.ws.rs.GET;
+import javax.ws.rs.POST;
+import javax.ws.rs.Path;
+import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ProjectCallback.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ProjectCallback.java
index 59c31da..4500e5e 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ProjectCallback.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ProjectCallback.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources.callback;
 
 import com.epam.dlab.backendapi.dao.ProjectDAO;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/BillingFilter.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/BillingFilter.java
index f820169..52363a8 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/BillingFilter.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/BillingFilter.java
@@ -23,29 +23,36 @@
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.Data;
+import lombok.NoArgsConstructor;
+import lombok.NonNull;
 
 import java.util.Collections;
 import java.util.List;
 
 @Data
+@NoArgsConstructor
 @JsonIgnoreProperties(ignoreUnknown = true)
 public class BillingFilter {
-	@JsonProperty
-	protected List<String> user;
+	@NonNull
+	private List<String> users;
+	@NonNull
 	@JsonProperty("dlab_id")
-	protected String dlabId;
-	@JsonProperty("resource_type")
-	protected List<String> resourceType;
+	private String dlabId;
+	@NonNull
 	@JsonProperty("date_start")
-	protected String dateStart;
+	private String dateStart;
+	@NonNull
 	@JsonProperty("date_end")
-	protected String dateEnd;
-	@JsonProperty("status")
-	protected List<UserInstanceStatus> statuses = Collections.emptyList();
-	@JsonProperty("project")
-	protected List<String> projects;
-	@JsonProperty
-	private List<String> service;
-	@JsonProperty
-	private List<String> shape;
+	private String dateEnd;
+	@NonNull
+	@JsonProperty("resource_type")
+	private List<String> resourceTypes;
+	@NonNull
+	private List<UserInstanceStatus> statuses = Collections.emptyList();
+	@NonNull
+	private List<String> projects;
+	@NonNull
+	private List<String> products;
+	@NonNull
+	private List<String> shapes;
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/HealthStatusPageDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/HealthStatusPageDTO.java
index b7f9362..17e7b91 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/HealthStatusPageDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/HealthStatusPageDTO.java
@@ -20,13 +20,16 @@
 package com.epam.dlab.backendapi.resources.dto;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.MoreObjects;
+import lombok.Builder;
+import lombok.Data;
 
 import java.util.List;
 
 /**
  * Stores the health statuses for environment resources.
  */
+@Data
+@Builder
 public class HealthStatusPageDTO {
 	@JsonProperty
 	private String status;
@@ -37,131 +40,11 @@
 	@JsonProperty
 	private boolean admin;
 	@JsonProperty
+	private boolean projectAdmin;
+	@JsonProperty
 	private int billingQuoteUsed;
 	@JsonProperty
 	private int billingUserQuoteUsed;
 	@JsonProperty
 	private boolean projectAssigned;
-
-	/**
-	 * Return the status of environment.
-	 */
-	public String getStatus() {
-		return status;
-	}
-
-	/**
-	 * Set the status of environment.
-	 */
-	public void setStatus(HealthStatusEnum status) {
-		this.status = status == null ? null : status.toString();
-	}
-
-	/**
-	 * Set the status of environment.
-	 */
-	public void setStatus(String status) {
-		this.status = status;
-	}
-
-	public void setBillingEnabled(boolean billingEnabled) {
-		this.billingEnabled = billingEnabled;
-	}
-
-
-	/**
-	 * Set the status of environment.
-	 */
-	public HealthStatusPageDTO withStatus(String status) {
-		setStatus(status);
-		return this;
-	}
-
-	/**
-	 * Set the status of environment.
-	 */
-	public HealthStatusPageDTO withStatus(HealthStatusEnum status) {
-		setStatus(status);
-		return this;
-	}
-
-	public HealthStatusPageDTO withProjectAssinged(boolean isProjectAssigned) {
-		this.projectAssigned = isProjectAssigned;
-		return this;
-	}
-
-	/**
-	 * Return the list of resources.
-	 */
-	public List<HealthStatusResource> getListResources() {
-		return listResources;
-	}
-
-	/**
-	 * Set the list of resources.
-	 */
-	public void setListResources(List<HealthStatusResource> listResources) {
-		this.listResources = listResources;
-	}
-
-	/**
-	 * Set the list of resources.
-	 */
-	public HealthStatusPageDTO withListResources(List<HealthStatusResource> listResources) {
-		setListResources(listResources);
-		return this;
-	}
-
-	/**
-	 * Set billing enabled flag
-	 */
-	public HealthStatusPageDTO withBillingEnabled(boolean billingEnabled) {
-		setBillingEnabled(billingEnabled);
-		return this;
-	}
-
-	@Override
-	public String toString() {
-		return MoreObjects.toStringHelper(this)
-				.add("status", status)
-				.add("listResources", listResources)
-				.add("billingEnabled", billingEnabled)
-				.add("admin", admin)
-				.toString();
-	}
-
-	public HealthStatusPageDTO withAdmin(boolean isAdmin) {
-		this.admin = isAdmin;
-		return this;
-	}
-
-	public HealthStatusPageDTO withBillingQuoteUsed(int billingQuoteUsedPct) {
-		this.billingQuoteUsed = billingQuoteUsedPct;
-		return this;
-	}
-
-	public HealthStatusPageDTO withBillingUserQuoteUsed(int billingUserQuoteUsed) {
-		this.billingUserQuoteUsed = billingUserQuoteUsed;
-		return this;
-	}
-
-	public boolean isBillingEnabled() {
-		return billingEnabled;
-	}
-
-	public boolean isAdmin() {
-		return admin;
-	}
-
-	public boolean isProjectAssigned() {
-		return projectAssigned;
-	}
-
-	public int getBillingQuoteUsed() {
-		return billingQuoteUsed;
-	}
-
-	public int getBillingUserQuoteUsed() {
-		return billingUserQuoteUsed;
-	}
 }
\ No newline at end of file
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ImageInfoRecord.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ImageInfoRecord.java
index d430701..ed722ee 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ImageInfoRecord.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ImageInfoRecord.java
@@ -30,6 +30,7 @@
 	private final String description;
 	private final String project;
 	private final String endpoint;
+	private final String user;
 	private final String application;
 	private final String fullName;
 	private final ImageStatus status;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/KeysDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/KeysDTO.java
index 8093fc9..5bfbb0c 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/KeysDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/KeysDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources.dto;
 
 import lombok.AllArgsConstructor;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectActionFormDTO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectActionFormDTO.java
index 24d8342..ccdd3c4 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectActionFormDTO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectActionFormDTO.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources.dto;
 
 import com.fasterxml.jackson.annotation.JsonProperty;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectInfrastructureInfo.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectInfrastructureInfo.java
index 44837a2..b9dfd89 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectInfrastructureInfo.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/dto/ProjectInfrastructureInfo.java
@@ -19,6 +19,7 @@
 
 package com.epam.dlab.backendapi.resources.dto;
 
+import com.epam.dlab.backendapi.domain.BillingReport;
 import com.epam.dlab.backendapi.domain.EndpointDTO;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import lombok.AllArgsConstructor;
@@ -40,5 +41,7 @@
 	@JsonProperty
 	private Iterable<Document> exploratory;
 	@JsonProperty
+	private List<BillingReport> exploratoryBilling;
+	@JsonProperty
 	private List<EndpointDTO> endpoints;
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRole.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRole.java
index 206b143..e5343dd 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRole.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRole.java
@@ -23,6 +23,7 @@
 import com.google.common.base.MoreObjects.ToStringHelper;
 
 import javax.annotation.Nonnull;
+import java.util.Comparator;
 import java.util.Objects;
 import java.util.Set;
 
@@ -69,10 +70,6 @@
 		this.users = users;
 	}
 
-	UserRole(RoleType type, String name, Set<String> groups, Set<String> users) {
-		this(null, type, name, groups, users);
-	}
-
 	/**
 	 * Return the type of role.
 	 */
@@ -107,8 +104,10 @@
 
 	@Override
 	public int compareTo(@Nonnull UserRole o) {
-		int result = type.compareTo(o.type);
-		return (result == 0 ? name.compareTo(o.name) : result);
+		return Comparator.comparing(UserRole::getType)
+				.thenComparing(UserRole::getName)
+				.thenComparing(UserRole::getId, Comparator.nullsLast(String::compareToIgnoreCase))
+				.compare(this, o);
 	}
 
 	private ToStringHelper toStringHelper(Object self) {
@@ -124,7 +123,7 @@
 		if (this == o) return true;
 		if (o == null || getClass() != o.getClass()) return false;
 		UserRole userRole = (UserRole) o;
-		return this.type.equals(userRole.getType()) && this.name.equals(userRole.getName());
+		return this.id.equals(userRole.getId()) && this.type.equals(userRole.getType()) && this.name.equals(userRole.getName());
 	}
 
 	@Override
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRoles.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRoles.java
index 411f798..9be9578 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRoles.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/roles/UserRoles.java
@@ -28,7 +28,15 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.util.*;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Optional;
+import java.util.Set;
+import java.util.stream.Collectors;
 
 /**
  * Provides user roles access to features.
@@ -45,6 +53,7 @@
 	 * Node name of user.
 	 */
 	private static final String USERS = "users";
+	private static final String PROJECT_ADMIN_ROLE_NAME = "projectAdmin";
 	private static final String ADMIN_ROLE_NAME = "admin";
 	/**
 	 * Single instance of the user roles.
@@ -95,10 +104,22 @@
 		return checkAccess(userInfo, type, name, true, roles);
 	}
 
+	public static boolean isProjectAdmin(UserInfo userInfo) {
+		final List<UserRole> roles = UserRoles.getRoles();
+		return roles == null || roles.stream().anyMatch(r -> PROJECT_ADMIN_ROLE_NAME.equalsIgnoreCase(r.getId()) &&
+				(userRoles.hasAccessByGroup(userInfo, userInfo.getRoles(), r.getGroups()) || userRoles.hasAccessByUserName(userInfo, r)));
+	}
+
+	public static boolean isProjectAdmin(UserInfo userInfo, Set<String> groups) {
+		final List<UserRole> roles = UserRoles.getRoles();
+		return roles == null || roles.stream().anyMatch(r -> PROJECT_ADMIN_ROLE_NAME.equalsIgnoreCase(r.getId()) &&
+				(userRoles.hasAccessByGroup(userInfo, userInfo.getRoles(), retainGroups(r.getGroups(), groups)) || userRoles.hasAccessByUserName(userInfo, r)));
+	}
+
 	public static boolean isAdmin(UserInfo userInfo) {
 		final List<UserRole> roles = UserRoles.getRoles();
 		return roles == null || roles.stream().anyMatch(r -> ADMIN_ROLE_NAME.equalsIgnoreCase(r.getId()) &&
-				(userRoles.hasAccessByGroup(userInfo, r, userInfo.getRoles()) || userRoles.hasAccessByUserName(userInfo, r)));
+				(userRoles.hasAccessByGroup(userInfo, userInfo.getRoles(), r.getGroups()) || userRoles.hasAccessByUserName(userInfo, r)));
 	}
 
 	/**
@@ -181,12 +202,16 @@
 	 *
 	 * @param type type of role.
 	 * @param name the name of role.
+	 * @return list of UserRole
 	 */
-	private UserRole get(RoleType type, String name) {
-		UserRole item = new UserRole(type, name, null, null);
+	private Set<String> getGroups(RoleType type, String name) {
 		synchronized (roles) {
-			int i = Collections.binarySearch(roles, item);
-			return (i < 0 ? null : roles.get(i));
+			return roles
+					.stream()
+					.filter(r -> type == r.getType() && name.equalsIgnoreCase(r.getName()))
+					.map(UserRole::getGroups)
+					.flatMap(Collection::stream)
+					.collect(Collectors.toSet());
 		}
 	}
 
@@ -233,17 +258,18 @@
 		}
 		LOGGER.trace("Check access for user {} with groups {} to {}/{}", userInfo.getName(), userInfo.getRoles(),
 				type, name);
-		UserRole role = get(type, name);
-		if (role == null) {
+		Set<String> groups = getGroups(type, name);
+		if (groups == null || groups.isEmpty()) {
 			return checkDefault(useDefault);
 		}
-		if (hasAccessByGroup(userInfo, role, roles)) return true;
+		if (hasAccessByGroup(userInfo, roles, groups)) {
+			return true;
+		}
 		LOGGER.trace("Access denied for user {} to {}/{}", userInfo.getName(), type, name);
 		return false;
 	}
 
-	private boolean hasAccessByGroup(UserInfo userInfo, UserRole role, Collection<String> userRoles) {
-		Set<String> groups = role.getGroups();
+	private boolean hasAccessByGroup(UserInfo userInfo, Collection<String> userRoles, Collection<String> groups) {
 		if (groups != null) {
 			if (groups.contains(ANY_USER)) {
 				return true;
@@ -255,7 +281,7 @@
 				}
 			}
 
-			final Optional<String> group = role.getGroups()
+			final Optional<String> group = groups
 					.stream()
 					.filter(g -> userGroups.getOrDefault(g, Collections.emptySet()).contains(userInfo.getName().toLowerCase()))
 					.findAny();
@@ -287,12 +313,20 @@
 		}
 	}
 
+	private static Set<String> retainGroups(Set<String> groups1, Set<String> groups2) {
+		Set<String> result = groups2
+				.stream()
+				.map(String::toLowerCase)
+				.collect(Collectors.toSet());
+		result.retainAll(groups1);
+
+		return result;
+	}
+
 	@Override
 	public String toString() {
 		return MoreObjects.toStringHelper(roles)
 				.addValue(roles)
 				.toString();
 	}
-
-
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/billing/BillingScheduler.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/billing/BillingScheduler.java
new file mode 100644
index 0000000..45563a2
--- /dev/null
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/billing/BillingScheduler.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.backendapi.schedulers.billing;
+
+import com.epam.dlab.backendapi.schedulers.internal.Scheduled;
+import com.epam.dlab.backendapi.service.BillingService;
+import com.epam.dlab.backendapi.service.SecurityService;
+import com.google.inject.Inject;
+import lombok.extern.slf4j.Slf4j;
+import org.quartz.Job;
+import org.quartz.JobExecutionContext;
+
+@Scheduled("billingScheduler")
+@Slf4j
+public class BillingScheduler implements Job {
+
+    private final BillingService billingService;
+    private final SecurityService securityService;
+
+    @Inject
+    public BillingScheduler(BillingService billingService, SecurityService securityService) {
+        this.billingService = billingService;
+        this.securityService = securityService;
+    }
+
+    @Override
+    public void execute(JobExecutionContext jobExecutionContext) {
+        log.info("Trying to update billing");
+        try {
+            billingService.updateRemoteBillingData(securityService.getServiceAccountInfo("admin"));
+        } catch (Exception e) {
+            log.error("Something went wrong {}", e.getMessage());
+        }
+    }
+}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/BillingService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/BillingService.java
index 5f79280..b76b141 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/BillingService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/BillingService.java
@@ -20,82 +20,17 @@
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.dao.BaseBillingDAO;
-import com.epam.dlab.backendapi.dao.BillingDAO;
+import com.epam.dlab.backendapi.domain.BillingReport;
 import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.backendapi.util.CSVFormatter;
-import com.epam.dlab.exceptions.DlabException;
-import com.google.inject.Inject;
-import jersey.repackaged.com.google.common.collect.Lists;
-import lombok.extern.slf4j.Slf4j;
-import org.bson.Document;
 
-import java.text.ParseException;
 import java.util.List;
 
-@Slf4j
-public abstract class BillingService {
+public interface BillingService {
+    BillingReport getBillingReport(UserInfo userInfo, BillingFilter filter);
 
-    @Inject
-    private BillingDAO billingDAO;
+    String downloadReport(UserInfo userInfo, BillingFilter filter);
 
-    public Document getReport(UserInfo userInfo, BillingFilter filter) {
-        log.trace("Get billing report for user {} with filter {}", userInfo.getName(), filter);
-        try {
-            return billingDAO.getReport(userInfo, filter);
-        } catch (RuntimeException t) {
-            log.error("Cannot load billing report for user {} with filter {}", userInfo.getName(), filter, t);
-            throw new DlabException("Cannot load billing report: " + t.getLocalizedMessage(), t);
-        }
-    }
+    BillingReport getExploratoryBillingData(String project, String endpoint, String exploratoryName, List<String> compNames);
 
-    protected String getValueOrEmpty(Document document, String key) {
-        String value = document.getString(key);
-        return value == null ? "" : value;
-    }
-
-    String getHeaders(boolean full) {
-        return CSVFormatter.formatLine(getHeadersList(full), CSVFormatter.SEPARATOR);
-    }
-
-    public Document getBillingReport(UserInfo userInfo, BillingFilter filter) {
-        filter.getUser().replaceAll(s -> s.equalsIgnoreCase(BaseBillingDAO.SHARED_RESOURCE_NAME) ? null : s);
-        return getReport(userInfo, filter);
-    }
-
-    public byte[] downloadReport(UserInfo userInfo, BillingFilter filter) {
-        return prepareReport(getReport(userInfo, filter)).getBytes();
-    }
-
-    String prepareReport(Document document) {
-        try {
-            StringBuilder builder =
-                    new StringBuilder(CSVFormatter.formatLine(Lists.newArrayList(getFirstLine(document)),
-                            CSVFormatter.SEPARATOR, '\"'));
-
-            Boolean full = (Boolean) document.get(BaseBillingDAO.FULL_REPORT);
-            builder.append(getHeaders(full));
-
-            @SuppressWarnings("unchecked")
-            List<Document> items = (List<Document>) document.get(BaseBillingDAO.ITEMS);
-
-            items.forEach(d -> builder.append(getLine(full, d)));
-
-            builder.append(getTotal(full, document));
-
-            return builder.toString();
-        } catch (ParseException e) {
-            throw new DlabException("Cannot prepare CSV file", e);
-        }
-    }
-
-    public abstract String getFirstLine(Document document) throws ParseException;
-
-    public abstract List<String> getHeadersList(boolean full);
-
-    public abstract String getLine(boolean full, Document document);
-
-    public abstract String getTotal(boolean full, Document document);
-
-    public abstract String getReportFileName(UserInfo userInfo, BillingFilter filter);
+    void updateRemoteBillingData(UserInfo userInfo);
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EndpointService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EndpointService.java
index 58afa39..abd28d6 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EndpointService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EndpointService.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EnvironmentService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EnvironmentService.java
index f765aa4..c605131 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EnvironmentService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/EnvironmentService.java
@@ -24,20 +24,14 @@
 import com.epam.dlab.backendapi.resources.dto.UserResourceInfo;
 
 import java.util.List;
-import java.util.Set;
 
 public interface EnvironmentService {
-
-	Set<String> getUserNames();
-
 	List<UserDTO> getUsers();
 
-	List<UserResourceInfo> getAllEnv();
+	List<UserResourceInfo> getAllEnv(UserInfo user);
 
 	void stopAll();
 
-	void stopEnvironment(UserInfo userInfo, String user, String project);
-
 	void stopEnvironmentWithServiceAccount(String user);
 
 	void stopProjectEnvironment(String project);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ExploratoryService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ExploratoryService.java
index 2b93a8e..807df17 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ExploratoryService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ExploratoryService.java
@@ -21,6 +21,7 @@
 
 
 import com.epam.dlab.auth.UserInfo;
+import com.epam.dlab.backendapi.domain.ProjectDTO;
 import com.epam.dlab.backendapi.resources.dto.ExploratoryCreatePopUp;
 import com.epam.dlab.dto.UserInstanceDTO;
 import com.epam.dlab.dto.UserInstanceStatus;
@@ -29,6 +30,7 @@
 
 import java.util.List;
 import java.util.Optional;
+import java.util.Set;
 
 public interface ExploratoryService {
 
@@ -46,6 +48,12 @@
 
     Optional<UserInstanceDTO> getUserInstance(String user, String project, String exploratoryName);
 
+    Optional<UserInstanceDTO> getUserInstance(String user, String project, String exploratoryName, boolean includeCompResources);
+
+    List<UserInstanceDTO> findAll();
+
+    List<UserInstanceDTO> findAll(Set<ProjectDTO> projects);
+
     List<ClusterConfig> getClusterConfig(UserInfo user, String project, String exploratoryName);
 
     ExploratoryCreatePopUp getUserInstances(UserInfo user);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/GuacamoleService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/GuacamoleService.java
index 06dc8b1..776e409 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/GuacamoleService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/GuacamoleService.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InfrastructureInfoService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InfrastructureInfoService.java
index aa23d1d..ffb3531 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InfrastructureInfoService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InfrastructureInfoService.java
@@ -27,9 +27,9 @@
 import java.util.List;
 
 public interface InfrastructureInfoService {
-	List<ProjectInfrastructureInfo> getUserResources(String user);
+	List<ProjectInfrastructureInfo> getUserResources(UserInfo user);
 
-	HealthStatusPageDTO getHeathStatus(UserInfo user, boolean fullReport, boolean isAdmin);
+	HealthStatusPageDTO getHeathStatus(UserInfo user, boolean fullReport);
 
 	InfrastructureMetaInfoDTO getInfrastructureMetaInfo();
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakService.java
index e55b0cc..2259426 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakService.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import org.keycloak.representations.AccessTokenResponse;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakServiceImpl.java
index b7c508a..04cf8d9 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/KeycloakServiceImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java
index 1fd394b..5362dfc 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ProjectService.java
@@ -1,8 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.domain.ProjectDTO;
-import com.epam.dlab.backendapi.domain.ProjectManagingDTO;
 import com.epam.dlab.backendapi.domain.UpdateProjectDTO;
 
 import java.util.List;
@@ -10,12 +28,10 @@
 public interface ProjectService {
 	List<ProjectDTO> getProjects();
 
-	List<ProjectManagingDTO> getProjectsForManaging();
+	List<ProjectDTO> getProjects(UserInfo user);
 
 	List<ProjectDTO> getUserProjects(UserInfo userInfo, boolean active);
 
-	List<ProjectDTO> getProjectsWithStatus(ProjectDTO.Status status);
-
 	List<ProjectDTO> getProjectsByEndpoint(String endpointName);
 
 	void create(UserInfo userInfo, ProjectDTO projectDTO);
@@ -26,21 +42,15 @@
 
 	void terminateEndpoint(UserInfo userInfo, List<String> endpoints, String name);
 
-	void terminateProject(UserInfo userInfo, String name);
-
 	void start(UserInfo userInfo, String endpoint, String name);
 
 	void start(UserInfo userInfo, List<String> endpoints, String name);
 
 	void stop(UserInfo userInfo, String endpoint, String name);
 
-	void stop(UserInfo userInfo, List<String> endpoints, String name);
-
 	void stopWithResources(UserInfo userInfo, List<String> endpoints, String projectName);
 
-	void update(UserInfo userInfo, UpdateProjectDTO projectDTO);
-
-	void updateBudget(String project, Integer budget);
+	void update(UserInfo userInfo, UpdateProjectDTO projectDTO, String projectName);
 
 	void updateBudget(List<ProjectDTO> projects);
 
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityService.java
index 6f78ad2..c30a670 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityService.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityServiceImpl.java
index b43128d..db41a13 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SecurityServiceImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ShapeFormat.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ShapeFormat.java
deleted file mode 100644
index da224ab..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/ShapeFormat.java
+++ /dev/null
@@ -1,5 +0,0 @@
-package com.epam.dlab.backendapi.service;
-
-public interface ShapeFormat {
-    String format();
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagService.java
index f436f6d..a095fbc 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagService.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagServiceImpl.java
index 5f1504c..64b3383 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/TagServiceImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserGroupService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserGroupService.java
index e9e3e47..94e89e3 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserGroupService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserGroupService.java
@@ -18,6 +18,7 @@
  */
 package com.epam.dlab.backendapi.service;
 
+import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.resources.dto.UserGroupDto;
 
 import java.util.List;
@@ -26,17 +27,10 @@
 public interface UserGroupService {
 
 	void createGroup(String group, Set<String> roleIds, Set<String> users);
-	void updateGroup(String group, Set<String> roleIds, Set<String> users);
 
-	void addUsersToGroup(String group, Set<String> users);
-
-	void updateRolesForGroup(String group, Set<String> roleIds);
-
-	void removeUserFromGroup(String group, String user);
-
-	void removeGroupFromRole(Set<String> groups, Set<String> roleIds);
+	void updateGroup(UserInfo user, String group, Set<String> roleIds, Set<String> users);
 
 	void removeGroup(String groupId);
 
-	List<UserGroupDto> getAggregatedRolesByGroup();
+	List<UserGroupDto> getAggregatedRolesByGroup(UserInfo user);
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserRoleServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserRoleServiceImpl.java
index ce9462f..92e0afb 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserRoleServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/UserRoleServiceImpl.java
@@ -31,6 +31,7 @@
 @Singleton
 public class UserRoleServiceImpl implements UserRoleService {
 	private static final String ROLE_NOT_FOUND_MSG = "Any of role : %s were not found";
+
 	@Inject
 	private UserRoleDao userRoleDao;
 
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/aws/AwsBillingService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/aws/AwsBillingService.java
deleted file mode 100644
index eb94ea5..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/aws/AwsBillingService.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.service.aws;
-
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.dao.aws.AwsBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.backendapi.service.BillingService;
-import com.epam.dlab.backendapi.util.CSVFormatter;
-import com.epam.dlab.model.aws.ReportLine;
-import com.google.inject.Singleton;
-import lombok.extern.slf4j.Slf4j;
-import org.bson.Document;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-
-@Slf4j
-@Singleton
-public class AwsBillingService extends BillingService {
-
-	@Override
-	public String getReportFileName(UserInfo userInfo, BillingFilter filter) {
-		return "aws-billing-report.csv";
-	}
-
-	public String getFirstLine(Document document) throws ParseException {
-
-		SimpleDateFormat from = new SimpleDateFormat("yyyy-MM-dd");
-		SimpleDateFormat to = new SimpleDateFormat("MMM dd, yyyy");
-
-		return String.format("Service base name: %s  " +
-						"Resource tag ID: %s  " +
-						"Available reporting period from: %s to: %s",
-				document.get(AwsBillingDAO.SERVICE_BASE_NAME), document.get(AwsBillingDAO.TAG_RESOURCE_ID),
-				to.format(from.parse((String) document.get(AwsBillingDAO.USAGE_DATE_START))),
-				to.format(from.parse((String) document.get(AwsBillingDAO.USAGE_DATE_END))));
-
-	}
-
-	public List<String> getHeadersList(boolean full) {
-		List<String> headers = new ArrayList<>();
-
-		if (full) {
-			headers.add("USER");
-		}
-
-		headers.add("PROJECT");
-		headers.add("ENVIRONMENT NAME");
-		headers.add("RESOURCE TYPE");
-		headers.add("SHAPE");
-		headers.add("SERVICE");
-		headers.add("SERVICE CHARGES");
-
-		return headers;
-	}
-
-	public String getLine(boolean full, Document document) {
-		List<String> items = new ArrayList<>();
-
-		if (full) {
-			items.add(getValueOrEmpty(document, ReportLine.FIELD_USER_ID));
-		}
-
-		items.add(getValueOrEmpty(document, ReportLine.FIELD_PROJECT));
-		items.add(getValueOrEmpty(document, ReportLine.FIELD_DLAB_ID));
-		items.add(getValueOrEmpty(document, AwsBillingDAO.DLAB_RESOURCE_TYPE));
-		items.add(getValueOrEmpty(document, AwsBillingDAO.SHAPE).replace(System.lineSeparator(), " "));
-		items.add(getValueOrEmpty(document, ReportLine.FIELD_PRODUCT));
-
-		items.add(getValueOrEmpty(document, ReportLine.FIELD_COST)
-				+ " " + getValueOrEmpty(document, ReportLine.FIELD_CURRENCY_CODE));
-
-		return CSVFormatter.formatLine(items, CSVFormatter.SEPARATOR);
-	}
-
-	public String getTotal(boolean full, Document document) {
-		int padding = getHeadersList(full).size() - 1;
-
-		List<String> items = new ArrayList<>();
-		while (padding-- > 0) {
-			items.add("");
-		}
-
-		items.add(String.format("Total: %s %s", getValueOrEmpty(document, AwsBillingDAO.COST_TOTAL),
-				getValueOrEmpty(document, ReportLine.FIELD_CURRENCY_CODE)));
-
-		return CSVFormatter.formatLine(items, CSVFormatter.SEPARATOR);
-
-	}
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/azure/AzureBillingService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/azure/AzureBillingService.java
deleted file mode 100644
index 9ff33a8..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/azure/AzureBillingService.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.service.azure;
-
-import com.epam.dlab.MongoKeyWords;
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.dao.BaseBillingDAO;
-import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.dao.azure.AzureBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.backendapi.service.BillingService;
-import com.epam.dlab.backendapi.util.CSVFormatter;
-import com.epam.dlab.model.aws.ReportLine;
-import com.google.inject.Inject;
-import com.google.inject.Singleton;
-import lombok.extern.slf4j.Slf4j;
-import org.bson.Document;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-
-@Slf4j
-@Singleton
-public class AzureBillingService extends BillingService {
-
-    @Inject
-    private BillingDAO billingDAO;
-
-    @Override
-    public String getReportFileName(UserInfo userInfo, BillingFilter filter) {
-        return "azure-billing-report.csv";
-    }
-
-    @Override
-    public String getFirstLine(Document document) throws ParseException {
-        SimpleDateFormat from = new SimpleDateFormat("yyyy-MM-dd");
-        SimpleDateFormat to = new SimpleDateFormat("MMM dd, yyyy");
-
-        return String.format("Service base name: %s  " +
-                        "Available reporting period from: %s to: %s",
-                document.get(BaseBillingDAO.SERVICE_BASE_NAME),
-                to.format(from.parse((String) document.get(MongoKeyWords.USAGE_FROM))),
-                to.format(from.parse((String) document.get(MongoKeyWords.USAGE_TO))));
-    }
-
-    public List<String> getHeadersList(boolean full) {
-        List<String> headers = new ArrayList<>();
-
-        if (full) {
-            headers.add("USER");
-        }
-
-        headers.add("PROJECT");
-        headers.add("ENVIRONMENT NAME");
-        headers.add("RESOURCE TYPE");
-        headers.add("INSTANCE SIZE");
-        headers.add("CATEGORY");
-        headers.add("SERVICE CHARGES");
-
-        return headers;
-    }
-
-    @Override
-    public String getLine(boolean full, Document document) {
-        List<String> items = new ArrayList<>();
-
-        if (full) {
-            items.add(getValueOrEmpty(document, MongoKeyWords.DLAB_USER));
-        }
-
-        items.add(getValueOrEmpty(document, ReportLine.FIELD_PROJECT));
-        items.add(getValueOrEmpty(document, MongoKeyWords.DLAB_ID));
-        items.add(getValueOrEmpty(document, MongoKeyWords.RESOURCE_TYPE));
-        items.add(getValueOrEmpty(document, AzureBillingDAO.SIZE).replace(System.lineSeparator(), " "));
-        items.add(getValueOrEmpty(document, MongoKeyWords.METER_CATEGORY));
-
-        items.add(getValueOrEmpty(document, MongoKeyWords.COST_STRING)
-                + " " + getValueOrEmpty(document, MongoKeyWords.CURRENCY_CODE));
-
-        return CSVFormatter.formatLine(items, CSVFormatter.SEPARATOR);
-    }
-
-    @Override
-    public String getTotal(boolean full, Document document) {
-        int padding = getHeadersList(full).size() - 1;
-
-        List<String> items = new ArrayList<>();
-        while (padding-- > 0) {
-            items.add("");
-        }
-
-        items.add(String.format("Total: %s %s", getValueOrEmpty(document, MongoKeyWords.COST_STRING),
-                getValueOrEmpty(document, MongoKeyWords.CURRENCY_CODE)));
-
-        return CSVFormatter.formatLine(items, CSVFormatter.SEPARATOR);
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/gcp/GcpBillingService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/gcp/GcpBillingService.java
deleted file mode 100644
index a7599f7..0000000
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/gcp/GcpBillingService.java
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.service.gcp;
-
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.dao.aws.AwsBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.backendapi.service.BillingService;
-import com.epam.dlab.backendapi.util.CSVFormatter;
-import com.epam.dlab.model.aws.ReportLine;
-import org.bson.Document;
-
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.List;
-
-public class GcpBillingService extends BillingService {
-    @Override
-    public String getFirstLine(Document document) throws ParseException {
-        SimpleDateFormat from = new SimpleDateFormat("yyyy-MM-dd");
-        SimpleDateFormat to = new SimpleDateFormat("MMM dd, yyyy");
-
-        return String.format("Service base name: %s Available reporting period from: %s to: %s",
-                document.get(AwsBillingDAO.SERVICE_BASE_NAME),
-                to.format(from.parse((String) document.get("from"))),
-                to.format(from.parse((String) document.get("to"))));
-    }
-
-    @Override
-    public List<String> getHeadersList(boolean full) {
-        List<String> headers = new ArrayList<>();
-
-        if (full) {
-            headers.add("USER");
-        }
-
-        headers.add("PROJECT");
-        headers.add("ENVIRONMENT NAME");
-        headers.add("RESOURCE TYPE");
-        headers.add("SHAPE");
-        headers.add("SERVICE");
-        headers.add("SERVICE CHARGES");
-
-        return headers;
-    }
-
-    @Override
-    public String getLine(boolean full, Document document) {
-        List<String> items = new ArrayList<>();
-
-        if (full) {
-            items.add(getValueOrEmpty(document, ReportLine.FIELD_USER_ID));
-        }
-
-        items.add(getValueOrEmpty(document, ReportLine.FIELD_PROJECT));
-        items.add(getValueOrEmpty(document, ReportLine.FIELD_DLAB_ID));
-        items.add(getValueOrEmpty(document, AwsBillingDAO.DLAB_RESOURCE_TYPE));
-        items.add(getValueOrEmpty(document, AwsBillingDAO.SHAPE).replace(System.lineSeparator(), " "));
-        items.add(getValueOrEmpty(document, ReportLine.FIELD_PRODUCT));
-
-        items.add(getValueOrEmpty(document, ReportLine.FIELD_COST)
-                + " " + getValueOrEmpty(document, ReportLine.FIELD_CURRENCY_CODE));
-
-        return CSVFormatter.formatLine(items, CSVFormatter.SEPARATOR);
-    }
-
-    @Override
-    public String getTotal(boolean full, Document document) {
-        int padding = getHeadersList(full).size() - 1;
-
-        List<String> items = new ArrayList<>();
-        while (padding-- > 0) {
-            items.add("");
-        }
-
-        items.add(String.format("Total: %s %s", getValueOrEmpty(document, AwsBillingDAO.COST_TOTAL),
-                getValueOrEmpty(document, ReportLine.FIELD_CURRENCY_CODE)));
-
-        return CSVFormatter.formatLine(items, CSVFormatter.SEPARATOR);
-    }
-
-    @Override
-    public String getReportFileName(UserInfo userInfo, BillingFilter filter) {
-        return "gcp-billing-report.csv";
-    }
-}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/BillingServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/BillingServiceImpl.java
new file mode 100644
index 0000000..8eae49b
--- /dev/null
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/BillingServiceImpl.java
@@ -0,0 +1,338 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.backendapi.service.impl;
+
+import com.epam.dlab.auth.UserInfo;
+import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
+import com.epam.dlab.backendapi.dao.BillingDAO;
+import com.epam.dlab.backendapi.dao.ImageExploratoryDao;
+import com.epam.dlab.backendapi.domain.BillingReport;
+import com.epam.dlab.backendapi.domain.BillingReportLine;
+import com.epam.dlab.backendapi.domain.EndpointDTO;
+import com.epam.dlab.backendapi.domain.ProjectDTO;
+import com.epam.dlab.backendapi.domain.ProjectEndpointDTO;
+import com.epam.dlab.backendapi.resources.dto.BillingFilter;
+import com.epam.dlab.backendapi.roles.RoleType;
+import com.epam.dlab.backendapi.roles.UserRoles;
+import com.epam.dlab.backendapi.service.BillingService;
+import com.epam.dlab.backendapi.service.EndpointService;
+import com.epam.dlab.backendapi.service.ExploratoryService;
+import com.epam.dlab.backendapi.service.ProjectService;
+import com.epam.dlab.backendapi.util.BillingUtils;
+import com.epam.dlab.cloud.CloudProvider;
+import com.epam.dlab.constants.ServiceConsts;
+import com.epam.dlab.dto.UserInstanceStatus;
+import com.epam.dlab.dto.billing.BillingData;
+import com.epam.dlab.dto.billing.BillingResourceType;
+import com.epam.dlab.exceptions.DlabException;
+import com.epam.dlab.rest.client.RESTService;
+import com.google.common.collect.Lists;
+import com.google.inject.Inject;
+import com.google.inject.name.Named;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.commons.collections4.CollectionUtils;
+import org.apache.http.client.utils.URIBuilder;
+
+import javax.ws.rs.core.GenericType;
+import java.math.BigDecimal;
+import java.net.URI;
+import java.net.URISyntaxException;
+import java.time.LocalDate;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
+
+@Slf4j
+public class BillingServiceImpl implements BillingService {
+    private static final String BILLING_PATH = "/api/billing";
+    private static final String USAGE_DATE_FORMAT = "yyyy-MM";
+
+    private final ProjectService projectService;
+    private final EndpointService endpointService;
+    private final ExploratoryService exploratoryService;
+    private final SelfServiceApplicationConfiguration configuration;
+    private final RESTService provisioningService;
+    private final ImageExploratoryDao imageExploratoryDao;
+    private final BillingDAO billingDAO;
+    private final String sbn;
+
+    @Inject
+    public BillingServiceImpl(ProjectService projectService, EndpointService endpointService,
+                              ExploratoryService exploratoryService, SelfServiceApplicationConfiguration configuration,
+                              @Named(ServiceConsts.BILLING_SERVICE_NAME) RESTService provisioningService, ImageExploratoryDao imageExploratoryDao,
+                              BillingDAO billingDAO) {
+        this.projectService = projectService;
+        this.endpointService = endpointService;
+        this.exploratoryService = exploratoryService;
+        this.configuration = configuration;
+        this.provisioningService = provisioningService;
+        this.imageExploratoryDao = imageExploratoryDao;
+        this.billingDAO = billingDAO;
+        sbn = configuration.getServiceBaseName();
+    }
+
+    @Override
+    public BillingReport getBillingReport(UserInfo user, BillingFilter filter) {
+        setUserFilter(user, filter);
+        List<BillingReportLine> billingReportLines = billingDAO.aggregateBillingData(filter)
+                .stream()
+                .peek(this::appendStatuses)
+                .filter(bd -> CollectionUtils.isEmpty(filter.getStatuses()) || filter.getStatuses().contains(bd.getStatus()))
+                .collect(Collectors.toList());
+        final LocalDate min = billingReportLines.stream().min(Comparator.comparing(BillingReportLine::getUsageDateFrom)).map(BillingReportLine::getUsageDateFrom).orElse(null);
+        final LocalDate max = billingReportLines.stream().max(Comparator.comparing(BillingReportLine::getUsageDateTo)).map(BillingReportLine::getUsageDateTo).orElse(null);
+        final double sum = billingReportLines.stream().mapToDouble(BillingReportLine::getCost).sum();
+        final String currency = billingReportLines.stream().map(BillingReportLine::getCurrency).distinct().count() == 1 ? billingReportLines.get(0).getCurrency() : null;
+        return BillingReport.builder()
+                .name("Billing report")
+                .sbn(sbn)
+                .reportLines(billingReportLines)
+                .usageDateFrom(min)
+                .usageDateTo(max)
+                .totalCost(new BigDecimal(sum).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue())
+                .currency(currency)
+                .isFull(isFullReport(user))
+                .build();
+    }
+
+    @Override
+    public String downloadReport(UserInfo user, BillingFilter filter) {
+        boolean isFull = isFullReport(user);
+        BillingReport report = getBillingReport(user, filter);
+        StringBuilder builder = new StringBuilder(BillingUtils.getFirstLine(report.getSbn(), report.getUsageDateFrom(), report.getUsageDateTo()));
+        builder.append(BillingUtils.getHeader(isFull));
+        try {
+            report.getReportLines().forEach(r -> builder.append(BillingUtils.printLine(r, isFull)));
+            builder.append(BillingUtils.getTotal(report.getTotalCost(), report.getCurrency()));
+            return builder.toString();
+        } catch (Exception e) {
+            log.error("Cannot write billing data ", e);
+            throw new DlabException("Cannot write billing file ", e);
+        }
+    }
+
+    public BillingReport getExploratoryBillingData(String project, String endpoint, String exploratoryName, List<String> compNames) {
+        List<String> resourceNames = new ArrayList<>(compNames);
+        resourceNames.add(exploratoryName);
+        List<BillingReportLine> billingData = billingDAO.findBillingData(project, endpoint, resourceNames)
+                .stream()
+                .peek(bd -> bd.setCost(BigDecimal.valueOf(bd.getCost()).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue()))
+                .collect(Collectors.toList());
+        final double sum = billingData.stream().mapToDouble(BillingReportLine::getCost).sum();
+        final String currency = billingData.stream().map(BillingReportLine::getCurrency).distinct().count() == 1 ? billingData.get(0).getCurrency() : null;
+        return BillingReport.builder()
+                .name(exploratoryName)
+                .reportLines(billingData)
+                .totalCost(new BigDecimal(sum).setScale(2, BigDecimal.ROUND_HALF_UP).doubleValue())
+                .currency(currency)
+                .build();
+    }
+
+    public void updateRemoteBillingData(UserInfo userInfo) {
+        List<EndpointDTO> endpoints = endpointService.getEndpoints();
+        if (CollectionUtils.isEmpty(endpoints)) {
+            log.error("Cannot update billing info. There are no endpoints");
+            throw new DlabException("Cannot update billing info. There are no endpoints");
+        }
+
+        Map<EndpointDTO, List<BillingData>> billingDataMap = endpoints
+                .stream()
+                .collect(Collectors.toMap(e -> e, e -> getBillingData(userInfo, e)));
+
+        billingDataMap.forEach((endpointDTO, billingData) -> {
+            log.info("Updating billing information for endpoint {}. Billing data {}", endpointDTO.getName(), billingData);
+            try {
+                updateBillingData(endpointDTO, billingData);
+            } catch (Exception e) {
+                log.error("Something went wrong while trying to update billing for {}. {}", endpointDTO.getName(), e.getMessage());
+            }
+        });
+    }
+
+    private Map<String, BillingReportLine> getBillableResources() {
+        Set<ProjectDTO> projects = new HashSet<>(projectService.getProjects());
+        final Stream<BillingReportLine> ssnBillingDataStream = BillingUtils.ssnBillingDataStream(sbn);
+        final Stream<BillingReportLine> billableEdges = projects
+                .stream()
+                .collect(Collectors.toMap(ProjectDTO::getName, ProjectDTO::getEndpoints))
+                .entrySet()
+                .stream()
+                .flatMap(e -> projectEdges(sbn, e.getKey(), e.getValue()));
+        final Stream<BillingReportLine> billableSharedEndpoints = endpointService.getEndpoints()
+                .stream()
+                .flatMap(endpoint -> BillingUtils.sharedEndpointBillingDataStream(endpoint.getName(), sbn));
+        final Stream<BillingReportLine> billableUserInstances = exploratoryService.findAll(projects)
+                .stream()
+                .filter(userInstance -> Objects.nonNull(userInstance.getExploratoryId()))
+                .flatMap(ui -> BillingUtils.exploratoryBillingDataStream(ui, configuration.getMaxSparkInstanceCount()));
+        final Stream<BillingReportLine> customImages = projects
+                .stream()
+                .map(p -> imageExploratoryDao.getImagesForProject(p.getName()))
+                .flatMap(Collection::stream)
+                .flatMap(i -> BillingUtils.customImageBillingDataStream(i, sbn));
+
+        final Map<String, BillingReportLine> billableResources = Stream.of(ssnBillingDataStream, billableEdges, billableSharedEndpoints, billableUserInstances, customImages)
+                .flatMap(s -> s)
+                .collect(Collectors.toMap(BillingReportLine::getDlabId, b -> b));
+        log.debug("Billable resources are: {}", billableResources);
+
+        return billableResources;
+    }
+
+    private Stream<BillingReportLine> projectEdges(String serviceBaseName, String projectName, List<ProjectEndpointDTO> endpoints) {
+        return endpoints
+                .stream()
+                .flatMap(endpoint -> BillingUtils.edgeBillingDataStream(projectName, serviceBaseName, endpoint.getName()));
+    }
+
+    private void updateBillingData(EndpointDTO endpointDTO, List<BillingData> billingData) {
+        final String endpointName = endpointDTO.getName();
+        final CloudProvider cloudProvider = endpointDTO.getCloudProvider();
+        final Map<String, BillingReportLine> billableResources = getBillableResources();
+        final Stream<BillingReportLine> billingReportLineStream = billingData
+                .stream()
+                .peek(bd -> bd.setApplication(endpointName))
+                .map(bd -> toBillingReport(bd, getOrDefault(billableResources, bd.getTag())));
+
+        if (cloudProvider == CloudProvider.GCP) {
+            final Map<String, List<BillingReportLine>> gcpBillingData = billingReportLineStream
+                    .collect(Collectors.groupingBy(bd -> bd.getUsageDate().substring(0, USAGE_DATE_FORMAT.length())));
+            updateGcpBillingData(endpointName, gcpBillingData);
+        } else if (cloudProvider == CloudProvider.AWS) {
+            final Map<String, List<BillingReportLine>> awsBillingData = billingReportLineStream
+                    .collect(Collectors.groupingBy(BillingReportLine::getUsageDate));
+            updateAwsBillingData(endpointName, awsBillingData);
+        } else if (cloudProvider == CloudProvider.AZURE) {
+            final List<BillingReportLine> billingReportLines = billingReportLineStream
+                    .collect(Collectors.toList());
+            updateAzureBillingData(billingReportLines);
+        }
+    }
+
+    private BillingReportLine getOrDefault(Map<String, BillingReportLine> billableResources, String tag) {
+        return billableResources.getOrDefault(tag, BillingReportLine.builder().dlabId(tag).build());
+    }
+
+    private void updateGcpBillingData(String endpointName, Map<String, List<BillingReportLine>> billingData) {
+        billingData.forEach((usageDate, billingReportLines) -> {
+            billingDAO.deleteByUsageDateRegex(endpointName, usageDate);
+            billingDAO.save(billingReportLines);
+        });
+    }
+
+    private void updateAwsBillingData(String endpointName, Map<String, List<BillingReportLine>> billingData) {
+        billingData.forEach((usageDate, billingReportLines) -> {
+            billingDAO.deleteByUsageDate(endpointName, usageDate);
+            billingDAO.save(billingReportLines);
+        });
+    }
+
+    private void updateAzureBillingData(List<BillingReportLine> billingReportLines) {
+        billingDAO.save(billingReportLines);
+    }
+
+    private List<BillingData> getBillingData(UserInfo userInfo, EndpointDTO e) {
+        try {
+            return provisioningService.get(getBillingUrl(e.getUrl(), BILLING_PATH), userInfo.getAccessToken(),
+                    new GenericType<List<BillingData>>() {
+                    });
+        } catch (Exception ex) {
+            log.error("Cannot retrieve billing information for {}. {}", e.getName(), ex.getMessage());
+            return Collections.emptyList();
+        }
+    }
+
+    private String getBillingUrl(String endpointUrl, String path) {
+        URI uri;
+        try {
+            uri = new URI(endpointUrl);
+        } catch (URISyntaxException e) {
+            log.error("Wrong URI syntax {}", e.getMessage(), e);
+            throw new DlabException("Wrong URI syntax");
+        }
+        return new URIBuilder()
+                .setScheme(uri.getScheme())
+                .setHost(uri.getHost())
+                .setPort(8088)
+                .setPath(path)
+                .toString();
+    }
+
+    private void appendStatuses(BillingReportLine br) {
+        BillingResourceType resourceType = br.getResourceType();
+        if (BillingResourceType.EDGE == resourceType) {
+            projectService.get(br.getProject()).getEndpoints()
+                    .stream()
+                    .filter(e -> e.getName().equals(br.getResourceName()))
+                    .findAny()
+                    .ifPresent(e -> br.setStatus(e.getStatus()));
+        } else if (BillingResourceType.EXPLORATORY == resourceType) {
+            exploratoryService.getUserInstance(br.getUser(), br.getProject(), br.getResourceName())
+                    .ifPresent(ui -> br.setStatus(UserInstanceStatus.of(ui.getStatus())));
+        } else if (BillingResourceType.COMPUTATIONAL == resourceType) {
+            exploratoryService.getUserInstance(br.getUser(), br.getProject(), br.getExploratoryName(), true)
+                    .flatMap(ui -> ui.getResources()
+                            .stream()
+                            .filter(cr -> cr.getComputationalName().equals(br.getResourceName()))
+                            .findAny())
+                    .ifPresent(cr -> br.setStatus(UserInstanceStatus.of(cr.getStatus())));
+        }
+    }
+
+    private boolean isFullReport(UserInfo userInfo) {
+        return UserRoles.checkAccess(userInfo, RoleType.PAGE, "/api/infrastructure_provision/billing",
+                userInfo.getRoles());
+    }
+
+    private void setUserFilter(UserInfo userInfo, BillingFilter filter) {
+        if (!isFullReport(userInfo)) {
+            filter.setUsers(Lists.newArrayList(userInfo.getName()));
+        }
+    }
+
+    private BillingReportLine toBillingReport(BillingData billingData, BillingReportLine billingReportLine) {
+        return BillingReportLine.builder()
+                .application(billingData.getApplication())
+                .cost(billingData.getCost())
+                .currency(billingData.getCurrency())
+                .product(billingData.getProduct())
+                .project(billingReportLine.getProject())
+                .endpoint(billingReportLine.getEndpoint())
+                .usageDateFrom(billingData.getUsageDateFrom())
+                .usageDateTo(billingData.getUsageDateTo())
+                .usageDate(billingData.getUsageDate())
+                .usageType(billingData.getUsageType())
+                .user(billingReportLine.getUser())
+                .dlabId(billingData.getTag())
+                .resourceType(billingReportLine.getResourceType())
+                .resourceName(billingReportLine.getResourceName())
+                .shape(billingReportLine.getShape())
+                .exploratoryName(billingReportLine.getExploratoryName())
+                .build();
+    }
+}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EndpointServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EndpointServiceImpl.java
index 9c71f76..57c6549 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EndpointServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EndpointServiceImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service.impl;
 
 import com.epam.dlab.auth.UserInfo;
@@ -138,7 +157,7 @@
 			cloudProvider = response.readEntity(CloudProvider.class);
 		} catch (Exception e) {
 			log.error("Cannot connect to url '{}'. {}", url, e.getMessage());
-			throw new DlabException(String.format("Cannot connect to url '%s'", url), e);
+			throw new DlabException(String.format("Cannot connect to url '%s'. %s", url, e.getMessage()));
 		}
 		if (response.getStatus() != 200) {
 			log.warn("Endpoint url {} is not valid", url);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImpl.java
index 3dfaaae..8b2806b 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImpl.java
@@ -20,6 +20,9 @@
 package com.epam.dlab.backendapi.service.impl;
 
 import com.epam.dlab.auth.UserInfo;
+import com.epam.dlab.backendapi.annotation.Project;
+import com.epam.dlab.backendapi.annotation.ProjectAdmin;
+import com.epam.dlab.backendapi.annotation.User;
 import com.epam.dlab.backendapi.dao.EnvDAO;
 import com.epam.dlab.backendapi.dao.ExploratoryDAO;
 import com.epam.dlab.backendapi.dao.UserSettingsDAO;
@@ -52,23 +55,29 @@
 @Singleton
 @Slf4j
 public class EnvironmentServiceImpl implements EnvironmentService {
-
 	private static final String ERROR_MSG_FORMAT = "Can not %s environment because on of user resource is in status " +
 			"CREATING or STARTING";
+
+	private final EnvDAO envDAO;
+	private final UserSettingsDAO settingsDAO;
+	private final ExploratoryDAO exploratoryDAO;
+	private final ExploratoryService exploratoryService;
+	private final ComputationalService computationalService;
+	private final SecurityService securityService;
+	private final ProjectService projectService;
+
 	@Inject
-	private EnvDAO envDAO;
-	@Inject
-	private ExploratoryDAO exploratoryDAO;
-	@Inject
-	private ExploratoryService exploratoryService;
-	@Inject
-	private ComputationalService computationalService;
-	@Inject
-	private SecurityService securityService;
-	@Inject
-	private ProjectService projectService;
-	@Inject
-	private UserSettingsDAO settingsDAO;
+	public EnvironmentServiceImpl(EnvDAO envDAO, UserSettingsDAO settingsDAO, ExploratoryDAO exploratoryDAO,
+								  ExploratoryService exploratoryService, ComputationalService computationalService,
+								  SecurityService securityService, ProjectService projectService) {
+		this.envDAO = envDAO;
+		this.settingsDAO = settingsDAO;
+		this.exploratoryDAO = exploratoryDAO;
+		this.exploratoryService = exploratoryService;
+		this.computationalService = computationalService;
+		this.securityService = securityService;
+		this.projectService = projectService;
+	}
 
 	@Override
 	public List<UserDTO> getUsers() {
@@ -87,18 +96,13 @@
 	}
 
 	@Override
-	public Set<String> getUserNames() {
-		log.debug("Getting all users...");
-		return envDAO.fetchAllUsers();
-	}
-
-	@Override
-	public List<UserResourceInfo> getAllEnv() {
+	public List<UserResourceInfo> getAllEnv(UserInfo user) {
 		log.debug("Getting all user's environment...");
 		List<UserInstanceDTO> expList = exploratoryDAO.getInstances();
-		return projectService.getProjects()
+		return projectService.getProjects(user)
 				.stream()
-				.map(projectDTO -> getProjectEnv(projectDTO, expList)).flatMap(Collection::stream)
+				.map(projectDTO -> getProjectEnv(projectDTO, expList))
+				.flatMap(Collection::stream)
 				.collect(toList());
 	}
 
@@ -112,14 +116,6 @@
 	}
 
 	@Override
-	public void stopEnvironment(UserInfo userInfo, String user, String project) {
-		log.debug("Stopping environment for user {}", user);
-		checkState(user, "stop");
-		exploratoryDAO.fetchRunningExploratoryFields(user)
-				.forEach(e -> stopExploratory(userInfo, user, project, e.getExploratoryName()));
-	}
-
-	@Override
 	public void stopEnvironmentWithServiceAccount(String user) {
 		log.debug("Stopping environment for user {} by scheduler", user);
 		checkState(user, "stop");
@@ -140,24 +136,30 @@
 						endpoint.getName(), project));
 	}
 
+	@ProjectAdmin
 	@Override
-	public void stopExploratory(UserInfo userInfo, String user, String project, String exploratoryName) {
+	public void stopExploratory(@User UserInfo userInfo, String user, @Project String project, String exploratoryName) {
 		exploratoryService.stop(new UserInfo(user, userInfo.getAccessToken()), project, exploratoryName);
 	}
 
+	@ProjectAdmin
 	@Override
-	public void stopComputational(UserInfo userInfo, String user, String project, String exploratoryName, String computationalName) {
+	public void stopComputational(@User UserInfo userInfo, String user, @Project String project, String exploratoryName,
+								  String computationalName) {
 		computationalService.stopSparkCluster(new UserInfo(user, userInfo.getAccessToken()), project, exploratoryName,
 				computationalName);
 	}
 
+	@ProjectAdmin
 	@Override
-	public void terminateExploratory(UserInfo userInfo, String user, String project, String exploratoryName) {
+	public void terminateExploratory(@User UserInfo userInfo, String user, @Project String project, String exploratoryName) {
 		exploratoryService.terminate(new UserInfo(user, userInfo.getAccessToken()), project, exploratoryName);
 	}
 
+	@ProjectAdmin
 	@Override
-	public void terminateComputational(UserInfo userInfo, String user, String project, String exploratoryName, String computationalName) {
+	public void terminateComputational(@User UserInfo userInfo, String user, @Project String project,
+									   String exploratoryName, String computationalName) {
 		computationalService.terminateComputational(new UserInfo(user, userInfo.getAccessToken()), project, exploratoryName,
 				computationalName);
 	}
@@ -185,8 +187,10 @@
 	}
 
 	private List<UserResourceInfo> getProjectEnv(ProjectDTO projectDTO, List<UserInstanceDTO> allInstances) {
-		final Stream<UserResourceInfo> userResources = allInstances.stream()
-				.filter(instance -> instance.getProject().equals(projectDTO.getName())).map(this::toUserResourceInfo);
+		final Stream<UserResourceInfo> userResources = allInstances
+				.stream()
+				.filter(instance -> instance.getProject().equals(projectDTO.getName()))
+				.map(this::toUserResourceInfo);
 		if (projectDTO.getEndpoints() != null) {
 			final Stream<UserResourceInfo> edges = projectDTO.getEndpoints()
 					.stream()
@@ -194,8 +198,7 @@
 							.withResourceStatus(e.getStatus().toString())
 							.withProject(projectDTO.getName())
 							.withIp(e.getEdgeInfo() != null ? e.getEdgeInfo().getPublicIp() : null));
-			return Stream.concat(edges, userResources)
-					.collect(toList());
+			return Stream.concat(edges, userResources).collect(toList());
 		} else {
 			return userResources.collect(toList());
 		}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ExploratoryServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ExploratoryServiceImpl.java
index 77d870b..9f6be91 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ExploratoryServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ExploratoryServiceImpl.java
@@ -61,6 +61,7 @@
 import java.util.List;
 import java.util.Map;
 import java.util.Optional;
+import java.util.Set;
 import java.util.stream.Collectors;
 
 import static com.epam.dlab.dto.UserInstanceStatus.CREATING;
@@ -188,12 +189,36 @@
 		try {
 			return Optional.of(exploratoryDAO.fetchExploratoryFields(user, project, exploratoryName));
 		} catch (DlabException e) {
-			log.warn("User instance with exploratory name {} for user {} not found.", exploratoryName, user);
+			log.warn("User instance with exploratory {}, project {} for user {} not found.", exploratoryName, project, user);
 		}
 		return Optional.empty();
 	}
 
 	@Override
+	public Optional<UserInstanceDTO> getUserInstance(String user, String project, String exploratoryName, boolean includeCompResources) {
+		try {
+			return Optional.of(exploratoryDAO.fetchExploratoryFields(user, project, exploratoryName, includeCompResources));
+		} catch (DlabException e) {
+			log.warn("User instance with exploratory {}, project {} for user {} not found.", exploratoryName, project, user);
+		}
+		return Optional.empty();
+	}
+
+	@Override
+	public List<UserInstanceDTO> findAll() {
+		return exploratoryDAO.getInstances();
+	}
+
+	@Override
+	public List<UserInstanceDTO> findAll(Set<ProjectDTO> projects) {
+		List<String> projectNames = projects
+				.stream()
+				.map(ProjectDTO::getName)
+				.collect(Collectors.toList());
+		return exploratoryDAO.fetchExploratoryFieldsForProjectWithComp(projectNames);
+	}
+
+	@Override
 	public List<ClusterConfig> getClusterConfig(UserInfo user, String project, String exploratoryName) {
 		return exploratoryDAO.getClusterConfig(user.getName(), project, exploratoryName);
 	}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/GuacamoleServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/GuacamoleServiceImpl.java
index 2394595..555479b 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/GuacamoleServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/GuacamoleServiceImpl.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service.impl;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImpl.java
index 85ce534..5cb3a64 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImpl.java
@@ -24,9 +24,11 @@
 import com.epam.dlab.backendapi.dao.ExploratoryLibDAO;
 import com.epam.dlab.backendapi.dao.ImageExploratoryDao;
 import com.epam.dlab.backendapi.domain.EndpointDTO;
+import com.epam.dlab.backendapi.domain.ProjectDTO;
 import com.epam.dlab.backendapi.resources.dto.ImageInfoRecord;
 import com.epam.dlab.backendapi.service.EndpointService;
 import com.epam.dlab.backendapi.service.ImageExploratoryService;
+import com.epam.dlab.backendapi.service.ProjectService;
 import com.epam.dlab.backendapi.util.RequestBuilder;
 import com.epam.dlab.constants.ServiceConsts;
 import com.epam.dlab.dto.UserInstanceDTO;
@@ -71,10 +73,12 @@
 	private RequestBuilder requestBuilder;
 	@Inject
 	private EndpointService endpointService;
+	@Inject
+	private ProjectService projectService;
 
 	@Override
 	public String createImage(UserInfo user, String project, String exploratoryName, String imageName, String imageDescription) {
-
+		ProjectDTO projectDTO = projectService.get(project);
 		UserInstanceDTO userInstance = exploratoryDAO.fetchRunningExploratoryFields(user.getName(), project, exploratoryName);
 
 		if (imageExploratoryDao.exist(imageName, userInstance.getProject())) {
@@ -105,7 +109,7 @@
 		EndpointDTO endpointDTO = endpointService.get(userInstance.getEndpoint());
 		return provisioningService.post(endpointDTO.getUrl() + ExploratoryAPI.EXPLORATORY_IMAGE,
 				user.getAccessToken(),
-				requestBuilder.newExploratoryImageCreate(user, userInstance, imageName, endpointDTO), String.class);
+				requestBuilder.newExploratoryImageCreate(user, userInstance, imageName, endpointDTO, projectDTO), String.class);
 	}
 
 	@Override
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InfrastructureInfoServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InfrastructureInfoServiceImpl.java
index ecfe6cf..b1eac51 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InfrastructureInfoServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InfrastructureInfoServiceImpl.java
@@ -22,12 +22,15 @@
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
 import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.dao.EnvDAO;
 import com.epam.dlab.backendapi.dao.ExploratoryDAO;
+import com.epam.dlab.backendapi.domain.BillingReport;
 import com.epam.dlab.backendapi.domain.EndpointDTO;
 import com.epam.dlab.backendapi.domain.ProjectEndpointDTO;
+import com.epam.dlab.backendapi.resources.dto.HealthStatusEnum;
 import com.epam.dlab.backendapi.resources.dto.HealthStatusPageDTO;
 import com.epam.dlab.backendapi.resources.dto.ProjectInfrastructureInfo;
+import com.epam.dlab.backendapi.roles.UserRoles;
+import com.epam.dlab.backendapi.service.BillingService;
 import com.epam.dlab.backendapi.service.EndpointService;
 import com.epam.dlab.backendapi.service.InfrastructureInfoService;
 import com.epam.dlab.backendapi.service.ProjectService;
@@ -42,10 +45,11 @@
 import lombok.extern.slf4j.Slf4j;
 import org.bson.Document;
 
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
-import java.util.Objects;
+import java.util.Optional;
 import java.util.stream.Collectors;
 import java.util.stream.StreamSupport;
 
@@ -54,28 +58,32 @@
 
 	private static final String RELEASE_NOTES_FORMAT = "https://github.com/apache/incubator-dlab/blob/%s" +
 			"/RELEASE_NOTES.md";
-	@Inject
-	private ExploratoryDAO expDAO;
-	@Inject
-	private EnvDAO envDAO;
-	@Inject
-	private SelfServiceApplicationConfiguration configuration;
-	@Inject
-	private BillingDAO billingDAO;
-	@Inject
-	private ProjectService projectService;
-	@Inject
-	private EndpointService endpointService;
+	private final ExploratoryDAO expDAO;
+	private final SelfServiceApplicationConfiguration configuration;
+	private final BillingDAO billingDAO;
+	private final ProjectService projectService;
+	private final EndpointService endpointService;
+	private final BillingService billingService;
 
+	@Inject
+	public InfrastructureInfoServiceImpl(ExploratoryDAO expDAO, SelfServiceApplicationConfiguration configuration,
+										 BillingDAO billingDAO, ProjectService projectService, EndpointService endpointService,
+										 BillingService billingService) {
+		this.expDAO = expDAO;
+		this.configuration = configuration;
+		this.billingDAO = billingDAO;
+		this.projectService = projectService;
+		this.endpointService = endpointService;
+		this.billingService = billingService;
+	}
 
 	@Override
-	public List<ProjectInfrastructureInfo> getUserResources(String user) {
+	public List<ProjectInfrastructureInfo> getUserResources(UserInfo user) {
 		log.debug("Loading list of provisioned resources for user {}", user);
 		try {
-			Iterable<Document> documents = expDAO.findExploratory(user);
+			Iterable<Document> documents = expDAO.findExploratory(user.getName());
 			List<EndpointDTO> allEndpoints = endpointService.getEndpoints();
-			return StreamSupport.stream(documents.spliterator(),
-					false)
+			return StreamSupport.stream(documents.spliterator(), false)
 					.collect(Collectors.groupingBy(d -> d.getString("project")))
 					.entrySet()
 					.stream()
@@ -85,12 +93,25 @@
 								.filter(endpoint -> endpoints.stream()
 										.anyMatch(endpoint1 -> endpoint1.getName().equals(endpoint.getName())))
 								.collect(Collectors.toList());
+
+						List<BillingReport> billingData = e.getValue()
+								.stream()
+								.map(exp ->
+										billingService.getExploratoryBillingData(exp.getString("project"), exp.getString("endpoint"),
+												exp.getString("exploratory_name"),
+												Optional.ofNullable(exp.get("computational_resources")).map(cr -> (List<Document>) cr).orElse(Collections.emptyList())
+														.stream()
+														.map(cr -> cr.getString("computational_name"))
+														.collect(Collectors.toList()))
+								)
+								.collect(Collectors.toList());
+
 						final Map<String, Map<String, String>> projectEdges =
-								endpoints.stream()
-										.collect(Collectors.toMap(ProjectEndpointDTO::getName,
-												endpointDTO -> getSharedInfo(endpointDTO.getEdgeInfo())));
-						return new ProjectInfrastructureInfo(e.getKey(),
-								billingDAO.getBillingProjectQuoteUsed(e.getKey()), projectEdges, e.getValue(), endpointResult);
+								endpoints
+										.stream()
+										.collect(Collectors.toMap(ProjectEndpointDTO::getName, this::getSharedInfo));
+						return new ProjectInfrastructureInfo(e.getKey(), billingDAO.getBillingProjectQuoteUsed(e.getKey()),
+								projectEdges, e.getValue(), billingData, endpointResult);
 					})
 					.collect(Collectors.toList());
 		} catch (Exception e) {
@@ -100,17 +121,20 @@
 	}
 
 	@Override
-	public HealthStatusPageDTO getHeathStatus(UserInfo userInfo, boolean fullReport, boolean isAdmin) {
+	public HealthStatusPageDTO getHeathStatus(UserInfo userInfo, boolean fullReport) {
 		final String user = userInfo.getName();
 		log.debug("Request the status of resources for user {}, report type {}", user, fullReport);
 		try {
-
-			return envDAO.getHealthStatusPageDTO(user, fullReport)
-					.withBillingEnabled(configuration.isBillingSchedulerEnabled())
-					.withAdmin(isAdmin)
-					.withProjectAssinged(projectService.isAnyProjectAssigned(userInfo))
-					.withBillingQuoteUsed(billingDAO.getBillingQuoteUsed())
-					.withBillingUserQuoteUsed(billingDAO.getBillingUserQuoteUsed(user));
+			return HealthStatusPageDTO.builder()
+					.status(HealthStatusEnum.OK.toString())
+					.listResources(Collections.emptyList())
+					.billingEnabled(configuration.isBillingSchedulerEnabled())
+					.projectAdmin(UserRoles.isProjectAdmin(userInfo))
+					.admin(UserRoles.isAdmin(userInfo))
+					.projectAssigned(projectService.isAnyProjectAssigned(userInfo))
+					.billingQuoteUsed(billingDAO.getBillingQuoteUsed())
+					.billingUserQuoteUsed(billingDAO.getBillingUserQuoteUsed(user))
+					.build();
 		} catch (Exception e) {
 			log.warn("Could not return status of resources for user {}: {}", user, e.getLocalizedMessage(), e);
 			throw new DlabException(e.getMessage(), e);
@@ -128,18 +152,22 @@
 				.build();
 	}
 
-	private Map<String, String> getSharedInfo(EdgeInfo edgeInfo) {
-		Map<String, String> shared = new HashMap<>();
-		if (Objects.isNull(edgeInfo)) {
-			return shared;
+	private Map<String, String> getSharedInfo(ProjectEndpointDTO endpointDTO) {
+		Optional<EdgeInfo> edgeInfo = Optional.ofNullable(endpointDTO.getEdgeInfo());
+		if (!edgeInfo.isPresent()) {
+			return Collections.emptyMap();
 		}
-		shared.put("edge_node_ip", edgeInfo.getPublicIp());
-		if (edgeInfo instanceof EdgeInfoAws) {
-			EdgeInfoAws edgeInfoAws = (EdgeInfoAws) edgeInfo;
+		EdgeInfo edge = edgeInfo.get();
+		Map<String, String> shared = new HashMap<>();
+
+		shared.put("status", endpointDTO.getStatus().toString());
+		shared.put("edge_node_ip", edge.getPublicIp());
+		if (edge instanceof EdgeInfoAws) {
+			EdgeInfoAws edgeInfoAws = (EdgeInfoAws) edge;
 			shared.put("user_own_bicket_name", edgeInfoAws.getUserOwnBucketName());
 			shared.put("shared_bucket_name", edgeInfoAws.getSharedBucketName());
-		} else if (edgeInfo instanceof EdgeInfoAzure) {
-			EdgeInfoAzure edgeInfoAzure = (EdgeInfoAzure) edgeInfo;
+		} else if (edge instanceof EdgeInfoAzure) {
+			EdgeInfoAzure edgeInfoAzure = (EdgeInfoAzure) edge;
 			shared.put("user_container_name", edgeInfoAzure.getUserContainerName());
 			shared.put("shared_container_name", edgeInfoAzure.getSharedContainerName());
 			shared.put("user_storage_account_name", edgeInfoAzure.getUserStorageAccountName());
@@ -147,8 +175,8 @@
 			shared.put("datalake_name", edgeInfoAzure.getDataLakeName());
 			shared.put("datalake_user_directory_name", edgeInfoAzure.getDataLakeDirectoryName());
 			shared.put("datalake_shared_directory_name", edgeInfoAzure.getDataLakeSharedDirectoryName());
-		} else if (edgeInfo instanceof EdgeInfoGcp) {
-			EdgeInfoGcp edgeInfoGcp = (EdgeInfoGcp) edgeInfo;
+		} else if (edge instanceof EdgeInfoGcp) {
+			EdgeInfoGcp edgeInfoGcp = (EdgeInfoGcp) edge;
 			shared.put("user_own_bucket_name", edgeInfoGcp.getUserOwnBucketName());
 			shared.put("shared_bucket_name", edgeInfoGcp.getSharedBucketName());
 		}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java
index 0e5a2d5..11d4b62 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/ProjectServiceImpl.java
@@ -1,17 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service.impl;
 
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.annotation.BudgetLimited;
 import com.epam.dlab.backendapi.annotation.Project;
+import com.epam.dlab.backendapi.annotation.ProjectAdmin;
+import com.epam.dlab.backendapi.annotation.User;
 import com.epam.dlab.backendapi.dao.ExploratoryDAO;
 import com.epam.dlab.backendapi.dao.ProjectDAO;
 import com.epam.dlab.backendapi.dao.UserGroupDao;
 import com.epam.dlab.backendapi.domain.EndpointDTO;
 import com.epam.dlab.backendapi.domain.ProjectDTO;
 import com.epam.dlab.backendapi.domain.ProjectEndpointDTO;
-import com.epam.dlab.backendapi.domain.ProjectManagingDTO;
 import com.epam.dlab.backendapi.domain.RequestId;
 import com.epam.dlab.backendapi.domain.UpdateProjectDTO;
+import com.epam.dlab.backendapi.roles.UserRoles;
 import com.epam.dlab.backendapi.service.EndpointService;
 import com.epam.dlab.backendapi.service.ExploratoryService;
 import com.epam.dlab.backendapi.service.ProjectService;
@@ -29,7 +50,6 @@
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
-import java.util.Objects;
 import java.util.Set;
 import java.util.function.Supplier;
 import java.util.stream.Collectors;
@@ -80,10 +100,10 @@
 	}
 
 	@Override
-	public List<ProjectManagingDTO> getProjectsForManaging() {
-		return projectDAO.getProjects().stream().map(p -> new ProjectManagingDTO(
-				p.getName(), p.getBudget(), isCanBeStopped(p), isCanBeTerminated(p)))
-				.filter(projectManagingDTO -> projectManagingDTO.isCanBeTerminated())
+	public List<ProjectDTO> getProjects(UserInfo user) {
+		return projectDAO.getProjects()
+				.stream()
+				.filter(project -> UserRoles.isProjectAdmin(user, project.getGroups()) || UserRoles.isAdmin(user))
 				.collect(Collectors.toList());
 	}
 
@@ -93,11 +113,6 @@
 	}
 
 	@Override
-	public List<ProjectDTO> getProjectsWithStatus(ProjectDTO.Status status) {
-		return projectDAO.getProjectsWithStatus(status);
-	}
-
-	@Override
 	public List<ProjectDTO> getProjectsByEndpoint(String endpointName) {
 		return projectDAO.getProjectsByEndpoint(endpointName);
 	}
@@ -126,21 +141,13 @@
 		exploratoryService.updateProjectExploratoryStatuses(name, endpoint, UserInstanceStatus.TERMINATING);
 	}
 
+	@ProjectAdmin
 	@Override
-	public void terminateEndpoint(UserInfo userInfo, List<String> endpoints, String name) {
+	public void terminateEndpoint(@User UserInfo userInfo, List<String> endpoints, @Project String name) {
+
 		endpoints.forEach(endpoint -> terminateEndpoint(userInfo, endpoint, name));
 	}
 
-	@Override
-	public void terminateProject(UserInfo userInfo, String name) {
-		List<ProjectEndpointDTO> endpoints = get(name).getEndpoints();
-		checkProjectRelatedResourcesInProgress(name, endpoints, TERMINATE_ACTION);
-
-		endpoints.stream()
-				.map(ProjectEndpointDTO::getName)
-				.forEach(endpoint -> terminateEndpoint(userInfo, endpoint, name));
-	}
-
 	@BudgetLimited
 	@Override
 	public void start(UserInfo userInfo, String endpoint, @Project String name) {
@@ -148,8 +155,9 @@
 		projectDAO.updateEdgeStatus(name, endpoint, UserInstanceStatus.STARTING);
 	}
 
+	@ProjectAdmin
 	@Override
-	public void start(UserInfo userInfo, List<String> endpoints, String name) {
+	public void start(@User UserInfo userInfo, List<String> endpoints, @Project String name) {
 		endpoints.forEach(endpoint -> start(userInfo, endpoint, name));
 	}
 
@@ -159,13 +167,9 @@
 		projectDAO.updateEdgeStatus(name, endpoint, UserInstanceStatus.STOPPING);
 	}
 
+	@ProjectAdmin
 	@Override
-	public void stop(UserInfo userInfo, List<String> endpoints, String name) {
-		endpoints.forEach(endpoint -> stop(userInfo, endpoint, name));
-	}
-
-	@Override
-	public void stopWithResources(UserInfo userInfo, List<String> endpoints, String projectName) {
+	public void stopWithResources(@User UserInfo userInfo, List<String> endpoints, @Project String projectName) {
 		List<ProjectEndpointDTO> endpointDTOs = get(projectName)
 				.getEndpoints()
 				.stream()
@@ -185,8 +189,9 @@
 				.forEach(e -> stop(userInfo, e.getName(), projectName));
 	}
 
+	@ProjectAdmin
 	@Override
-	public void update(UserInfo userInfo, UpdateProjectDTO projectDTO) {
+	public void update(@User UserInfo userInfo, UpdateProjectDTO projectDTO, @Project String projectName) {
 		final ProjectDTO project = projectDAO.get(projectDTO.getName()).orElseThrow(projectNotFound());
 		final Set<String> endpoints = project.getEndpoints()
 				.stream()
@@ -204,11 +209,6 @@
 	}
 
 	@Override
-	public void updateBudget(String project, Integer budget) {
-		projectDAO.updateBudget(project, budget);
-	}
-
-	@Override
 	public void updateBudget(List<ProjectDTO> projects) {
 		projects.forEach(p -> projectDAO.updateBudget(p.getName(), p.getBudget()));
 	}
@@ -273,20 +273,6 @@
 		}
 	}
 
-	private boolean isCanBeStopped(ProjectDTO projectDTO) {
-        List<ProjectEndpointDTO> endpoints = projectDTO.getEndpoints();
-        return !endpoints.stream().allMatch(e -> exploratoryDAO.fetchProjectExploratoriesWhereStatusNotIn(
-                projectDTO.getName(), e.getName(), UserInstanceStatus.STOPPED, UserInstanceStatus.TERMINATED,
-                UserInstanceStatus.TERMINATING).isEmpty()) ||
-				endpoints.stream().anyMatch(e -> Arrays.asList(UserInstanceStatus.RUNNING, UserInstanceStatus.STARTING)
-						.contains(e.getStatus()));
-	}
-
-	private boolean isCanBeTerminated(ProjectDTO projectDTO) {
-        return !projectDTO.getEndpoints().stream().allMatch(e -> Objects.equals(UserInstanceStatus.TERMINATED,
-                e.getStatus()));
-	}
-
 	private Supplier<ResourceNotFoundException> projectNotFound() {
 		return () -> new ResourceNotFoundException("Project with passed name not found");
 	}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImpl.java
index 0a71587..9eb25c3 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImpl.java
@@ -18,73 +18,69 @@
  */
 package com.epam.dlab.backendapi.service.impl;
 
+import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.dao.ProjectDAO;
 import com.epam.dlab.backendapi.dao.UserGroupDao;
 import com.epam.dlab.backendapi.dao.UserRoleDao;
 import com.epam.dlab.backendapi.domain.ProjectDTO;
 import com.epam.dlab.backendapi.resources.dto.UserGroupDto;
+import com.epam.dlab.backendapi.resources.dto.UserRoleDto;
+import com.epam.dlab.backendapi.roles.UserRoles;
+import com.epam.dlab.backendapi.service.ProjectService;
 import com.epam.dlab.backendapi.service.UserGroupService;
 import com.epam.dlab.dto.UserInstanceStatus;
+import com.epam.dlab.exceptions.DlabException;
 import com.epam.dlab.exceptions.ResourceConflictException;
 import com.epam.dlab.exceptions.ResourceNotFoundException;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
 import lombok.extern.slf4j.Slf4j;
 
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
 import java.util.Set;
+import java.util.stream.Collectors;
 
 @Singleton
 @Slf4j
 public class UserGroupServiceImpl implements UserGroupService {
-
 	private static final String ROLE_NOT_FOUND_MSG = "Any of role : %s were not found";
+	private static final String ADMIN = "admin";
+	private static final String PROJECT_ADMIN = "projectAdmin";
+
 	@Inject
 	private UserGroupDao userGroupDao;
 	@Inject
 	private UserRoleDao userRoleDao;
 	@Inject
 	private ProjectDAO projectDAO;
+	@Inject
+	private ProjectService projectService;
 
 	@Override
 	public void createGroup(String group, Set<String> roleIds, Set<String> users) {
 		checkAnyRoleFound(roleIds, userRoleDao.addGroupToRole(Collections.singleton(group), roleIds));
-		if (!users.isEmpty()) {
-			log.debug("Adding users {} to group {}", users, group);
-			userGroupDao.addUsers(group, users);
-		}
-	}
-
-	@Override
-	public void updateGroup(String group, Set<String> roleIds, Set<String> users) {
-		log.debug("Updating users for group {}: {}", group, users);
-		userGroupDao.updateUsers(group, users);
-		log.debug("Removing group {} from existing roles", group);
-		userRoleDao.removeGroupWhenRoleNotIn(group, roleIds);
-		log.debug("Adding group {} to roles {}", group, roleIds);
-		userRoleDao.addGroupToRole(Collections.singleton(group), roleIds);
-	}
-
-	@Override
-	public void addUsersToGroup(String group, Set<String> users) {
+		log.debug("Adding users {} to group {}", users, group);
 		userGroupDao.addUsers(group, users);
 	}
 
 	@Override
-	public void updateRolesForGroup(String group, Set<String> roleIds) {
-		userRoleDao.removeGroupWhenRoleNotIn(group, roleIds);
-		checkAnyRoleFound(roleIds, userRoleDao.addGroupToRole(Collections.singleton(group), roleIds));
-	}
-
-	@Override
-	public void removeUserFromGroup(String group, String user) {
-		userGroupDao.removeUser(group, user);
-	}
-
-	@Override
-	public void removeGroupFromRole(Set<String> groups, Set<String> roleIds) {
-		checkAnyRoleFound(roleIds, userRoleDao.removeGroupFromRole(groups, roleIds));
+	public void updateGroup(UserInfo user, String group, Set<String> roleIds, Set<String> users) {
+		if (UserRoles.isAdmin(user)) {
+			updateGroup(group, roleIds, users);
+		} else if (UserRoles.isProjectAdmin(user)) {
+			projectService.getProjects(user)
+					.stream()
+					.map(ProjectDTO::getGroups)
+					.flatMap(Collection::stream)
+					.filter(g -> g.equalsIgnoreCase(group))
+					.findAny()
+					.orElseThrow(() -> new DlabException(String.format("User %s doesn't have appropriate permission", user.getName())));
+			updateGroup(group, roleIds, users);
+		} else {
+			throw new DlabException(String.format("User %s doesn't have appropriate permission", user.getName()));
+		}
 	}
 
 	@Override
@@ -102,8 +98,39 @@
 	}
 
 	@Override
-	public List<UserGroupDto> getAggregatedRolesByGroup() {
-		return userRoleDao.aggregateRolesByGroup();
+	public List<UserGroupDto> getAggregatedRolesByGroup(UserInfo user) {
+		if (UserRoles.isAdmin(user)) {
+			return userRoleDao.aggregateRolesByGroup();
+		} else if (UserRoles.isProjectAdmin(user)) {
+			Set<String> groups = projectService.getProjects(user)
+					.stream()
+					.map(ProjectDTO::getGroups)
+					.flatMap(Collection::stream)
+					.collect(Collectors.toSet());
+			return userRoleDao.aggregateRolesByGroup()
+					.stream()
+					.filter(userGroup -> groups.contains(userGroup.getGroup()) && !containsAdministrationPermissions(userGroup))
+					.collect(Collectors.toList());
+		} else {
+			throw new DlabException(String.format("User %s doesn't have appropriate permission", user.getName()));
+		}
+	}
+
+	private boolean containsAdministrationPermissions(UserGroupDto userGroup) {
+		List<String> ids = userGroup.getRoles()
+				.stream()
+				.map(UserRoleDto::getId)
+				.collect(Collectors.toList());
+		return ids.contains(ADMIN) || ids.contains(PROJECT_ADMIN);
+	}
+
+	private void updateGroup(String group, Set<String> roleIds, Set<String> users) {
+		log.debug("Updating users for group {}: {}", group, users);
+		userGroupDao.updateUsers(group, users);
+		log.debug("Removing group {} from existing roles", group);
+		userRoleDao.removeGroupWhenRoleNotIn(group, roleIds);
+		log.debug("Adding group {} to roles {}", group, roleIds);
+		userRoleDao.addGroupToRole(Collections.singleton(group), roleIds);
 	}
 
 	private void checkAnyRoleFound(Set<String> roleIds, boolean anyRoleFound) {
@@ -111,6 +138,4 @@
 			throw new ResourceNotFoundException(String.format(ROLE_NOT_FOUND_MSG, roleIds));
 		}
 	}
-
-
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/servlet/guacamole/GuacamoleServlet.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/servlet/guacamole/GuacamoleServlet.java
index d74c68d..9c79bfa 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/servlet/guacamole/GuacamoleServlet.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/servlet/guacamole/GuacamoleServlet.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.servlet.guacamole;
 
 import com.epam.dlab.auth.UserInfo;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/BillingUtils.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/BillingUtils.java
new file mode 100644
index 0000000..7e46a09
--- /dev/null
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/BillingUtils.java
@@ -0,0 +1,228 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.backendapi.util;
+
+import com.epam.dlab.backendapi.domain.BillingReportLine;
+import com.epam.dlab.backendapi.resources.dto.ImageInfoRecord;
+import com.epam.dlab.dto.UserInstanceDTO;
+import com.epam.dlab.dto.UserInstanceStatus;
+import com.epam.dlab.dto.base.DataEngineType;
+import com.epam.dlab.dto.computational.UserComputationalResource;
+import jersey.repackaged.com.google.common.collect.Lists;
+import org.apache.commons.lang3.StringUtils;
+
+import java.time.LocalDate;
+import java.time.format.DateTimeFormatter;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Objects;
+import java.util.Optional;
+import java.util.stream.Stream;
+
+import static com.epam.dlab.dto.billing.BillingResourceType.BUCKET;
+import static com.epam.dlab.dto.billing.BillingResourceType.COMPUTATIONAL;
+import static com.epam.dlab.dto.billing.BillingResourceType.EDGE;
+import static com.epam.dlab.dto.billing.BillingResourceType.ENDPOINT;
+import static com.epam.dlab.dto.billing.BillingResourceType.EXPLORATORY;
+import static com.epam.dlab.dto.billing.BillingResourceType.IMAGE;
+import static com.epam.dlab.dto.billing.BillingResourceType.SSN;
+import static com.epam.dlab.dto.billing.BillingResourceType.VOLUME;
+
+/**
+ * Static helpers that expand DLab entities (SSN, endpoints, edges, exploratories,
+ * computational resources, images) into {@link BillingReportLine} streams keyed by the
+ * cloud-resource naming convention ("dlabId"), and that format billing-report CSV output.
+ *
+ * NOTE(review): utility class with only static members — consider a private constructor
+ * to prevent instantiation (not added here to keep this patch textual-only).
+ */
+public class BillingUtils {
+    // Notebook template names used to derive the per-template standard image ids.
+    // NOTE(review): order matters only for report row ordering; "tensor-rstudio" must
+    // precede "rstudio"/"tensor" if ever matched by prefix — here they are only formatted.
+    private static final String[] AVAILABLE_NOTEBOOKS = {"zeppelin", "tensor-rstudio", "rstudio", "tensor", "superset", "jupyterlab", "jupyter", "deeplearning"};
+    // CSV column headers; index 1 ("User") is removed for non-full reports (see getHeader).
+    private static final String[] REPORT_HEADERS = {"DLab ID", "User", "Project", "DLab Resource Type", "Status", "Shape", "Product", "Cost"};
+    // %s placeholders: service base name, from-date, to-date (ISO format or empty).
+    private static final String REPORT_FIRST_LINE = "Service base name: %s. Available reporting period from: %s to: %s";
+    // %s placeholders: total cost, currency.
+    private static final String TOTAL_LINE = "Total: %s %s";
+    // dlabId naming templates; arguments are lowercased at the call sites.
+    private static final String SSN_FORMAT = "%s-ssn";
+    private static final String ENDPOINT_FORMAT = "%s-%s-endpoint";
+    private static final String EDGE_FORMAT = "%s-%s-%s-edge";
+    private static final String EDGE_VOLUME_FORMAT = "%s-%s-%s-edge-volume-primary";
+    private static final String PROJECT_ENDPOINT_BUCKET_FORMAT = "%s-%s-%s-bucket";
+    private static final String ENDPOINT_SHARED_BUCKET_FORMAT = "%s-%s-shared-bucket";
+    private static final String VOLUME_PRIMARY_FORMAT = "%s-volume-primary";
+    private static final String VOLUME_PRIMARY_COMPUTATIONAL_FORMAT = "%s-%s-volume-primary";
+    private static final String VOLUME_SECONDARY_FORMAT = "%s-volume-secondary";
+    private static final String VOLUME_SECONDARY_COMPUTATIONAL_FORMAT = "%s-%s-volume-secondary";
+    // FORMAT1: sbn-project-endpoint-notebook; FORMAT2: sbn-endpoint-notebook (no project).
+    private static final String IMAGE_STANDARD_FORMAT1 = "%s-%s-%s-%s-notebook-image";
+    private static final String IMAGE_STANDARD_FORMAT2 = "%s-%s-%s-notebook-image";
+    // sbn-project-endpoint-application-imageName.
+    private static final String IMAGE_CUSTOM_FORMAT = "%s-%s-%s-%s-%s";
+
+    // Placeholder user/project label for resources not owned by a single user.
+    private static final String SHARED_RESOURCE = "Shared resource";
+    private static final String IMAGE_NAME = "Image";
+
+    // Spark standalone shape, e.g. "3 x r4.xlarge".
+    private static final String DATAENGINE_NAME_FORMAT = "%d x %s";
+    // NOTE(review): no separator between master and slave parts — renders as
+    // "Master: m4.largeSlave: m4.xlarge"; confirm whether a newline/space was intended.
+    private static final String DATAENGINE_SERVICE_NAME_FORMAT = "Master: %sSlave: %s";
+
+    /**
+     * Billing lines for a project's edge node on one endpoint: the edge instance, its
+     * primary volume, the project-endpoint bucket, plus one line per standard notebook image.
+     *
+     * @param project  project name (used in the generated ids and as the report "Project")
+     * @param sbn      service base name
+     * @param endpoint endpoint name
+     */
+    public static Stream<BillingReportLine> edgeBillingDataStream(String project, String sbn, String endpoint) {
+        final String userEdgeId = String.format(EDGE_FORMAT, sbn, project, endpoint).toLowerCase();
+        final String edgeVolumeId = String.format(EDGE_VOLUME_FORMAT, sbn, project, endpoint).toLowerCase();
+        final String endpointBucketId = String.format(PROJECT_ENDPOINT_BUCKET_FORMAT, sbn, project, endpoint).toLowerCase();
+
+        return Stream.concat(Stream.of(
+                BillingReportLine.builder().resourceName(endpoint).user(SHARED_RESOURCE).project(project).dlabId(userEdgeId).resourceType(EDGE).build(),
+                BillingReportLine.builder().resourceName("EDGE volume").user(SHARED_RESOURCE).project(project).dlabId(edgeVolumeId).resourceType(VOLUME).build(),
+                BillingReportLine.builder().resourceName("Project endpoint shared bucket").user(SHARED_RESOURCE).project(project).dlabId(endpointBucketId).resourceType(BUCKET).build()
+                ),
+                standardImageBillingDataStream(sbn, project, endpoint)
+        );
+    }
+
+    /**
+     * Billing lines for the self-service node (SSN) and its primary volume.
+     * Note: unlike the edge/endpoint ids, the SSN id is NOT lowercased here.
+     *
+     * @param sbn service base name
+     */
+    public static Stream<BillingReportLine> ssnBillingDataStream(String sbn) {
+        final String ssnId = String.format(SSN_FORMAT, sbn);
+        return Stream.of(
+                BillingReportLine.builder().user(SHARED_RESOURCE).project(SHARED_RESOURCE).resourceName("SSN").dlabId(ssnId).resourceType(SSN).build(),
+                BillingReportLine.builder().user(SHARED_RESOURCE).project(SHARED_RESOURCE).resourceName("SSN Volume").dlabId(String.format(VOLUME_PRIMARY_FORMAT, ssnId)).resourceType(VOLUME).build()
+        );
+    }
+
+    /**
+     * Billing lines for an endpoint shared across projects: the endpoint-wide shared bucket,
+     * the endpoint instance, plus project-less standard notebook image lines.
+     *
+     * @param endpoint endpoint name
+     * @param sbn      service base name
+     */
+    public static Stream<BillingReportLine> sharedEndpointBillingDataStream(String endpoint, String sbn) {
+        final String projectEndpointBucketId = String.format(ENDPOINT_SHARED_BUCKET_FORMAT, sbn, endpoint).toLowerCase();
+        final String endpointId = String.format(ENDPOINT_FORMAT, sbn, endpoint).toLowerCase();
+        return Stream.concat(Stream.of(
+                BillingReportLine.builder().resourceName("Endpoint shared bucket").user(SHARED_RESOURCE).project(SHARED_RESOURCE).dlabId(projectEndpointBucketId).resourceType(BUCKET).build(),
+                BillingReportLine.builder().resourceName("Endpoint").user(SHARED_RESOURCE).project(SHARED_RESOURCE).dlabId(endpointId).resourceType(ENDPOINT).build()
+                ),
+                standardImageBillingDataStream(sbn, endpoint));
+    }
+
+    /**
+     * Billing lines for a user's notebook (exploratory) and all of its computational
+     * resources: for each cluster, the cluster itself, master/slave volumes (primary and
+     * secondary, both plain and "-m"-suffixed master variants plus "-sN" slave variants up
+     * to maxSparkInstanceCount), then the notebook instance and its two volumes.
+     *
+     * @param userInstance          notebook descriptor; resources without a computationalId
+     *                              (not yet provisioned, presumably) are skipped
+     * @param maxSparkInstanceCount upper bound of slave volume ids to emit per cluster
+     */
+    public static Stream<BillingReportLine> exploratoryBillingDataStream(UserInstanceDTO userInstance, Integer maxSparkInstanceCount) {
+        final Stream<BillingReportLine> computationalStream = userInstance.getResources()
+                .stream()
+                .filter(cr -> cr.getComputationalId() != null)
+                .flatMap(cr -> {
+                    final String computationalId = cr.getComputationalId().toLowerCase();
+                    return Stream.concat(Stream.of(
+                            withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(computationalId).resourceType(COMPUTATIONAL).shape(getComputationalShape(cr))
+                                    .exploratoryName(userInstance.getExploratoryName()).build(),
+                            withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(String.format(VOLUME_PRIMARY_FORMAT, computationalId)).resourceType(VOLUME).build(),
+                            withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(String.format(VOLUME_SECONDARY_FORMAT, computationalId)).resourceType(VOLUME).build(),
+                            // "m" suffix marks master-node volumes of the cluster.
+                            withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(String.format(VOLUME_PRIMARY_COMPUTATIONAL_FORMAT, computationalId, "m"))
+                                    .resourceType(VOLUME).build(),
+                            withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(String.format(VOLUME_SECONDARY_COMPUTATIONAL_FORMAT, computationalId, "m"))
+                                    .resourceType(VOLUME).build()
+                            ),
+                            getSlaveVolumes(userInstance, cr, maxSparkInstanceCount)
+                    );
+                });
+        final String exploratoryName = userInstance.getExploratoryName();
+        final String exploratoryId = userInstance.getExploratoryId().toLowerCase();
+        final String primaryVolumeId = String.format(VOLUME_PRIMARY_FORMAT, exploratoryId);
+        final String secondaryVolumeId = String.format(VOLUME_SECONDARY_FORMAT, exploratoryId);
+        final Stream<BillingReportLine> exploratoryStream = Stream.of(
+                withUserProjectEndpoint(userInstance).resourceName(exploratoryName).dlabId(exploratoryId).resourceType(EXPLORATORY).shape(userInstance.getShape()).build(),
+                withUserProjectEndpoint(userInstance).resourceName(exploratoryName).dlabId(primaryVolumeId).resourceType(VOLUME).build(),
+                withUserProjectEndpoint(userInstance).resourceName(exploratoryName).dlabId(secondaryVolumeId).resourceType(VOLUME).build());
+
+        return Stream.concat(computationalStream, exploratoryStream);
+    }
+
+    /**
+     * Single billing line for a user-created (custom) notebook image.
+     *
+     * @param image custom image record (project/endpoint/application/name feed the id)
+     * @param sbn   service base name
+     */
+    public static Stream<BillingReportLine> customImageBillingDataStream(ImageInfoRecord image, String sbn) {
+        String imageId = String.format(IMAGE_CUSTOM_FORMAT, sbn, image.getProject(), image.getEndpoint(), image.getApplication(), image.getName()).toLowerCase();
+        return Stream.of(
+                BillingReportLine.builder().resourceName(image.getName()).project(image.getProject()).dlabId(imageId).user(image.getUser()).resourceType(IMAGE).build()
+        );
+    }
+
+    // Emits primary+secondary volume lines for slave nodes "s1".."s<maxSparkInstanceCount>".
+    // Ids are generated up to the configured maximum regardless of the cluster's actual size.
+    private static Stream<BillingReportLine> getSlaveVolumes(UserInstanceDTO userInstance, UserComputationalResource cr, Integer maxSparkInstanceCount) {
+        List<BillingReportLine> list = new ArrayList<>();
+        for (int i = 1; i <= maxSparkInstanceCount; i++) {
+            list.add(withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(String.format(VOLUME_PRIMARY_COMPUTATIONAL_FORMAT, cr.getComputationalId().toLowerCase(), "s" + i))
+                    .resourceType(VOLUME).build());
+            list.add(withUserProjectEndpoint(userInstance).resourceName(cr.getComputationalName()).dlabId(String.format(VOLUME_SECONDARY_COMPUTATIONAL_FORMAT, cr.getComputationalId().toLowerCase(), "s" + i))
+                    .resourceType(VOLUME).build());
+        }
+        return list.stream();
+    }
+
+    // Builder pre-populated with the notebook owner's user/project/endpoint.
+    private static BillingReportLine.BillingReportLineBuilder withUserProjectEndpoint(UserInstanceDTO userInstance) {
+        return BillingReportLine.builder().user(userInstance.getUser()).project(userInstance.getProject()).endpoint(userInstance.getEndpoint());
+    }
+
+    /**
+     * Human-readable shape of a computational resource: "N x shape" for standalone Spark,
+     * otherwise "Master: <shape>Slave: <shape>" (see DATAENGINE_SERVICE_NAME_FORMAT note).
+     */
+    public static String getComputationalShape(UserComputationalResource resource) {
+        return DataEngineType.fromDockerImageName(resource.getImageName()) == DataEngineType.SPARK_STANDALONE ?
+                String.format(DATAENGINE_NAME_FORMAT, resource.getDataengineInstanceCount(), resource.getDataengineShape()) :
+                String.format(DATAENGINE_SERVICE_NAME_FORMAT, resource.getMasterNodeShape(), resource.getSlaveNodeShape());
+    }
+
+    // Standard-image lines for a shared endpoint (no project): id = sbn-endpoint-notebook-...
+    private static Stream<BillingReportLine> standardImageBillingDataStream(String sbn, String endpoint) {
+        List<BillingReportLine> list = new ArrayList<>();
+        for (String notebook : AVAILABLE_NOTEBOOKS) {
+            list.add(BillingReportLine.builder().resourceName(IMAGE_NAME).dlabId(String.format(IMAGE_STANDARD_FORMAT2, sbn, endpoint, notebook).toLowerCase())
+                    .user(SHARED_RESOURCE).project(SHARED_RESOURCE).resourceType(IMAGE).build());
+        }
+
+        return list.stream();
+    }
+
+    // Standard-image lines scoped to a project: id = sbn-project-endpoint-notebook-...
+    private static Stream<BillingReportLine> standardImageBillingDataStream(String sbn, String project, String endpoint) {
+        List<BillingReportLine> list = new ArrayList<>();
+        for (String notebook : AVAILABLE_NOTEBOOKS) {
+            list.add(BillingReportLine.builder().resourceName(IMAGE_NAME).dlabId(String.format(IMAGE_STANDARD_FORMAT1, sbn, project, endpoint, notebook).toLowerCase())
+                    .project(project).user(SHARED_RESOURCE).resourceType(IMAGE).build());
+        }
+
+        return list.stream();
+    }
+
+    /**
+     * CSV preamble line with the service base name and the (nullable) report date range;
+     * null dates render as empty strings.
+     */
+    public static String getFirstLine(String sbn, LocalDate from, LocalDate to) {
+        return CSVFormatter.formatLine(Lists.newArrayList(String.format(REPORT_FIRST_LINE, sbn,
+                Optional.ofNullable(from).map(date -> date.format(DateTimeFormatter.ISO_DATE)).orElse(StringUtils.EMPTY),
+                Optional.ofNullable(to).map(date -> date.format(DateTimeFormatter.ISO_DATE)).orElse(StringUtils.EMPTY))),
+                CSVFormatter.SEPARATOR, '\"');
+    }
+
+    /**
+     * CSV header row. When {@code isFull} is false the "User" column (index 1 of
+     * REPORT_HEADERS) is omitted — keep in sync with printLine.
+     */
+    public static String getHeader(boolean isFull) {
+        List<String> headers = new ArrayList<>(Arrays.asList(BillingUtils.REPORT_HEADERS));
+        if (!isFull) {
+            headers.remove(1);
+        }
+        return CSVFormatter.formatLine(headers, CSVFormatter.SEPARATOR);
+    }
+
+    /**
+     * One CSV data row for a billing line; null fields render as empty strings.
+     * The "User" column is included only when {@code isFull}, mirroring getHeader.
+     */
+    public static String printLine(BillingReportLine line, boolean isFull) {
+        List<String> lines = new ArrayList<>();
+        lines.add(getOrEmpty(line.getDlabId()));
+        if (isFull) {
+            lines.add(getOrEmpty(line.getUser()));
+        }
+        lines.add(getOrEmpty(line.getProject()));
+        // e.g. COMPUTATIONAL -> "Computational".
+        lines.add(getOrEmpty(Optional.ofNullable(line.getResourceType()).map(r -> StringUtils.capitalize(r.toString().toLowerCase())).orElse(null)));
+        lines.add(getOrEmpty(Optional.ofNullable(line.getStatus()).map(UserInstanceStatus::toString).orElse(null)));
+        lines.add(getOrEmpty(line.getShape()));
+        lines.add(getOrEmpty(line.getProduct()));
+        lines.add(getOrEmpty(Optional.ofNullable(line.getCost()).map(String::valueOf).orElse(null)));
+        return CSVFormatter.formatLine(lines, CSVFormatter.SEPARATOR);
+    }
+
+    /**
+     * CSV totals row: empty cells for every column except the last, which holds
+     * "Total: <total> <currency>".
+     * NOTE(review): String.valueOf(total) yields the literal "null" for a null total
+     * before getOrEmpty sees it — confirm whether an empty cell was intended instead.
+     */
+    public static String getTotal(Double total, String currency) {
+        List<String> totalLine = new ArrayList<>();
+        for (int i = 0; i < REPORT_HEADERS.length - 1; i++) {
+            totalLine.add(StringUtils.EMPTY);
+        }
+        totalLine.add(REPORT_HEADERS.length - 1, String.format(TOTAL_LINE, getOrEmpty(String.valueOf(total)), getOrEmpty(currency)));
+        return CSVFormatter.formatLine(totalLine, CSVFormatter.SEPARATOR);
+
+    }
+
+    // Null-safe passthrough: null -> "".
+    private static String getOrEmpty(String s) {
+        return Objects.nonNull(s) ? s : StringUtils.EMPTY;
+    }
+}
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/KeycloakUtil.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/KeycloakUtil.java
index 63fc62c..072b963 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/KeycloakUtil.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/KeycloakUtil.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.util;
 
 import com.epam.dlab.exceptions.DlabException;
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java
index 69aeb6c..afe06cd 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java
@@ -509,7 +509,7 @@
 
 	@SuppressWarnings("unchecked")
 	public <T extends ExploratoryImageDTO> T newExploratoryImageCreate(UserInfo userInfo, UserInstanceDTO userInstance,
-																	   String imageName, EndpointDTO endpointDTO) {
+																	   String imageName, EndpointDTO endpointDTO, ProjectDTO projectDTO) {
 		checkInappropriateCloudProviderOrElseThrowException(endpointDTO.getCloudProvider());
 		return (T) newResourceSysBaseDTO(userInfo, endpointDTO.getCloudProvider(), ExploratoryImageDTO.class)
 				.withProject(userInstance.getProject())
@@ -519,7 +519,8 @@
 				.withNotebookImage(userInstance.getImageName())
 				.withImageName(imageName)
 				.withEndpoint(userInstance.getEndpoint())
-				.withTags(userInstance.getTags());
+				.withTags(userInstance.getTags())
+				.withSharedImageEnabled(String.valueOf(projectDTO.isSharedImageEnabled()));
 	}
 
 	@SuppressWarnings("unchecked")
diff --git a/services/self-service/src/main/resources/mongo/aws/mongo_roles.json b/services/self-service/src/main/resources/mongo/aws/mongo_roles.json
index 9998d84..6a8fd29 100644
--- a/services/self-service/src/main/resources/mongo/aws/mongo_roles.json
+++ b/services/self-service/src/main/resources/mongo/aws/mongo_roles.json
@@ -324,6 +324,23 @@
     ]
   },
   {
+    "_id": "projectAdmin",
+    "description": "Allow to execute administration operation per project",
+    "type": "ADMINISTRATION",
+    "cloud": "AWS",
+    "pages": [
+      "environment/*",
+      "/roleManagement",
+      "/api/settings",
+      "/user/settings",
+      "/api/project",
+      "/api/endpoint"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
     "_id": "admin",
     "description": "Allow to execute administration operation",
     "type": "ADMINISTRATION",
@@ -332,6 +349,8 @@
       "environment/*",
       "/api/infrastructure/backup",
       "/roleManagement",
+      "/roleManagement/create",
+      "/roleManagement/delete",
       "/api/settings",
       "/user/settings",
       "/api/project",
diff --git a/services/self-service/src/main/resources/mongo/azure/mongo_roles.json b/services/self-service/src/main/resources/mongo/azure/mongo_roles.json
index 113a705..86eadff 100644
--- a/services/self-service/src/main/resources/mongo/azure/mongo_roles.json
+++ b/services/self-service/src/main/resources/mongo/azure/mongo_roles.json
@@ -264,6 +264,23 @@
     ]
   },
   {
+    "_id": "projectAdmin",
+    "description": "Allow to execute administration operation per project",
+    "type": "ADMINISTRATION",
+    "cloud": "AZURE",
+    "pages": [
+      "environment/*",
+      "/roleManagement",
+      "/api/settings",
+      "/user/settings",
+      "/api/project",
+      "/api/endpoint"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
     "_id": "admin",
     "description": "Allow to execute administration operation",
     "type": "ADMINISTRATION",
@@ -272,6 +289,8 @@
       "environment/*",
       "/api/infrastructure/backup",
       "/roleManagement",
+      "/roleManagement/create",
+      "/roleManagement/delete",
       "/api/settings",
       "/user/settings",
       "/api/project",
diff --git a/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json b/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json
index 8098628..d2ef6dd 100644
--- a/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json
+++ b/services/self-service/src/main/resources/mongo/gcp/mongo_roles.json
@@ -300,6 +300,23 @@
     ]
   },
   {
+    "_id": "projectAdmin",
+    "description": "Allow to execute administration operation per project",
+    "type": "ADMINISTRATION",
+    "cloud": "GCP",
+    "pages": [
+      "environment/*",
+      "/roleManagement",
+      "/api/settings",
+      "/user/settings",
+      "/api/project",
+      "/api/endpoint"
+    ],
+    "groups": [
+      "$anyuser"
+    ]
+  },
+  {
     "_id": "admin",
     "description": "Allow to execute administration operation",
     "type": "ADMINISTRATION",
@@ -308,9 +325,12 @@
       "environment/*",
       "/api/infrastructure/backup",
       "/roleManagement",
+      "/roleManagement/create",
+      "/roleManagement/delete",
       "/api/settings",
       "/user/settings",
       "/api/project",
+      "/api/project/create",
       "/api/endpoint"
     ],
     "groups": [
diff --git a/services/self-service/src/main/resources/webapp/browserslist b/services/self-service/src/main/resources/webapp/browserslist
index 8084853..ae0116f 100644
--- a/services/self-service/src/main/resources/webapp/browserslist
+++ b/services/self-service/src/main/resources/webapp/browserslist
@@ -1,3 +1,24 @@
+# *****************************************************************************
+#
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#  http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing,
+#  software distributed under the License is distributed on an
+#  "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+#  KIND, either express or implied.  See the License for the
+#  specific language governing permissions and limitations
+#  under the License.
+#
+# ******************************************************************************
+
 # This file is used by the build system to adjust CSS and JS output to support the specified browsers below.
 # For additional information regarding the format and rule options, please see:
 # https://github.com/browserslist/browserslist#queries
@@ -9,4 +30,4 @@
 last 2 versions
 Firefox ESR
 not dead
-not IE 9-11 # For IE 9-11 support, remove 'not'.
\ No newline at end of file
+not IE 9-11 # For IE 9-11 support, remove 'not'.
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/management/manage-environment/manage-environment-dilog.component.ts b/services/self-service/src/main/resources/webapp/src/app/administration/management/manage-environment/manage-environment-dilog.component.ts
index a3ae8d2..f99944f 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/management/manage-environment/manage-environment-dilog.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/management/manage-environment/manage-environment-dilog.component.ts
@@ -69,7 +69,7 @@
   }
 
   public setBudgetLimits(value) {
-    if (this.getCurrentTotalValue() >= this.getCurrentUsersTotal()) {
+    if (this.getCurrentTotalValue() >= this.getCurrentUsersTotal() || !this.getCurrentTotalValue()) {
       this.dialogRef.close(value);
     } else {
       this.manageUsersForm.controls['total'].setErrors({ overrun: true });
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.html b/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.html
index 631e7ae..9947516 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.html
@@ -20,7 +20,7 @@
 <div class="ani">
   <table mat-table [dataSource]="allFilteredEnvironmentData" class="data-grid management mat-elevation-z6">
     <ng-container matColumnDef="user">
-      <th mat-header-cell *matHeaderCellDef class="user">
+      <th mat-header-cell *matHeaderCellDef class="user label-header">
         <span class="label">User</span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -32,7 +32,7 @@
     </ng-container>
 
     <ng-container matColumnDef="project">
-      <th mat-header-cell *matHeaderCellDef class="project">
+      <th mat-header-cell *matHeaderCellDef class="project label-header">
         <span class="label">Project</span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -44,7 +44,7 @@
     </ng-container>
 
     <ng-container matColumnDef="type">
-      <th mat-header-cell *matHeaderCellDef class="type">
+      <th mat-header-cell *matHeaderCellDef class="type label-header">
         <span class="label">Type</span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -56,7 +56,7 @@
     </ng-container>
 
     <ng-container matColumnDef="shape">
-      <th mat-header-cell *matHeaderCellDef class="shape">
+      <th mat-header-cell *matHeaderCellDef class="shape label-header">
         <span class="label">Shape / Resource id</span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -68,7 +68,7 @@
     </ng-container>
 
     <ng-container matColumnDef="status">
-      <th mat-header-cell *matHeaderCellDef class="status">
+      <th mat-header-cell *matHeaderCellDef class="status label-header">
         <span class="label">Status</span>
 
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
@@ -77,13 +77,13 @@
             <span [hidden]="filtering && filterForm.statuses.length > 0 && !collapsedFilterRow">more_vert</span>
           </i>
         </button> </th>
-      <td mat-cell *matCellDef="let element" class="ani status" >
+      <td mat-cell *matCellDef="let element" class="ani status label-header" >
         <span ngClass="{{element.status || ''}}">{{ element.status }}</span>
       </td>
     </ng-container>
 
     <ng-container matColumnDef="resources">
-      <th mat-header-cell *matHeaderCellDef class="resources">
+      <th mat-header-cell *matHeaderCellDef class="resources label-header">
         <span class="label">Computational resources</span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -122,7 +122,7 @@
     </ng-container>
 
     <ng-container matColumnDef="actions">
-      <th mat-header-cell *matHeaderCellDef class="actions">
+      <th mat-header-cell *matHeaderCellDef class="actions label-header">
         <span class="label"> Actions </span>
       </th>
       <td mat-cell *matCellDef="let element" class="settings actions-col">
@@ -166,43 +166,43 @@
 
     <!-- FILTERING -->
     <ng-container matColumnDef="user-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'users'" [items]="filterConfiguration.users"
           [model]="filterForm.users"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="type-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <input placeholder="Filter by environment type" type="text" class="form-control filter-field"
           [value]="filterForm.type" (input)="filterForm.type = $event.target.value" />
       </th>
     </ng-container>
     <ng-container matColumnDef="project-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'projects'"
           [items]="filterConfiguration.projects" [model]="filterForm.projects"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="shape-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'shapes'"
           [items]="filterConfiguration.shapes" [model]="filterForm.shapes"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="status-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'statuses'"
           [items]="filterConfiguration.statuses" [model]="filterForm.statuses"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="resource-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'resources'"
           [items]="filterConfiguration.resources" [model]="filterForm.resources"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="actions-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef  class="actions-col">
+      <th mat-header-cell *matHeaderCellDef  class="actions-col filter-row-item">
         <div class="actions">
           <button mat-icon-button class="btn reset" (click)="resetFilterConfigurations()">
             <i class="material-icons">close</i>
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.ts b/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.ts
index 216d79c..d0ab9dc 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/management/management-grid/management-grid.component.ts
@@ -162,11 +162,11 @@
 
       if (action === 'stop') {
         this.dialog.open(ConfirmationDialogComponent, {
-          data: { notebook: environment, type: type, manageAction: this.isAdmin }, panelClass: 'modal-md'
+          data: { notebook: environment, type: type, manageAction: true }, panelClass: 'modal-md'
         }).afterClosed().subscribe(() => this.buildGrid());
       } else if (action === 'terminate') {
         this.dialog.open(ConfirmationDialogComponent, {
-          data: { notebook: environment, type: ConfirmationDialogType.TerminateExploratory, manageAction: this.isAdmin }, panelClass: 'modal-md'
+          data: { notebook: environment, type: ConfirmationDialogType.TerminateExploratory, manageAction: true }, panelClass: 'modal-md'
         }).afterClosed().subscribe(() => this.buildGrid());
       } else if (action === 'run') {
         this.healthStatusService.runEdgeNode().subscribe(() => {
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/management/management.component.ts b/services/self-service/src/main/resources/webapp/src/app/administration/management/management.component.ts
index 477b872..87e554d 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/management/management.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/management/management.component.ts
@@ -96,10 +96,9 @@
   }
 
   openManageEnvironmentDialog() {
-    this.projectService.getProjectsManagingList().subscribe(projectsList => {
+    this.projectService.getProjectsList().subscribe(projectsList => {
       this.getTotalBudgetData().subscribe(total => {
         this.dialogRef = this.dialog.open(ManageEnvironmentComponent, { data: { projectsList, total }, panelClass: 'modal-sm' });
-        // this.dialogRef.componentInstance.manageEnv.subscribe((data) => this.manageEnvironment(data));
         this.dialogRef.afterClosed().subscribe(result => result && this.setBudgetLimits(result));
       }, () => this.toastr.error('Failed users list loading!', 'Oops!'));
     });
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/management/management.model.ts b/services/self-service/src/main/resources/webapp/src/app/administration/management/management.model.ts
index 4e7a663..b4f0701 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/management/management.model.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/management/management.model.ts
@@ -69,6 +69,7 @@
 
 export interface GeneralEnvironmentStatus {
   admin: boolean;
+  projectAdmin: boolean;
   billingEnabled: boolean;
   billingQuoteUsed: number;
   list_resources: any;
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.html b/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.html
index ab69bdb..3be7a10 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.html
@@ -20,8 +20,12 @@
 
 <div *ngIf="projectList" class="base-retreat">
   <div class="sub-nav">
-    <div>
-      <button mat-raised-button class="butt butt-create" (click)="createProject()" [disabled]="!projectList.length">
+    <div
+      matTooltip="Only admin can create new project."
+      matTooltipPosition="above"
+      [matTooltipDisabled]="healthStatus?.admin"
+    >
+      <button mat-raised-button class="butt butt-create" (click)="createProject()" [disabled]="!projectList.length || !healthStatus?.admin && healthStatus?.projectAdmin">
         <i class="material-icons">add</i>Create new
       </button>
     </div>
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.ts b/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.ts
index 9833a40..d1b172e 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/project/project.component.ts
@@ -96,7 +96,6 @@
     if (this.projectList.length)
       this.dialog.open(EditProjectComponent, { data: { action: 'create', item: null }, panelClass: 'modal-xl-s' })
         .afterClosed().subscribe(() => {
-          console.log('Create project');
           this.getEnvironmentHealthStatus();
         });
   }
@@ -122,13 +121,12 @@
 
   private toggleStatusRequest(data, action) {
     if ( action === 'terminate') {
-      const projectsResources = this.resources
-        .filter(resource => resource.project === data.project_name )[0].exploratory
-        .filter(expl => expl.status !== 'terminated' && expl.status !== 'terminating');
-
+      const projectsResources = this.resources.filter(resource => resource.project === data.project_name );
+      const activeProjectsResources = projectsResources.length ? projectsResources[0].exploratory
+        .filter(expl => expl.status !== 'terminated' && expl.status !== 'terminating' && expl.status !== 'failed') : [];
       let termResources = [];
       data.endpoint.forEach(v => {
-        termResources = [...termResources, ...projectsResources.filter(resource => resource.endpoint === v)];
+        termResources = [...termResources, ...activeProjectsResources.filter(resource => resource.endpoint === v)];
       });
 
       this.dialog.open(NotificationDialogComponent, { data: {
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.html b/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.html
index d5496ac..5c73329 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.html
@@ -19,8 +19,12 @@
 
 <div class="manage-roles base-retreat">
   <div class="sub-nav">
-    <div>
-      <button mat-raised-button class="butt add-group" (click)="stepperView = !stepperView">
+    <div matTooltip="Only admin can add group."
+         matTooltipPosition="above"
+         [matTooltipDisabled]="healthStatus?.admin"
+    >
+      <button mat-raised-button class="butt add-group" (click)="stepperView = !stepperView" [disabled]="!healthStatus?.admin"
+              >
         <i class="material-icons">people_outline</i>Add group
       </button>
     </div>
@@ -32,7 +36,7 @@
       <mat-step [completed]='false'>
         <ng-template matStepLabel>Groups</ng-template>
         <div class="inner-step mat-reset">
-          <input [validator]="groupValidarion()" type="text" placeholder="Enter group name" [(ngModel)]="setupGroup"
+          <input [validator]="groupValidation()" type="text" placeholder="Enter group name" [(ngModel)]="setupGroup"
             #setupGroupName="ngModel">
           <div class="error" *ngIf="setupGroupName.errors?.patterns && setupGroupName.dirty">Group name can only
             contain letters, numbers, hyphens and '_'</div>
@@ -68,7 +72,9 @@
               (selectionChange)="onUpdate($event)"
               name="roles"
               [items]="rolesList"
-              [model]="setupRoles">
+              [model]="setupRoles"
+              [isAdmin]="healthStatus?.admin"
+            >
             </multi-level-select-dropdown>
           </div>
         </div>
@@ -100,8 +106,9 @@
                 (selectionChange)="onUpdate($event)"
                 [type]="element.group"
                 [items]="rolesList"
-                [model]="element.selected_roles">
-
+                [model]="element.selected_roles"
+                [isAdmin]="healthStatus?.admin"
+              >
               </multi-level-select-dropdown>
           </div>
         </td>
@@ -132,9 +139,21 @@
         <th mat-header-cell *matHeaderCellDef class="actions"></th>
         <td mat-cell *matCellDef="let element" class="actions">
           <div class="actions-wrapper">
-            <span (click)="manageAction('delete', 'group', element)" class="reset ani">
-              <mat-icon>delete_forever</mat-icon>
+            <span class="action-disabled"
+              matTooltip="Only admin can delete group."
+              matTooltipPosition="above"
+              [matTooltipDisabled]="healthStatus?.admin"
+            >
+              <span
+                (click)="manageAction('delete', 'group', element)"
+                class="reset ani"
+                [ngClass]="{ 'not-allowed' : !healthStatus?.admin}"
+
+              >
+              <mat-icon >delete_forever</mat-icon>
             </span>
+            </span>
+
             <span class="apply ani big-icon" matTooltip="Group cannot be updated without any selected role"
               matTooltipPosition="above" [matTooltipDisabled]="element.selected_roles.length > 0"
               [ngClass]="{ 'not-allowed' : !element.selected_roles.length || isGroupChanded(element)}"
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.scss b/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.scss
index 66b1898..1167084 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.scss
@@ -203,6 +203,7 @@
   }
 }
 
+
 .expanded-panel {
   display: flex;
   align-items: flex-end;
@@ -359,16 +360,22 @@
     color: #607d8b;
     width: 10%;
     text-align: center;
+
     .actions-wrapper{
       height: 41px;
       display: flex;
       align-items: center;
       justify-content: flex-end;
     }
+
     span {
       transition: all .35s ease-in-out;
       cursor: pointer;
 
+      &.action-disabled{
+        cursor: not-allowed;
+      }
+
       .mat-icon {
         font-size: 18px;
         padding-top: 12px;
diff --git a/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.ts b/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.ts
index 19ab0ed..2b1e26b 100644
--- a/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/administration/roles/roles.component.ts
@@ -21,14 +21,11 @@
 import { ValidatorFn, FormControl } from '@angular/forms';
 import { MatDialog, MatDialogRef, MAT_DIALOG_DATA } from '@angular/material/dialog';
 import { ToastrService } from 'ngx-toastr';
-
-import { RolesGroupsService, HealthStatusService } from '../../core/services';
+import {RolesGroupsService, HealthStatusService, ApplicationSecurityService, AppRoutingService} from '../../core/services';
 import { CheckUtils } from '../../core/util';
 import { DICTIONARY } from '../../../dictionary/global.dictionary';
 import {ProgressBarService} from '../../core/services/progress-bar.service';
 import {ConfirmationDialogComponent, ConfirmationDialogType} from '../../shared/modal-dialog/confirmation-dialog';
-import {logger} from 'codelyzer/util/logger';
-
 
 @Component({
   selector: 'dlab-roles',
@@ -53,7 +50,7 @@
   stepperView: boolean = false;
   displayedColumns: string[] = ['name', 'roles', 'users', 'actions'];
   @Output() manageRolesGroupAction: EventEmitter<{}> = new EventEmitter();
-  private startedGroups;
+  private startedGroups: Array<any>;
 
   constructor(
     public toastr: ToastrService,
@@ -61,10 +58,11 @@
     private rolesService: RolesGroupsService,
     private healthStatusService: HealthStatusService,
     private progressBarService: ProgressBarService,
+    private applicationSecurityService: ApplicationSecurityService,
+    private appRoutingService: AppRoutingService,
   ) { }
 
   ngOnInit() {
-    this.openManageRolesDialog();
     this.getEnvironmentHealthStatus();
   }
 
@@ -74,13 +72,12 @@
       this.rolesService.getRolesData().subscribe(
         (roles: any) => {
           this.roles = roles;
-          this.rolesList = roles.map((role, index) => {
+          this.rolesList = roles.map((role) => {
               return {role: role.description, type: role.type, cloud: role.cloud};
           });
           this.rolesList = this.rolesList.sort((a, b) => (a.cloud > b.cloud) ? 1 : ((b.cloud > a.cloud) ? -1 : 0));
           this.rolesList = this.rolesList.sort((a, b) => (a.type > b.type) ? 1 : ((b.type > a.type) ? -1 : 0));
           this.updateGroupData(groups);
-
           this.stepperView = false;
         },
         error => this.toastr.error(error.message, 'Oops!'));
@@ -131,10 +128,7 @@
     } else if (action === 'update') {
       const currGroupSource = this.startedGroups.filter(cur => cur.group === item.group)[0];
       let deletedUsers = currGroupSource.users.filter(user => {
-        if (item.users.includes(user)) {
-           return false;
-        }
-        return true;
+        return !item.users.includes(user);
       });
       this.dialog.open(ConfirmationDialogComponent, { data:
           { notebook: deletedUsers, type: ConfirmationDialogType.deleteUser }, panelClass: 'modal-sm' })
@@ -144,10 +138,14 @@
           item.selected_roles = [...currGroupSource.selected_roles];
           item.roles = [...currGroupSource.roles];
         } else {
+          const isSuperAdminGroup = this.startedGroups.filter(v => v.group === item.group)[0].roles.filter(role => role.description === 'Allow to execute administration operation').length;
+          const selectedRoles = isSuperAdminGroup ?
+            [...item.selected_roles.map(v => v.role), 'Allow to execute administration operation'] :
+            item.selected_roles.map(v => v.role);
           this.manageRolesGroups({
             action, type, value: {
               name: item.group,
-              roleIds: this.extractIds(this.roles, item.selected_roles.map(v => v.role)),
+              roleIds: this.extractIds(this.roles, selectedRoles),
               users: item.users || []
             }
           });
@@ -155,36 +153,40 @@
         deletedUsers = [];
       });
     }
-    this.getEnvironmentHealthStatus();
     this.resetDialog();
   }
 
   public manageRolesGroups($event) {
     switch ($event.action) {
       case 'create':
-
-        this.rolesService.setupNewGroup($event.value).subscribe(res => {
+        this.rolesService.setupNewGroup($event.value).subscribe(() => {
           this.toastr.success('Group creation success!', 'Created!');
           this.getGroupsData();
         }, () => this.toastr.error('Group creation failed!', 'Oops!'));
         break;
 
       case 'update':
-        this.rolesService.updateGroup($event.value).subscribe(res => {
+        this.rolesService.updateGroup($event.value).subscribe(() => {
           this.toastr.success(`Group data is updated successfully!`, 'Success!');
-          this.openManageRolesDialog();
-        }, () => this.toastr.error('Failed group data updating!', 'Oops!'));
+          if (!$event.value.roleIds.includes('admin' || 'projectAdmin')) {
+            this.applicationSecurityService.isLoggedIn().subscribe(() => {
+              this.getEnvironmentHealthStatus();
+            });
+          } else {
+            this.openManageRolesDialog();
+          }
+        }, (re) => this.toastr.error('Failed group data updating!', 'Oops!'));
 
         break;
 
       case 'delete':
         if ($event.type === 'users') {
-          this.rolesService.removeUsersForGroup($event.value).subscribe(res => {
+          this.rolesService.removeUsersForGroup($event.value).subscribe(() => {
             this.toastr.success('Users was successfully deleted!', 'Success!');
             this.getGroupsData();
           }, () => this.toastr.error('Failed users deleting!', 'Oops!'));
         } else if ($event.type === 'group') {
-          this.rolesService.removeGroupById($event.value).subscribe(res => {
+          this.rolesService.removeGroupById($event.value).subscribe(() => {
             this.toastr.success('Group was successfully deleted!', 'Success!');
             this.getGroupsData();
           }, (error) => this.toastr.error(error.message, 'Oops!'));
@@ -210,7 +212,7 @@
       return v;
     }).sort((a, b) => (a.group > b.group) ? 1 : ((b.group > a.group) ? -1 : 0));
     this.groupsData.forEach(item => {
-      item.selected_roles = item.roles.map(role => ({role: role.description, type: role.type, cloud: role.cloud}));
+        item.selected_roles = item.roles.map(role => ({role: role.description, type: role.type, cloud: role.cloud}));
     });
     this.getGroupsListCopy();
   }
@@ -219,7 +221,7 @@
     this.startedGroups = JSON.parse(JSON.stringify(this.groupsData));
   }
 
-  public groupValidarion(): ValidatorFn {
+  public groupValidation(): ValidatorFn {
     const duplicateList: any = this.groupsData.map(item => item.group.toLowerCase());
     return <ValidatorFn>((control: FormControl) => {
       if (control.value && duplicateList.includes(CheckUtils.delimitersFiltering(control.value.toLowerCase()))) {
@@ -269,7 +271,15 @@
 
   private getEnvironmentHealthStatus() {
     this.healthStatusService.getEnvironmentHealthStatus()
-      .subscribe((result: any) => this.healthStatus = result);
+      .subscribe((result: any) => {
+        this.healthStatus = result;
+          if (!this.healthStatus.admin && !this.healthStatus.projectAdmin) {
+            this.appRoutingService.redirectToHomePage();
+          } else {
+            this.openManageRolesDialog();
+          }
+      }
+      );
   }
 
   public onUpdate($event): void {
diff --git a/services/self-service/src/main/resources/webapp/src/app/app.routing.module.ts b/services/self-service/src/main/resources/webapp/src/app/app.routing.module.ts
index 3c8ae3f..f2649fb 100644
--- a/services/self-service/src/main/resources/webapp/src/app/app.routing.module.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/app.routing.module.ts
@@ -21,7 +21,7 @@
 import { Routes, RouterModule } from '@angular/router';
 
 import { LoginComponent } from './login/login.module';
-import { LayoutComponent } from './layout/layout.component'
+import { LayoutComponent } from './layout/layout.component';
 import { ResourcesComponent } from './resources/resources.component';
 import { AccessNotebookGuideComponent, PublicKeyGuideComponent } from './help';
 import { NotFoundComponent } from './service-pages/not-found/not-found.component';
@@ -68,10 +68,10 @@
       component: ManagementComponent,
       canActivate: [AuthorizationGuard, AdminGuard]
     }, {
-      path: 'swagger',
-      component: SwaggerComponent,
-      canActivate: [AuthorizationGuard]
-    }, {
+    //   path: 'swagger',
+    //   component: SwaggerComponent,
+    //   canActivate: [AuthorizationGuard]
+    // }, {
       path: 'help/publickeyguide',
       component: PublicKeyGuideComponent,
       canActivate: [AuthorizationGuard]
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/services/applicationServiceFacade.service.ts b/services/self-service/src/main/resources/webapp/src/app/core/services/applicationServiceFacade.service.ts
index 121188e..75d2087 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/services/applicationServiceFacade.service.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/services/applicationServiceFacade.service.ts
@@ -418,16 +418,6 @@
       null);
   }
 
-  public buildManageEnvironment(action, data): Observable<any> {
-    return this.buildRequest(HTTPMethod.POST,
-      this.requestRegistry.Item(ApplicationServiceFacade.ENV) + action,
-      data,
-      {
-        observe: 'response',
-        headers: { 'Content-Type': 'text/plain' }
-      });
-  }
-
   public buildGetAllEnvironmentData(): Observable<any> {
     return this.buildRequest(HTTPMethod.GET,
       this.requestRegistry.Item(ApplicationServiceFacade.FULL_ACTIVE_LIST),
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/services/healthStatus.service.ts b/services/self-service/src/main/resources/webapp/src/app/core/services/healthStatus.service.ts
index d593d08..12086bc 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/services/healthStatus.service.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/services/healthStatus.service.ts
@@ -119,7 +119,7 @@
               this.appRoutingService.redirectToHomePage();
               return false;
             }
-            if (parameter === 'administration' && !data.admin) {
+            if (parameter === 'administration' && !data.admin && !data.projectAdmin) {
               this.appRoutingService.redirectToNoAccessPage();
               return false;
             }
@@ -136,15 +136,6 @@
         catchError(ErrorUtils.handleServiceError));
   }
 
-  public manageEnvironment(act, data): Observable<Response | {}> {
-    const action = `/${act}`;
-    return this.applicationServiceFacade
-      .buildManageEnvironment(action, data)
-      .pipe(
-        map(response => response),
-        catchError(ErrorUtils.handleServiceError));
-  }
-
   public getSsnMonitorData(): Observable<{}> {
     return this.applicationServiceFacade
       .buildGetSsnMonitorData()
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/services/project.service.ts b/services/self-service/src/main/resources/webapp/src/app/core/services/project.service.ts
index 0bb54b5..ccf93f8 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/services/project.service.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/services/project.service.ts
@@ -52,15 +52,6 @@
         catchError(ErrorUtils.handleServiceError));
   }
 
-  public getProjectsManagingList(): Observable<{}> {
-    const params = '/managing';
-    return this.applicationServiceFacade
-      .buildGetUserProjectsList(params)
-      .pipe(
-        map(response => response),
-        catchError(ErrorUtils.handleServiceError));
-  }
-
   public getUserProjectsList(isActive?): Observable<{}> {
     const params = isActive ? '/me?active=true' : '';
     return this.applicationServiceFacade
@@ -79,15 +70,6 @@
         catchError(ErrorUtils.handleServiceError));
   }
 
-  public stopProjectAction(data): Observable<{}> {
-    const url = `/managing/stop/${data}`;
-    return this.applicationServiceFacade
-      .buildToggleProjectStatus(url, data)
-      .pipe(
-        map(response => response),
-        catchError(ErrorUtils.handleServiceError));
-  }
-
   public updateProjectsBudget(data): Observable<{}> {
     const url = '/budget';
     return this.applicationServiceFacade
diff --git a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.html b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.html
index 932bd3e..537cbb5 100644
--- a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.html
@@ -21,8 +21,8 @@
   <table mat-table [dataSource]="reportData" class="data-grid reporting mat-elevation-z6">
 
     <ng-container matColumnDef="name">
-      <th mat-header-cell *matHeaderCellDef class="env_name">
-        <span class="label"> Environment name </span>
+      <th mat-header-cell *matHeaderCellDef class="env_name label-header">
+        <div class="label"><span class="text"> Environment name</span></div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
             <span *ngIf="filteredReportData.dlab_id.length > 0; else dlab_id_filtered">filter_list</span>
@@ -30,16 +30,22 @@
           </i>
         </button>
       </th>
-      <td mat-cell *matCellDef="let element"> {{element[DICTIONARY[PROVIDER].billing.dlabId]}} </td>
+      <td mat-cell *matCellDef="let element"><span class="table-item">{{element.dlabId}}</span></td>
       <td mat-footer-cell *matFooterCellDef class="table-footer"></td>
     </ng-container>
 
     <ng-container matColumnDef="user">
-      <th mat-header-cell *matHeaderCellDef class="th_user">
-        <span class="label"> User </span>
+      <th mat-header-cell *matHeaderCellDef class="th_user label-header">
+        <div class="sort">
+          <div class="sort-arrow up" (click)="sortBy('user', 'down')" [ngClass]="{'active': !!this.active['userdown']}"></div>
+          <div class="sort-arrow down" (click)="sortBy('user', 'up')" [ngClass]="{'active': !!this.active['userup']}"></div>
+        </div>
+        <div class="label">
+          <span class="text"> User </span>
+        </div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
-            <span *ngIf="filteredReportData.user.length > 0; else user_filtered">filter_list</span>
+            <span *ngIf="filteredReportData.users.length > 0; else user_filtered">filter_list</span>
             <ng-template #user_filtered>more_vert</ng-template>
           </i>
         </button>
@@ -49,11 +55,15 @@
     </ng-container>
 
     <ng-container matColumnDef="project">
-      <th mat-header-cell *matHeaderCellDef class="th_project">
-        <span class="label">Project</span>
+      <th mat-header-cell *matHeaderCellDef class="th_project label-header">
+        <div class="sort">
+          <div class="sort-arrow up" (click)="sortBy('project', 'down')" [ngClass]="{'active': !!this.active['projectdown']}"></div>
+          <div class="sort-arrow down" (click)="sortBy('project', 'up')" [ngClass]="{'active': !!this.active['projectup']}"></div>
+        </div>
+        <div class="label"><span class="text">Project</span></div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
-            <span *ngIf="filteredReportData.project.length > 0; else project_filtered">filter_list</span>
+            <span *ngIf="filteredReportData.projects.length > 0; else project_filtered">filter_list</span>
             <ng-template #project_filtered>more_vert</ng-template>
           </i>
         </button>
@@ -63,8 +73,8 @@
     </ng-container>
 
     <ng-container matColumnDef="type">
-      <th mat-header-cell *matHeaderCellDef class="th_type">
-        <span class="label"> Resource Type </span>
+      <th mat-header-cell *matHeaderCellDef class="th_type label-header">
+        <div class="label"><span class="text"> Resource Type</span> </div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
             <span *ngIf="filteredReportData.resource_type.length > 0; else type_filtered">filter_list</span>
@@ -72,16 +82,16 @@
           </i>
         </button>
       </th>
-      <td mat-cell *matCellDef="let element"> {{element[DICTIONARY[PROVIDER].billing.resourceType]}} </td>
+      <td mat-cell *matCellDef="let element"> {{element.resource_type | titlecase}} </td>
       <td mat-footer-cell *matFooterCellDef class="table-footer"></td>
     </ng-container>
 
     <ng-container matColumnDef="status">
-      <th mat-header-cell *matHeaderCellDef class="th_status">
-        <span class="label"> Status </span>
+      <th mat-header-cell *matHeaderCellDef class="th_status label-header">
+        <div class="label"><span class="text"> Status</span> </div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
-            <span *ngIf="filteredReportData.status.length > 0; else status_filtered">filter_list</span>
+            <span *ngIf="filteredReportData.statuses.length > 0; else status_filtered">filter_list</span>
             <ng-template #status_filtered>more_vert</ng-template>
           </i>
         </button>
@@ -95,103 +105,112 @@
     </ng-container>
 
     <ng-container matColumnDef="shape">
-      <th mat-header-cell *matHeaderCellDef class="th_shape">
-        <span class="label"> Instance size</span>
+      <th mat-header-cell *matHeaderCellDef class="th_shape label-header">
+        <div class="label"><span class="text"> Instance size</span></div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
             <span
-              *ngIf="filteredReportData['shape'].length > 0; else shape_filtered">filter_list</span>
+              *ngIf="filteredReportData['shapes'].length > 0; else shape_filtered">filter_list</span>
             <ng-template #shape_filtered>more_vert</ng-template>
           </i>
         </button>
       </th>
       <td mat-cell *matCellDef="let element">
-        <span>{{element[DICTIONARY[PROVIDER].billing.instance_size]}}</span>
+        <ng-container *ngIf="element.shape">
+          <div *ngFor="let shape of shapeSplit(element.shape)">{{shape}}</div>
+        </ng-container>
       </td>
       <td mat-footer-cell *matFooterCellDef class="table-footer"></td>
     </ng-container>
 
     <ng-container matColumnDef="service">
-      <th mat-header-cell *matHeaderCellDef class="service">
-        <span class="label"> Service </span>
+      <th mat-header-cell *matHeaderCellDef class="service label-header">
+        <div class="label"><span class="text"> Product</span> </div>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
             <span
-              *ngIf="filteredReportData['shape'].length > 0; else service_filtered">filter_list</span>
+              *ngIf="filteredReportData['shapes'].length > 0; else service_filtered">filter_list</span>
             <ng-template #service_filtered>more_vert</ng-template>
           </i>
         </button>
       </th>
       <td mat-cell *matCellDef="let element">
-        <span *ngIf="element[DICTIONARY[PROVIDER].billing.service]">{{ element[DICTIONARY[PROVIDER].billing.service] }}</span>
+        {{ element.product }}
+<!--        <span *ngIf="element.product">{{ element.product }}</span>-->
       </td>
       <td mat-footer-cell *matFooterCellDef class="table-footer"></td>
     </ng-container>
 
     <ng-container matColumnDef="charge" stickyEnd>
-      <th mat-header-cell *matHeaderCellDef class="th_charges">
-        <span class="label"> Service Charges </span>
+      <th mat-header-cell *matHeaderCellDef class="th_charges label-header">
+        <div class="label">
+          <div class="sort">
+            <div class="sort-arrow up" (click)="sortBy('cost', 'down')" [ngClass]="{'active': !!this.active['costdown']}"></div>
+            <div class="sort-arrow down" (click)="sortBy('cost', 'up')" [ngClass]="{'active': !!this.active['costup']}"></div>
+          </div>
+          <span class="text">Service Charges</span>
+        </div>
       </th>
 
       <td mat-cell *matCellDef="let element">
-        {{ element[DICTIONARY[PROVIDER].billing.cost] }} {{ element[DICTIONARY[PROVIDER].billing.currencyCode] }}
+        {{ element.cost }} {{ element['currency'] }}
       </td>
-      <td mat-footer-cell *matFooterCellDef class="table-footer">
-        Total <span *ngIf="reportData?.length"> {{ fullReport['cost_total'] }}
-          {{ fullReport[DICTIONARY[PROVIDER].billing.currencyCode] }}</span>
+      <td mat-footer-cell *matFooterCellDef class="table-footer total-cost">
+        Total <span *ngIf="reportData?.length"> {{ fullReport['total_cost'] }}
+          {{ fullReport['currency'] }}</span>
       </td>
     </ng-container>
 
     <!-- ----------------FILTER -->
     <ng-container matColumnDef="name-filter">
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <input #nameFilter type="text" placeholder="Filter by environment name" class="form-control filter-field"
-          [value]="filtered?.dlab_id" (input)="filteredReportData.dlab_id = $event.target.value" />
+          [value]="filtered?.dlab_id" (input)="filteredReportData.dlab_id = $event.target['value']" />
       </th>
     </ng-container>
     <ng-container matColumnDef="user-filter">
-      <th mat-header-cell *matHeaderCellDef>
-        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'user'"
-          [items]="filterConfiguration.user" [model]="filteredReportData.user"></multi-select-dropdown>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
+        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'users'"
+          [items]="filterConfiguration.users" [model]="filteredReportData.users"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="project-filter">
-      <th mat-header-cell *matHeaderCellDef>
-        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'project'"
-          [items]="filterConfiguration.project" [model]="filteredReportData.project"></multi-select-dropdown>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
+        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'projects'"
+          [items]="filterConfiguration.projects" [model]="filteredReportData.projects"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="type-filter">
-      <th mat-header-cell *matHeaderCellDef>
-        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'resource_type'"
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
+        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="['resource_type']"
           [items]="filterConfiguration.resource_type" [model]="filteredReportData.resource_type">
         </multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="status-filter">
-      <th mat-header-cell *matHeaderCellDef>
-        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'status'"
-          [items]="filterConfiguration.status" [model]="filteredReportData.status"></multi-select-dropdown>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
+        <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)" [type]="'statuses'"
+          [items]="filterConfiguration.statuses" [model]="filteredReportData.statuses"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="shape-filter">
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)"
-          [type]="'shape'"[items]="filterConfiguration['shape']"
-          [model]="filteredReportData['shape']"></multi-select-dropdown>
+          [type]="'shapes'" [items]="filterConfiguration['shapes']"
+          [model]="filteredReportData['shapes']"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="service-filter">
-      <th mat-header-cell *matHeaderCellDef>
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
         <multi-select-dropdown *ngIf="filterConfiguration" (selectionChange)="onUpdate($event)"
-          [type]="['service']"
-          [items]="filterConfiguration['service']"
-          [model]="filteredReportData['service']"></multi-select-dropdown>
+          [type]="['products']"
+          [items]="filterConfiguration['products']"
+          [model]="filteredReportData['products']"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="actions" stickyEnd>
-      <th mat-header-cell *matHeaderCellDef>
-        <div class="actions">
+      <th mat-header-cell *matHeaderCellDef class="filter-row-item">
+        <div class="actions th_charges">
           <button mat-icon-button class="btn reset" (click)="resetFiltering(); isFiltered = !isFiltered">
             <i class="material-icons">close</i>
           </button>
@@ -210,7 +229,7 @@
 
     <tr mat-header-row *matHeaderRowDef="displayedColumns; sticky: true" class="header-row"></tr>
 
-    <tr [hidden]="!collapseFilterRow || !PROVIDER" mat-header-row *matHeaderRowDef="displayedFilterColumns; sticky: true"
+    <tr [hidden]="!collapseFilterRow" mat-header-row *matHeaderRowDef="displayedFilterColumns; sticky: true"
       class="filter-row"></tr>
     <tr mat-row *matRowDef="let row; columns: displayedColumns;" class="content-row"></tr>
 
diff --git a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.scss b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.scss
index f8f872f..9c4f819 100644
--- a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.scss
@@ -67,6 +67,9 @@
     &.header-row {
       th {
         font-size: 11px;
+        .label{
+          padding-left: 0;
+        }
       }
     }
   }
@@ -101,9 +104,13 @@
     min-width: 200px;
   }
 
-  .th_type {
+  .th_project{
     width: 12%;
-    min-width: 200px;
+  }
+
+  .th_type {
+    width: 10%;
+    min-width: 150px;
   }
 
   .th_status {
@@ -112,9 +119,8 @@
   }
 
   .th_charges {
-    width: 8%;
-    min-width: 130px;
-    padding-right: 15px;
+    width: 10%;
+    min-width: 155px;
     text-align: right;
 
     .label {
@@ -148,20 +154,55 @@
 
   .header-row {
     position: unset;
+
     .th_charges {
       padding-top: 0;
+
       .label {
         padding-top: 12px;
       }
     }
+
     .label {
       display: inline-block;
       padding-top: 13px;
       vertical-align: super !important;
-      padding-left: 15px;
+
+      .text{
+     padding-left: 15px;
+      }
+    }
+
+    .sort{
+      position: absolute;
+      bottom: 20px;
+
+      &-arrow{
+        width: 6px;
+        height: 6px;
+        border: 3px solid transparent;
+        border-bottom: 3px solid rgba(0,0,0,.54);
+        border-left: 3px solid rgba(0,0,0,.54);
+        cursor: pointer;
+
+        &.active{
+          border-bottom: 3px solid #35afd5;
+          border-left: 3px solid #35afd5;
+        }
+      }
+
+      .down{
+        transform: rotate(-45deg);
+      }
+
+      .up{
+        transform: rotate(135deg);
+      }
     }
   }
 
+
+
   .filter-row {
     .actions {
       text-align: right;
@@ -193,15 +234,26 @@
   }
 }
 
+.table-footer{
+  position: sticky;
+  bottom: 0;
+  background: inherit;
+  border-top: 1px solid #E0E0E0;
+  transform: translateY(-1px);
+  border-bottom: none;
 
+  &.total-cost{
+    min-width: 140px;
+    padding-left: 0 !important;
+  }
+}
 
 @media screen and (max-width: 1280px) {
   .dashboard_table.reporting {
-
     .env_name,
     .service,
     .th_type,
-    .th_rstatus {
+    .th_status {
       width: 10%;
     }
 
diff --git a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.ts b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.ts
index 1856688..d4e0076 100644
--- a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting-grid/reporting-grid.component.ts
@@ -18,9 +18,7 @@
  */
 
 import {Component, OnInit, Output, EventEmitter, ViewChild, Input} from '@angular/core';
-
-import { DICTIONARY, ReportingConfigModel } from '../../../dictionary/global.dictionary';
-import {logger} from 'codelyzer/util/logger';
+import { ReportingConfigModel } from '../../../dictionary/global.dictionary';
 
 @Component({
   selector: 'dlab-reporting-grid',
@@ -30,7 +28,6 @@
 
 })
 export class ReportingGridComponent implements OnInit {
-  readonly DICTIONARY = DICTIONARY;
 
   filterConfiguration: ReportingConfigModel;
   filteredReportData: ReportingConfigModel = new ReportingConfigModel([], [], [], [], [], '', '', '', []);
@@ -38,14 +35,15 @@
   reportData: Array<any> = [];
   fullReport: Array<any>;
   isFiltered: boolean = false;
+  active: object = {};
 
   @ViewChild('nameFilter', { static: false }) filter;
 
   @Output() filterReport: EventEmitter<{}> = new EventEmitter();
   @Output() resetRangePicker: EventEmitter<boolean> = new EventEmitter();
-  @Input() PROVIDER: string;
   displayedColumns: string[] = ['name', 'user', 'project', 'type', 'status', 'shape', 'service', 'charge'];
   displayedFilterColumns: string[] = ['name-filter', 'user-filter', 'project-filter', 'type-filter', 'status-filter', 'shape-filter', 'service-filter', 'actions'];
+  filtered: any;
 
   ngOnInit() {}
 
@@ -65,6 +63,33 @@
     }
   }
 
+  sortBy(sortItem, direction) {
+  let report: Array<object>;
+  if (direction === 'down') {
+    report = this.reportData.sort((a, b) => {
+      const av = a[sortItem] === null ? '' : a[sortItem];
+      const bv = b[sortItem] === null ? '' : b[sortItem];
+      return (av > bv) ? 1 : -1;
+    });
+  }
+  if (direction === 'up') {
+    report = this.reportData.sort((a, b) => {
+      const av = a[sortItem] === null ? '' : a[sortItem];
+      const bv = b[sortItem] === null ? '' : b[sortItem];
+      return (av < bv) ? 1 : -1;
+    });
+  }
+  this.refreshData(this.fullReport, report);
+  this.removeSorting();
+  this.active[sortItem + direction] = true;
+  }
+
+  removeSorting() {
+    for (const item in this.active) {
+      this.active[item] = false;
+    }
+  }
+
   toggleFilterRow(): void {
     this.collapseFilterRow = !this.collapseFilterRow;
   }
@@ -76,13 +101,18 @@
   filter_btnClick(): void {
     this.filterReport.emit(this.filteredReportData);
     this.isFiltered = true;
+    this.removeSorting();
   }
 
   resetFiltering(): void {
     this.filteredReportData.defaultConfigurations();
-
-    this.filter.nativeElement.value = ''
+    this.removeSorting();
+    this.filter.nativeElement.value = '';
     this.filterReport.emit(this.filteredReportData);
     this.resetRangePicker.emit(true);
   }
+
+  shapeSplit(shape) {
+    return shape.split(/(?=Slave)/g);
+  }
 }
diff --git a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting.component.ts b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting.component.ts
index 3433383..1692ef1 100644
--- a/services/self-service/src/main/resources/webapp/src/app/reporting/reporting.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/reporting/reporting.component.ts
@@ -20,8 +20,7 @@
 
 import { Component, OnInit, OnDestroy, ViewChild } from '@angular/core';
 import { ToastrService } from 'ngx-toastr';
-
-import {BillingReportService, EndpointService, HealthStatusService} from '../core/services';
+import {ApplicationSecurityService, BillingReportService, HealthStatusService} from '../core/services';
 import { ReportingGridComponent } from './reporting-grid/reporting-grid.component';
 import { ToolbarComponent } from './toolbar/toolbar.component';
 
@@ -38,15 +37,15 @@
                   (setRangeOption)="setRangeOption($event)">
     </dlab-toolbar>
     <mat-divider></mat-divider>
-    <dlab-reporting-grid [PROVIDER]="PROVIDER" (filterReport)="filterReport($event)" (resetRangePicker)="resetRangePicker()"></dlab-reporting-grid>
+    <dlab-reporting-grid (filterReport)="filterReport($event)" (resetRangePicker)="resetRangePicker()"></dlab-reporting-grid>
   </div>
 
   `,
   styles: [`
     footer {
       position: fixed;
-      left: 0px;
-      bottom: 0px;
+      left: 0;
+      bottom: 0;
       width: 100%;
       background: #a1b7d1;
       color: #ffffff;
@@ -68,56 +67,31 @@
   data: any;
   billingEnabled: boolean;
   admin: boolean;
-  public PROVIDER: string;
 
   constructor(
     private billingReportService: BillingReportService,
     private healthStatusService: HealthStatusService,
     public toastr: ToastrService,
     private progressBarService: ProgressBarService,
-    private endpointService: EndpointService,
+    private applicationSecurityService: ApplicationSecurityService,
   ) { }
 
   ngOnInit() {
     this.getEnvironmentHealthStatus();
+    this.buildBillingReport();
   }
 
   ngOnDestroy() {
     this.clearStorage();
   }
 
-  getBillingProvider() {
-    if (this.admin) {
-      this.endpointService.getEndpointsData().subscribe(list => {
-        const endpoints = JSON.parse(JSON.stringify(list));
-        const localEndpoint = endpoints.filter(endpoint => endpoint.name === 'local');
-        if (localEndpoint.length) {
-          this.PROVIDER = localEndpoint[0].cloudProvider.toLowerCase();
-          if (this.PROVIDER) {
-            this.rebuildBillingReport();
-          }
-        }
-      }, e => {
-        this.PROVIDER = 'azure';
-        if (this.PROVIDER) {
-          this.rebuildBillingReport();
-        }
-      }) ;
-    } else {
-      this.PROVIDER = 'azure';
-      if (this.PROVIDER) {
-        this.rebuildBillingReport();
-      }
-    }
-  }
-
   getGeneralBillingData() {
     setTimeout(() => {this.progressBarService.startProgressBar(); } , 0);
     this.billingReportService.getGeneralBillingData(this.reportData)
       .subscribe(data => {
         this.data = data;
-        this.reportingGrid.refreshData(this.data, this.data.lines);
-        this.reportingGrid.setFullReport(this.data.full_report);
+        this.reportingGrid.refreshData(this.data, this.data.report_lines);
+        this.reportingGrid.setFullReport(this.data.is_full);
 
         this.reportingToolbar.reportData = this.data;
         if (!localStorage.getItem('report_period')) {
@@ -138,26 +112,37 @@
       }, () => this.progressBarService.stopProgressBar());
   }
 
-  rebuildBillingReport($event?): void {
-    if (this.PROVIDER) {
-      this.clearStorage();
-      this.resetRangePicker();
-      this.reportData.defaultConfigurations();
-      this.getGeneralBillingData();
-    }
+  rebuildBillingReport(): void {
+    this.checkAutorize();
+    this.buildBillingReport();
+
+  }
+
+  buildBillingReport() {
+    this.clearStorage();
+    this.resetRangePicker();
+    this.reportData.defaultConfigurations();
+    this.getGeneralBillingData();
+  }
+
+  private checkAutorize() {
+    this.applicationSecurityService.isLoggedIn().subscribe( () => {
+        this.getEnvironmentHealthStatus();
+      }
+    );
   }
 
   exportBillingReport(): void {
     this.billingReportService.downloadReport(this.reportData)
       .subscribe(
         data => FileUtils.downloadFile(data),
-        error => this.toastr.error('Billing report export failed!', 'Oops!'));
+        () => this.toastr.error('Billing report export failed!', 'Oops!'));
   }
 
   getDefaultFilterConfiguration(data): void {
     const users = [], types = [], shapes = [], services = [], statuses = [], projects = [];
 
-    data.lines.forEach((item: any) => {
+    data.report_lines.forEach((item: any) => {
       if (item.user && users.indexOf(item.user) === -1)
         users.push(item.user);
 
@@ -167,30 +152,29 @@
       if (item.project && projects.indexOf(item.project) === -1)
         projects.push(item.project);
 
-      if (item[DICTIONARY[this.PROVIDER].billing.resourceType] && types.indexOf(item[DICTIONARY[this.PROVIDER].billing.resourceType]) === -1)
-        types.push(item[DICTIONARY[this.PROVIDER].billing.resourceType]);
+      if (item['resource_type'] && types.indexOf(item['resource_type']) === -1)
+        types.push(item['resource_type']);
 
-      if (item[DICTIONARY[this.PROVIDER].billing.instance_size]) {
-        if (item[DICTIONARY[this.PROVIDER].billing.instance_size].indexOf('Master') > -1) {
-          for (let shape of item[DICTIONARY[this.PROVIDER].billing.instance_size].split('\n')) {
+      if (item.shape && types.indexOf(item.shape)) {
+       if (item.shape.indexOf('Master') > -1) {
+          for (let shape of item.shape.split(/(?=Slave)/g)) {
             shape = shape.replace('Master: ', '');
-            shape = shape.replace(/Slave:\s+\d+ x /, '');
+            shape = shape.replace(/Slave: /, '');
             shape = shape.replace(/\s+/g, '');
-
             shapes.indexOf(shape) === -1 && shapes.push(shape);
           }
-        } else if (item[DICTIONARY[this.PROVIDER].billing.instance_size].match(/\d x \S+/)) {
-          const parsedShape = item[DICTIONARY[this.PROVIDER].billing.instance_size].match(/\d x \S+/)[0].split(' x ')[1];
+        } else if (item.shape.match(/\d x \S+/)) {
+          const parsedShape = item.shape.match(/\d x \S+/)[0].split(' x ')[1];
           if (shapes.indexOf(parsedShape) === -1) {
             shapes.push(parsedShape);
           }
         } else {
-          shapes.indexOf(item[DICTIONARY[this.PROVIDER].billing.instance_size]) === -1 && shapes.push(item[DICTIONARY[this.PROVIDER].billing.instance_size]);
+          shapes.indexOf(item.shape) === -1 && shapes.push(item.shape);
         }
       }
 
-      if (item[DICTIONARY[this.PROVIDER].billing.service] && services.indexOf(item[DICTIONARY[this.PROVIDER].billing.service]) === -1)
-        services.push(item[DICTIONARY[this.PROVIDER].billing.service]);
+      if (item.product && services.indexOf(item.product) === -1)
+        services.push(item.product);
     });
 
     if (!this.reportingGrid.filterConfiguration || !localStorage.getItem('report_config')) {
@@ -225,7 +209,6 @@
       .subscribe((result: any) => {
         this.billingEnabled = result.billingEnabled;
         this.admin = result.admin;
-        this.getBillingProvider();
       });
   }
 }
diff --git a/services/self-service/src/main/resources/webapp/src/app/reporting/toolbar/toolbar.component.html b/services/self-service/src/main/resources/webapp/src/app/reporting/toolbar/toolbar.component.html
index 30de007..583371e 100644
--- a/services/self-service/src/main/resources/webapp/src/app/reporting/toolbar/toolbar.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/reporting/toolbar/toolbar.component.html
@@ -19,7 +19,7 @@
 <section class="toolbar">
   <div class="info_color" *ngIf="reportData">
     <div class="general">
-      <div><span>Service base name: </span><strong>{{ reportData.service_base_name }}</strong></div>
+      <div><span>Service base name: </span><strong>{{ reportData.sbn }}</strong></div>
       <div *ngIf="reportData.tag_resource_id"><span>Resource tag ID:
         </span><strong>{{ reportData.tag_resource_id }}</strong></div>
       <div class="report-period info_color" *ngIf="availablePeriodFrom && availablePeriodTo">
@@ -34,7 +34,7 @@
     <ng-daterangepicker [(ngModel)]="value" [options]="options" (ngModelChange)="onChange($event)"></ng-daterangepicker>
   </div>
   <div class="action-butt">
-    <button mat-raised-button class="butt" (click)="export($event)" [disabled]="!reportData?.lines.length">
+    <button mat-raised-button class="butt" (click)="export($event)" [disabled]="!reportData?.report_lines.length">
       <i class="material-icons">file_download</i>Export
     </button>
     <button mat-raised-button class="butt" (click)="rebuild($event)">
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/computational/cluster-details/cluster-details.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/computational/cluster-details/cluster-details.component.ts
index 11002e1..16fb1ae 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/computational/cluster-details/cluster-details.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/computational/cluster-details/cluster-details.component.ts
@@ -57,8 +57,6 @@
 
   ngOnInit() {
     this.open(this.data.environment, this.data.resource);
-
-    console.log(this.PROVIDER);
   }
 
   public open(environment, resource): void {
@@ -98,7 +96,9 @@
 
   public editClusterConfiguration(data): void {
     this.dataengineConfigurationService
-      .editClusterConfiguration(data.configuration_parameters, this.environment.project, this.environment.name, this.resource.computational_name, this.PROVIDER)
+      .editClusterConfiguration(
+        data.configuration_parameters, this.environment.project, this.environment.name, this.resource.computational_name, this.PROVIDER
+      )
       .subscribe(result => {
         this.dialogRef.close();
       },
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.html
index 95ae591..1331f5f 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.html
@@ -35,24 +35,24 @@
         <mat-list>
             <mat-list-item class="list-header">
               <div class="resource-name ellipsis" [ngClass]="{ 'wide-name-field' : provider === 'azure' }">Name</div>
-              <div class="service">{{ DICTIONARY[provider].service }}</div>
-              <div class="resource-type" *ngIf="provider === 'aws'">Type</div>
-              <div class="cost-currency">Cost</div>
+              <div class="service">Product</div>
+<!--              <div class="resource-type" *ngIf="provider === 'aws'">Type</div>-->
               <div class="usage-date-start">Start</div>
               <div class="usage-date-end">End</div>
+              <div class="cost-currency">Cost</div>
             </mat-list-item>
             <div class="scrolling-content" id="scrolling">
-              <mat-list-item *ngFor="let item of notebook.billing">
+              <mat-list-item *ngFor="let item of notebook.billing.report_lines">
                 <div class="resource-name" [ngClass]="{ 'wide-name-field' : provider === 'azure' }"
-                     matTooltip="{{ item[DICTIONARY[provider].billing.resourceName] }}"
+                     matTooltip="{{ item.resource_name }}"
                      matTooltipPosition="above">
-                     {{ item[DICTIONARY[provider].billing.resourceName] }}
+                     {{ item.resource_name }}
                 </div>
-                <div class="service">{{ item[DICTIONARY[provider].billing.service] }}</div>
-                <div class="resource-type" *ngIf="provider === 'aws'">{{ item[DICTIONARY[provider].billing.type] }}</div>
-                <div class="cost-currency">{{ item[DICTIONARY[provider].billing.cost] }} {{ item[DICTIONARY[provider].billing.currencyCode] }}</div>
-                <div class="usage-date-start">{{ item[DICTIONARY[provider].billing.dateFrom] | date }}</div>
-                <div class="usage-date-end">{{ item[DICTIONARY[provider].billing.dateTo] | date }}</div>
+                <div class="service">{{ item.product }}</div>
+<!--                <div class="resource-type" >{{ item.resource_type }}</div>-->
+                <div class="usage-date-start">{{ item.from | date }}</div>
+                <div class="usage-date-end">{{ item.to | date }}</div>
+                <div class="cost-currency">{{ item.cost }} {{ item.currency }}</div>
               </mat-list-item>
             </div>
         </mat-list>
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.scss b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.scss
index 64ef43c..18998ea 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/cost-details-dialog/cost-details-dialog.component.scss
@@ -43,7 +43,7 @@
   .resource-name,
   .usage-date-start,
   .usage-date-end {
-    width: 15%;
+    width: 20%;
     overflow: hidden;
     text-overflow: ellipsis;
     padding-right: 10px;
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html
index 4977b22..d41f69a 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html
@@ -47,11 +47,13 @@
             <p>Open following URL(s) in your browser to access this box:</p>
             <div class="links_block">
               <p *ngFor="let item of notebook.url">
+               <ng-container *ngIf="!(item.description === 'Ungit' && notebook.template_name.indexOf('Apache Zeppelin') !== -1)">
                 <span class="description">{{item.description}}: &nbsp;</span>
                 <a class="ellipsis" matTooltip="{{item.url}}" matTooltipPosition="above" href="{{item.url}}"
                   target="_blank">
                   &nbsp;{{item.url}}
                 </a>
+               </ng-container>
               </p>
             </div>
             <p class="flex" *ngIf="notebook.username">Node User: &nbsp;<span
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html
index 965f9d8..69edcc3 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.html
@@ -39,7 +39,7 @@
     </ng-container> -->
 
     <ng-container matColumnDef="name" sticky>
-      <th mat-header-cell *matHeaderCellDef class="name-col">
+      <th mat-header-cell *matHeaderCellDef class="name-col label-header">
         <span class="label">Environment name</span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -50,7 +50,7 @@
       </th>
     </ng-container>
     <ng-container matColumnDef="statuses">
-      <th mat-header-cell *matHeaderCellDef class="status-col">
+      <th mat-header-cell *matHeaderCellDef class="status-col label-header">
         <span class="label"> Status </span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -61,7 +61,7 @@
       </th>
     </ng-container>
     <ng-container matColumnDef="shapes">
-      <th mat-header-cell *matHeaderCellDef class="shape-col">
+      <th mat-header-cell *matHeaderCellDef class="shape-col label-header">
         <span class="label"> Size </span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -72,12 +72,12 @@
       </th>
     </ng-container>
     <ng-container matColumnDef="tag">
-      <th mat-header-cell *matHeaderCellDef class="tag-col">
+      <th mat-header-cell *matHeaderCellDef class="tag-col label-header">
         <span class="label"> Tags </span>
       </th>
     </ng-container>
     <ng-container matColumnDef="resources">
-      <th mat-header-cell *matHeaderCellDef class="resources-col">
+      <th mat-header-cell *matHeaderCellDef class="resources-col label-header">
         <span class="label"> Computational resources </span>
         <button mat-icon-button aria-label="More" class="ar" (click)="toggleFilterRow()">
           <i class="material-icons">
@@ -88,12 +88,12 @@
       </th>
     </ng-container>
     <ng-container matColumnDef="cost">
-      <th mat-header-cell *matHeaderCellDef class="cost-col">
+      <th mat-header-cell *matHeaderCellDef class="cost-col label-header">
         <span class="label"> Cost </span>
       </th>
     </ng-container>
     <ng-container matColumnDef="actions" stickyEnd>
-      <th mat-header-cell *matHeaderCellDef class="actions-col">
+      <th mat-header-cell *matHeaderCellDef class="actions-col label-header">
         <span class="label"> Actions </span>
       </th>
     </ng-container>
@@ -139,7 +139,7 @@
           <td *ngIf="healthStatus?.billingEnabled" class="cost-col">
             <span class="total_cost">{{ element.cost || 'N/A' }} {{ element.currency_code || '' }}</span>
             <span (click)="element.billing && printCostDetails(element)" class="currency_details"
-              [ngClass]="{ 'not-allowed' : !element.billing }">
+              [ngClass]="{ 'not-allowed' : !element.billing.report_lines.length }">
               <i class="material-icons">help_outline</i>
             </span>
           </td>
@@ -168,10 +168,12 @@
                     </div>
                   </li>
                   <li *ngIf="element.status.toLowerCase() === 'stopped' || element.status.toLowerCase() === 'stopping'"
-                    matTooltip="{{isEdgeNodeStopped(element) ? 'Unable to run notebook if edge node is stopped.' : 'Unable to run notebook until it will be stopped.'}}" matTooltipPosition="above"
-                    [matTooltipDisabled]="!isResourcesInProgress(element) && element.status.toLowerCase() !== 'stopping' && !isEdgeNodeStopped(element)">
+                    matTooltip="{{element.edgeNodeStatus !== 'running' ? 'Unable to run notebook if edge node is not running.' : 'Unable to run notebook until it will be stopped.'}}" matTooltipPosition="above"
+                    [matTooltipDisabled]="!isResourcesInProgress(element) && element.status.toLowerCase() !== 'stopping' && element.edgeNodeStatus === 'running'"
+                    [ngClass]="{'not-allow': isResourcesInProgress(element) || element.status.toLowerCase() === 'stopping' || element.edgeNodeStatus !== 'running' }"
+                  >
                     <div (click)="exploratoryAction(element, 'run')"
-                      [ngClass]="{'not-allowed': isResourcesInProgress(element) || element.status.toLowerCase() === 'stopping' || isEdgeNodeStopped(element) }">
+                      [ngClass]="{'not-allowed': isResourcesInProgress(element) || element.status.toLowerCase() === 'stopping' || element.edgeNodeStatus !== 'running' }">
                       <i class="material-icons">play_circle_outline</i>
                       <span>Run</span>
                     </div>
@@ -222,37 +224,37 @@
 
     <!-- FILTER START -->
     <ng-container matColumnDef="name-filter" sticky>
-      <th mat-header-cell *matHeaderCellDef class="name-col">
+      <th mat-header-cell *matHeaderCellDef class="name-col filter-row-item">
         <input placeholder="Filter by environment name" type="text" class="form-control filter-field"
-          [value]="filterForm.name" (input)="filterForm.name = $event.target.value" />
+          [value]="filterForm.name" (input)="filterForm.name = $event.target['value']" />
       </th>
     </ng-container>
     <ng-container matColumnDef="status-filter">
-      <th mat-header-cell *matHeaderCellDef class="status-col">
+      <th mat-header-cell *matHeaderCellDef class="status-col filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'statuses'"
           [items]="filterConfiguration.statuses" [model]="filterForm.statuses"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="shape-filter">
-      <th mat-header-cell *matHeaderCellDef class="shape-col">
+      <th mat-header-cell *matHeaderCellDef class="shape-col filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)"
           [type]="'sizes'" [items]="filterConfiguration.shapes"
           [model]="filterForm.shapes"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="tag-filter">
-      <th mat-header-cell *matHeaderCellDef class="tag-col">
+      <th mat-header-cell *matHeaderCellDef class="tag-col filter-row-item">
 
       </th>
     </ng-container>
     <ng-container matColumnDef="resource-filter">
-      <th mat-header-cell *matHeaderCellDef class="resources-col">
+      <th mat-header-cell *matHeaderCellDef class="resources-col filter-row-item">
         <multi-select-dropdown (selectionChange)="onUpdate($event)" [type]="'resources'"
           [items]="filterConfiguration.resources" [model]="filterForm.resources"></multi-select-dropdown>
       </th>
     </ng-container>
     <ng-container matColumnDef="cost-filter">
-      <th mat-header-cell *matHeaderCellDef class="cost-col">
+      <th mat-header-cell *matHeaderCellDef class="cost-col filter-row-item">
 
       </th>
     </ng-container>
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.scss b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.scss
index e09ff3e..d6c0556 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.scss
@@ -395,3 +395,7 @@
 .content-row{
   background-clip: padding-box;
 }
+
+.not-allow{
+  cursor: not-allowed !important;
+}
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.ts
index 3052d84..3a7aabd 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.component.ts
@@ -46,6 +46,7 @@
 
 
 
+
 @Component({
   selector: 'resources-grid',
   templateUrl: 'resources-grid.component.html',
@@ -151,12 +152,6 @@
     return false;
   }
 
-  public isEdgeNodeStopped(resource) {
-    const currProject = this.projects.filter(proj => proj.name === resource.project);
-    const currEdgenodeStatus =  currProject[0].endpoints.filter(node => node.name === resource.endpoint)[0].status;
-    return currEdgenodeStatus === 'STOPPED' || currEdgenodeStatus === 'STOPPING';
-  }
-
   public filterActiveInstances(): FilterConfigurationModel {
     return (<FilterConfigurationModel | any>Object).assign({}, this.filterConfiguration, {
       statuses: SortUtils.activeStatuses(),
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.model.ts b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.model.ts
index 14a2824..e769dbe 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.model.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources-grid/resources-grid.model.ts
@@ -50,6 +50,7 @@
     public project: string,
     public endpoint: string,
     public tags: any,
+    public edgeNodeStatus: string
   ) { }
 
   public static loadEnvironments(data: Array<any>) {
@@ -59,36 +60,39 @@
           project: value.project,
           exploratory: value.exploratory.map(el => {
             const provider = el.cloud_provider.toLowerCase();
+            const billing = value.exploratoryBilling.filter(res => res.name === el.exploratory_name)[0];
             return new ExploratoryModel(
-            provider,
-            el.exploratory_name,
-            el.template_name,
-            el.image,
-            el.status,
-            el.shape,
-            el.computational_resources,
-            el.up_time,
-            el.exploratory_url,
-            value.shared[el.endpoint].edge_node_ip,
-            el.private_ip,
-            el.exploratory_user,
-            el.exploratory_pass,
-            value.shared[el.endpoint][DICTIONARY[provider].bucket_name],
-            value.shared[el.endpoint][DICTIONARY[provider].shared_bucket_name],
-            el.error_message,
-            el[DICTIONARY[provider].billing.cost],
-            el[DICTIONARY[provider].billing.currencyCode],
-            el.billing,
-            el.libs,
-            value.shared[el.endpoint][DICTIONARY[provider].user_storage_account_name],
-            value.shared[el.endpoint][DICTIONARY[provider].shared_storage_account_name],
-            value.shared[el.endpoint][DICTIONARY[provider].datalake_name],
-            value.shared[el.endpoint][DICTIONARY[provider].datalake_user_directory_name],
-            value.shared[el.endpoint][DICTIONARY[provider].datalake_shared_directory_name],
-            el.project,
-            el.endpoint,
-            el.tags,
-          )})
+              provider,
+              el.exploratory_name,
+              el.template_name,
+              el.image,
+              el.status,
+              el.shape,
+              el.computational_resources,
+              el.up_time,
+              el.exploratory_url,
+              value.shared[el.endpoint].edge_node_ip,
+              el.private_ip,
+              el.exploratory_user,
+              el.exploratory_pass,
+              value.shared[el.endpoint][DICTIONARY[provider].bucket_name],
+              value.shared[el.endpoint][DICTIONARY[provider].shared_bucket_name],
+              el.error_message,
+              billing ? billing.total_cost : '',
+              billing ? billing.currency : '',
+              billing,
+              el.libs,
+              value.shared[el.endpoint][DICTIONARY[provider].user_storage_account_name],
+              value.shared[el.endpoint][DICTIONARY[provider].shared_storage_account_name],
+              value.shared[el.endpoint][DICTIONARY[provider].datalake_name],
+              value.shared[el.endpoint][DICTIONARY[provider].datalake_user_directory_name],
+              value.shared[el.endpoint][DICTIONARY[provider].datalake_shared_directory_name],
+              el.project,
+              el.endpoint,
+              el.tags,
+              value.shared[el.endpoint].status
+            );
+          })
         };
       });
     }
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.html
index 091ccb7..b705c38 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.html
@@ -63,5 +63,5 @@
     </div>
   </div>
   <mat-divider></mat-divider>
-  <resources-grid [projects] = "projects"></resources-grid>
+  <resources-grid></resources-grid>
 </div>
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.ts
index bab05a7..7eb6ff6 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/resources.component.ts
@@ -17,14 +17,14 @@
  * under the License.
  */
 
-import { Component, OnInit, ViewChild, OnDestroy } from '@angular/core';
+import { Component, OnInit, ViewChild } from '@angular/core';
 import { ToastrService } from 'ngx-toastr';
 import { MatDialog } from '@angular/material/dialog';
 
 import { ResourcesGridComponent } from './resources-grid/resources-grid.component';
 import { ExploratoryEnvironmentCreateComponent } from './exploratory/create-environment';
 import { Exploratory } from './resources-grid/resources-grid.model';
-import { HealthStatusService, ProjectService } from '../core/services';
+import {ApplicationSecurityService, HealthStatusService} from '../core/services';
 import { ManageUngitComponent } from './manage-ungit/manage-ungit.component';
 import { Project } from './../administration/project/project.component';
 
@@ -45,12 +45,11 @@
     public toastr: ToastrService,
     private healthStatusService: HealthStatusService,
     private dialog: MatDialog,
-    private projectService: ProjectService
+    private applicationSecurityService: ApplicationSecurityService
   ) { }
 
   ngOnInit() {
     this.getEnvironmentHealthStatus();
-    this.getProjects();
     this.exploratoryEnvironments = this.resourcesGrid.environments;
   }
 
@@ -61,8 +60,7 @@
 
   public refreshGrid(): void {
     this.resourcesGrid.buildGrid();
-    this.getProjects();
-    this.getEnvironmentHealthStatus();
+    this.checkAutorize();
     this.exploratoryEnvironments = this.resourcesGrid.environments;
   }
 
@@ -84,13 +82,15 @@
   }
 
   public getActiveProject() {
-    console.log('activeProject: ', this.resourcesGrid.activeProject);
 
     return this.resourcesGrid.activeProject;
   }
 
-  private getProjects() {
-    this.projectService.getUserProjectsList().subscribe((projects: any) => this.projects = projects);
+  private checkAutorize() {
+   this.applicationSecurityService.isLoggedIn().subscribe( () => {
+     this.getEnvironmentHealthStatus();
+     }
+   );
   }
 
 
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts
index 0df2a59..8b31445 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts
@@ -88,7 +88,6 @@
 
   public open(notebook, type, resource?): void {
     this.notebook = notebook;
-    console.log(this.notebook)
     this.zones = _moment.tz.names()
       .map(item => [_moment.tz(item).format('Z'), item])
       .sort()
@@ -256,8 +255,11 @@
   }
 
   private setScheduleByInactivity() {
-    console.log(this.notebook)
-    const data = { sync_start_required: this.parentInherit, check_inactivity_required: this.enableIdleTime, max_inactivity: this.schedulerForm.controls.inactivityTime.value };
+    const data = {
+      sync_start_required: this.parentInherit,
+      check_inactivity_required: this.enableIdleTime,
+      max_inactivity: this.schedulerForm.controls.inactivityTime.value
+    };
     (this.destination.type === 'СOMPUTATIONAL')
       ? this.setInactivity(this.notebook.project, this.notebook.name, data, this.destination.computational_name)
       : this.setInactivity(this.notebook.project, this.notebook.name, { ...data, consider_inactivity: this.considerInactivity });
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.html b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.html
index 4e41606..420aa09 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.html
@@ -38,9 +38,11 @@
       </li>
 
         <ng-template  ngFor let-item [ngForOf]="items" let-i="index">
-          <li class="role-label" role="presentation" *ngIf="i === 0 || model && item.type !== items[i - 1].type" (click)="toggleItemsForLable(item.type, $event)">
+          <li class="role-label" role="presentation" *ngIf="i === 0 || model && item.type !== items[i - 1].type" (click)="toggleItemsForLable(item.type, $event)" >
             <a href="#" class="list-item" role="menuitem">
-              <span class="arrow" [ngClass]="{'rotate-arrow': isOpenCategory[item.type], 'arrow-checked': selectedAllInCattegory(item.type) || selectedSomeInCattegory(item.type)}"></span>
+              <span class="arrow" [ngClass]="{'rotate-arrow': isOpenCategory[item.type], 'arrow-checked': selectedAllInCattegory(item.type) || selectedSomeInCattegory(item.type)}">
+                <i class="material-icons">keyboard_arrow_right</i>
+              </span>
               <span class="empty-checkbox" [ngClass]="{'checked': selectedAllInCattegory(item.type) || selectedSomeInCattegory(item.type)}" (click)="toggleselectedCategory($event, model, item.type);$event.stopPropagation()" >
                 <span class="checked-checkbox" *ngIf="selectedAllInCattegory(item.type)"></span>
                 <span class="line-checkbox" *ngIf="selectedSomeInCattegory(item.type)"></span>
@@ -49,7 +51,11 @@
             </a>
           </li>
 
-          <li class="role-item" role="presentation" *ngIf="model && isOpenCategory[item.type] && item.type !== 'COMPUTATIONAL_SHAPE' && item.type !== 'NOTEBOOK_SHAPE'" >
+          <li class="role-item"
+              role="presentation"
+              *ngIf="model && isOpenCategory[item.type] && item.type !== 'COMPUTATIONAL_SHAPE' && item.type !== 'NOTEBOOK_SHAPE'"
+              [hidden]="!isAdmin && item.role === 'Allow to execute administration operation'"
+          >
             <a href="#" class="list-item" role="menuitem" (click)="toggleSelectedOptions($event, model, item)">
               <span class="empty-checkbox" [ngClass]="{'checked': checkInModel(item.role)}">
                 <span class="checked-checkbox" *ngIf="checkInModel(item.role)"></span>
@@ -64,12 +70,20 @@
               || model && isOpenCategory[item.type] && item.type === 'COMPUTATIONAL_SHAPE' && item.type !== items[i - 1].type"
           >
             <a href="#" class="list-item" role="menuitem">
-              <span class="arrow" [ngClass]="{'rotate-arrow': isCloudOpen[item.type + item.cloud], 'arrow-checked': selectedAllInCloud(item.type, item.cloud) || selectedSomeInCloud(item.type, item.cloud)}"></span>
-              <span class="empty-checkbox" [ngClass]="{'checked': selectedAllInCloud(item.type, item.cloud) || selectedSomeInCloud(item.type, item.cloud)}" (click)="toggleSelectedCloud($event, model, item.type, item.cloud);$event.stopPropagation()" >
+              <span class="arrow" [ngClass]="{'rotate-arrow': isCloudOpen[item.type + item.cloud], 'arrow-checked': selectedAllInCloud(item.type, item.cloud) || selectedSomeInCloud(item.type, item.cloud)}">
+                 <i class="material-icons">keyboard_arrow_right</i>
+              </span>
+              <span class="empty-checkbox"
+                    [ngClass]="{
+                    'checked': selectedAllInCloud(item.type, item.cloud)
+                    || selectedSomeInCloud(item.type, item.cloud)}"
+                    (click)="toggleSelectedCloud($event, model, item.type, item.cloud);
+                    $event.stopPropagation()"
+              >
                 <span class="checked-checkbox" *ngIf="selectedAllInCloud(item.type, item.cloud)"></span>
                 <span class="line-checkbox" *ngIf="selectedSomeInCloud(item.type, item.cloud)"></span>
               </span>
-              {{item.cloud || 'AWS'}}
+              {{item.cloud}}
             </a>
           </li>
           <li class="role-cloud-item" role="presentation" *ngIf="model && isCloudOpen[item.type + item.cloud] && isOpenCategory[item.type]" >
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.scss b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.scss
index 5323a24..a066dd5 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.scss
@@ -278,23 +278,22 @@
     }
 
     &.arrow{
-      width: 16px;
-      height: 14px;
-      border: 8px solid transparent;
-      border-left: 8px solid lightgrey;
-      left: 10px;
-      top: 12px;
-      border-radius: 3px;
-
+      left: 2px;
+      top: 9px;
+      i{
+        color: lightgrey;
+      }
       &.rotate-arrow{
         transform: rotate(90deg);
         transition: .1s ease-in-out;
-        top: 15px;
-        left: 6px;
+        top: 6px;
+        left: 0;
       }
 
       &.arrow-checked{
-        border-left: 8px solid #35afd5;
+        i{
+          color: #35afd5;
+        }
       }
     }
   }
@@ -319,3 +318,7 @@
   }
 }
 
+.d-none{
+  display: none;
+}
+
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.ts b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.ts
index cabf7d9..5b9c1a9 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-level-select-dropdown/multi-level-select-dropdown.component.ts
@@ -30,6 +30,7 @@
   @Input() items: Array<any>;
   @Input() model: Array<any>;
   @Input() type: string;
+  @Input() isAdmin: boolean;
   @Output() selectionChange: EventEmitter<{}> = new EventEmitter();
 
   public isOpenCategory = {
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-select-dropdown/multi-select-dropdown.component.html b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-select-dropdown/multi-select-dropdown.component.html
index 7f58adf..60744c4 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-select-dropdown/multi-select-dropdown.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/multi-select-dropdown/multi-select-dropdown.component.html
@@ -40,7 +40,8 @@
           <li role="presentation" *ngIf="model">
             <a href="#" class="list-item" role="menuitem" (click)="toggleSelectedOptions($event, model, item)">
               <span class="material-icons" *ngIf="model.indexOf(item) >= 0">done</span>
-              {{item}}
+              <ng-container *ngIf="type[0] !== 'resource_type'">{{item}}</ng-container>
+              <ng-container *ngIf="type[0] === 'resource_type'">{{item | titlecase}}</ng-container>
             </a>
           </li>
         </ng-template>
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.component.html b/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.component.html
index 9bb12e7..b80443a 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.component.html
@@ -38,7 +38,8 @@
       <div *ngIf="data.type === 4" class="text-center m-bot-20">
         <h3 class="strong">Group data will be updated.</h3>
       </div>
-      <p *ngIf="data.type === 4 && data.notebook.length" class="text-center delete-user">User<span *ngIf="data.notebook.length>1">s</span>  <span class="strong"> {{data.notebook.join(', ')}} </span>will be deleted from this group. All <span *ngIf="data.notebook.length===1">his</span><span *ngIf="data.notebook.length>1">their</span> resources authorized within this group will be terminated.</p>
+      <p *ngIf="data.type === 4 && data.notebook.length" class="text-center delete-user">User<span *ngIf="data.notebook.length>1">s</span>  <span class="strong"> {{data.notebook.join(', ')}} </span>will be deleted from this group.</p>
+<!--        All <span *ngIf="data.notebook.length===1">his</span><span *ngIf="data.notebook.length>1">their</span> resources authorized within this group will be terminated.-->
       <mat-list class="resources"
         [hidden]="model.notebook.type === 'Edge Node' || model.notebook.name === 'edge node'
                                   || !model.notebook.resources || model.notebook.resources.length === 0 || (!isAliveResources && !confirmationType) || onlyKilled">
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.model.ts b/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.model.ts
index 1bfcd06..79b0512 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.model.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/modal-dialog/confirmation-dialog/confirmation-dialog.model.ts
@@ -76,9 +76,9 @@
   }
 
   private terminateExploratory(): Observable<{}> {
-    return this.manageAction
-      ? this.manageEnvironmentsService.environmentManagement(this.notebook.user, 'terminate', this.notebook.project,  this.notebook.name)
-      : this.userResourceService.suspendExploratoryEnvironment(this.notebook, 'terminate');
+    return this.manageAction ? this.manageEnvironmentsService.environmentManagement(
+        this.notebook.user, 'terminate', this.notebook.project,  this.notebook.name
+      ) : this.userResourceService.suspendExploratoryEnvironment(this.notebook, 'terminate');
   }
 
   private stopEdgeNode(): Observable<{}> {
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html
index 9485a87..d8b2322 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html
@@ -75,7 +75,7 @@
             <span *ngIf="isExpanded; else resources">List of Resources</span>
             <ng-template #resources><i class="material-icons">dashboard</i></ng-template>
           </a>
-          <a class="nav-item has-children" *ngIf="healthStatus?.admin">
+          <a class="nav-item has-children" *ngIf="healthStatus?.admin || healthStatus?.projectAdmin">
             <span *ngIf="isExpanded">Administration</span>
 
             <a class="sub-nav-item" [style.margin-left.px]="isExpanded ? '30' : '0'" [routerLink]="['/roles']"
@@ -101,23 +101,23 @@
             <ng-template #billing><i class="material-icons">account_balance_wallet</i></ng-template>
           </a>
         </div>
-        <div>
-          <a class="nav-item" [routerLink]="['/swagger']" [routerLinkActive]="['active']"
-            [routerLinkActiveOptions]="{exact:true}">
-            <span *ngIf="isExpanded; else endpoint">Cloud Endpoint API</span>
-            <ng-template #endpoint>
-              <span>
-                <svg width="30px" height="27px" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">
-                  <g>
-                    <path d="M127.059657,255.996921 C58.8506544,255.526472 -0.457073619,198.918442 0.00265506057,126.998303 C0.444649399,57.7958628 57.9516598,-0.468967577 129.11002,0.00284555012 C198.267128,0.462386081 256.613109,57.8667711 255.995136,128.194199 C256.568091,197.883453 197.934268,256.489189 127.059657,255.996921 Z M127.059657,255.996921 C58.8506544,255.526472 -0.457073619,198.918442 0.00265506057,126.998303 C0.444649399,57.7958628 57.9516598,-0.468967577 129.11002,0.00284555012 C198.267128,0.462386081 256.613109,57.8667711 255.995136,128.194199 C256.568091,197.883453 197.934268,256.489189 127.059657,255.996921 Z" fill="#FFFFFF"></path>
-                    <path id="swager-bgr" d="M127.184644,238.997327 C68.0323765,238.589271 16.6036091,189.498744 17.0023028,127.131428 C17.3860285,67.1185953 67.2554,16.5917106 128.963117,17.0024872 C188.934544,17.4010221 239.531905,67.1825241 238.995778,128.169251 C239.492444,188.602381 188.64743,239.424426 127.184644,238.997327 Z M127.184644,238.997327 C68.0323765,238.589271 16.6036091,189.498744 17.0023028,127.131428 C17.3860285,67.1185953 67.2554,16.5917106 128.963117,17.0024872 C188.934544,17.4010221 239.531905,67.1825241 238.995778,128.169251 C239.492444,188.602381 188.64743,239.424426 127.184644,238.997327 Z" fill="#577289"></path>
-                    <path d="M169.327319,127.956161 C169.042723,133.246373 164.421106,137.639224 159.866213,136.872586 C159.844426,136.872586 159.821277,136.872586 159.798128,136.872586 C154.753021,136.879395 150.658383,132.794288 150.652936,127.749182 C150.824511,122.690458 155.019915,118.703395 160.08,118.789182 C165.125106,118.813692 169.59966,123.077182 169.327319,127.956161 Z M88.2011915,179.220161 C90.1034894,179.27599 92.0071489,179.235139 94.2008511,179.235139 L94.2008511,193.021012 C80.5661277,195.326373 69.3348085,191.455054 66.5787234,179.929607 C65.6350638,175.69199 65.0549787,171.380841 64.8425532,167.04382 C64.5497872,162.452161 65.0563404,157.808756 64.706383,153.225267 C63.7368511,140.613182 62.1028085,136.30748 50,135.711054 L50,120.014714 C50.8674043,119.81182 51.7470638,119.662033 52.6321702,119.562629 C59.2677447,119.23582 62.0646809,117.201437 63.5489362,110.665267 C64.2243404,106.992756 64.6246809,103.275309 64.7431489,99.5428839 C65.268766,92.3258627 65.0822128,84.991735 66.2845957,77.8918201 C68.0221277,67.6245861 74.3962553,62.6366712 84.9249362,62.0783733 C87.9206809,61.9176925 90.9259574,62.0538627 94.3206809,62.0538627 L94.3206809,76.1447563 C92.9235745,76.2441605 91.6435745,76.4470542 90.3717447,76.4089265 C81.7916596,76.146118 81.3477447,79.0683308 80.7213617,86.1709691 C80.3305532,90.6250967 80.8697872,95.1554797 80.5661277,99.6245861 C80.2488511,104.071905 79.6537872,108.496075 78.7850213,112.869863 C77.547234,119.208586 73.6500426,123.922799 68.2495319,127.92348 C78.7332766,134.745607 79.9261277,145.346458 80.6069787,156.110714 C80.9732766,161.895224 80.8057872,167.720586 81.3926809,173.476501 C81.8502128,177.944246 83.5877447,179.08399 88.2011915,179.220161 Z M97.0372766,118.789182 C97.0917447,118.789182 97.1448511,118.789182 97.1993191,118.789182 C102.211745,118.872246 106.209702,123.002288 106.126638,128.016075 C106.126638,128.180841 106.121191,128.344246 106.11166,128.50765 C105.829787,133.407054 101.630298,137.149012 
96.7308936,136.867139 C96.5334468,136.871224 96.3373617,136.867139 96.1399149,136.857607 C91.1506383,136.609778 87.3065532,132.36399 87.554383,127.374714 C87.8022128,122.385437 92.048,118.541352 97.0372766,118.789182 Z M128.273362,118.789182 C133.755574,118.746969 137.396766,122.29965 137.425362,127.719224 C137.455319,133.284501 134.003404,136.845352 128.556596,136.868501 C123.017191,136.893012 119.370553,133.389352 119.340596,128.002458 C119.324255,127.727395 119.32017,127.452331 119.32834,127.177267 C119.482213,122.390884 123.486979,118.635309 128.273362,118.789182 Z M193.673191,111.92348 C195.131574,117.370288 197.970723,119.284841 203.704851,119.546288 C204.644426,119.589863 205.579915,119.749182 206.868085,119.892161 L206.868085,135.584416 C206.170894,135.813182 205.456,135.984756 204.730213,136.096416 C197.046128,136.574373 193.54383,139.726714 192.76766,147.431224 C192.272,152.349692 192.312851,157.322629 191.972426,162.258799 C191.829447,167.678373 191.336511,173.082969 190.49634,178.438544 C188.535489,188.142033 182.477277,192.982884 172.467404,193.573863 C169.245617,193.764501 166.000681,193.60382 162.526979,193.60382 L162.526979,179.578288 C164.396596,179.462544 166.046979,179.303224 167.701447,179.263735 C173.682043,179.120756 175.796766,177.192586 176.089532,171.252841 C176.413617,164.727565 176.555234,158.194118 176.846638,151.66748 C177.270128,142.233607 179.853277,133.806033 188.641702,127.922118 C183.612936,124.336756 179.575489,119.994288 178.529702,114.138969 C177.264681,107.041778 176.85617,99.7879903 176.175319,92.5913946 C175.838979,88.9937776 175.855319,85.3648414 175.504,81.7699478 C175.125447,77.8890967 172.459234,76.5464584 168.926979,76.4593095 C166.903489,76.4102882 164.87183,76.4497776 162.284596,76.4497776 L162.284596,62.7537776 C178.793872,60.0126712 190.198128,65.5057776 191.257532,81.3015222 C191.701447,87.9343733 191.636085,94.5985435 192.060936,101.231395 C192.247489,104.839905 192.786723,108.421182 193.673191,111.92348 Z" 
fill="#ffffff"></path>
-                  </g>
-                </svg>
-              </span>
-            </ng-template>
-          </a>
-        </div>
+<!--        <div>-->
+<!--          <a class="nav-item" [routerLink]="['/swagger']" [routerLinkActive]="['active']"-->
+<!--            [routerLinkActiveOptions]="{exact:true}">-->
+<!--            <span *ngIf="isExpanded; else endpoint">Cloud Endpoint API</span>-->
+<!--            <ng-template #endpoint>-->
+<!--              <span>-->
+<!--                <svg width="30px" height="27px" viewBox="0 0 256 256" version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" preserveAspectRatio="xMidYMid">-->
+<!--                  <g>-->
+<!--                    <path d="M127.059657,255.996921 C58.8506544,255.526472 -0.457073619,198.918442 0.00265506057,126.998303 C0.444649399,57.7958628 57.9516598,-0.468967577 129.11002,0.00284555012 C198.267128,0.462386081 256.613109,57.8667711 255.995136,128.194199 C256.568091,197.883453 197.934268,256.489189 127.059657,255.996921 Z M127.059657,255.996921 C58.8506544,255.526472 -0.457073619,198.918442 0.00265506057,126.998303 C0.444649399,57.7958628 57.9516598,-0.468967577 129.11002,0.00284555012 C198.267128,0.462386081 256.613109,57.8667711 255.995136,128.194199 C256.568091,197.883453 197.934268,256.489189 127.059657,255.996921 Z" fill="#FFFFFF"></path>-->
+<!--                    <path id="swager-bgr" d="M127.184644,238.997327 C68.0323765,238.589271 16.6036091,189.498744 17.0023028,127.131428 C17.3860285,67.1185953 67.2554,16.5917106 128.963117,17.0024872 C188.934544,17.4010221 239.531905,67.1825241 238.995778,128.169251 C239.492444,188.602381 188.64743,239.424426 127.184644,238.997327 Z M127.184644,238.997327 C68.0323765,238.589271 16.6036091,189.498744 17.0023028,127.131428 C17.3860285,67.1185953 67.2554,16.5917106 128.963117,17.0024872 C188.934544,17.4010221 239.531905,67.1825241 238.995778,128.169251 C239.492444,188.602381 188.64743,239.424426 127.184644,238.997327 Z" fill="#577289"></path>-->
+<!--                    <path d="M169.327319,127.956161 C169.042723,133.246373 164.421106,137.639224 159.866213,136.872586 C159.844426,136.872586 159.821277,136.872586 159.798128,136.872586 C154.753021,136.879395 150.658383,132.794288 150.652936,127.749182 C150.824511,122.690458 155.019915,118.703395 160.08,118.789182 C165.125106,118.813692 169.59966,123.077182 169.327319,127.956161 Z M88.2011915,179.220161 C90.1034894,179.27599 92.0071489,179.235139 94.2008511,179.235139 L94.2008511,193.021012 C80.5661277,195.326373 69.3348085,191.455054 66.5787234,179.929607 C65.6350638,175.69199 65.0549787,171.380841 64.8425532,167.04382 C64.5497872,162.452161 65.0563404,157.808756 64.706383,153.225267 C63.7368511,140.613182 62.1028085,136.30748 50,135.711054 L50,120.014714 C50.8674043,119.81182 51.7470638,119.662033 52.6321702,119.562629 C59.2677447,119.23582 62.0646809,117.201437 63.5489362,110.665267 C64.2243404,106.992756 64.6246809,103.275309 64.7431489,99.5428839 C65.268766,92.3258627 65.0822128,84.991735 66.2845957,77.8918201 C68.0221277,67.6245861 74.3962553,62.6366712 84.9249362,62.0783733 C87.9206809,61.9176925 90.9259574,62.0538627 94.3206809,62.0538627 L94.3206809,76.1447563 C92.9235745,76.2441605 91.6435745,76.4470542 90.3717447,76.4089265 C81.7916596,76.146118 81.3477447,79.0683308 80.7213617,86.1709691 C80.3305532,90.6250967 80.8697872,95.1554797 80.5661277,99.6245861 C80.2488511,104.071905 79.6537872,108.496075 78.7850213,112.869863 C77.547234,119.208586 73.6500426,123.922799 68.2495319,127.92348 C78.7332766,134.745607 79.9261277,145.346458 80.6069787,156.110714 C80.9732766,161.895224 80.8057872,167.720586 81.3926809,173.476501 C81.8502128,177.944246 83.5877447,179.08399 88.2011915,179.220161 Z M97.0372766,118.789182 C97.0917447,118.789182 97.1448511,118.789182 97.1993191,118.789182 C102.211745,118.872246 106.209702,123.002288 106.126638,128.016075 C106.126638,128.180841 106.121191,128.344246 106.11166,128.50765 C105.829787,133.407054 101.630298,137.149012 
96.7308936,136.867139 C96.5334468,136.871224 96.3373617,136.867139 96.1399149,136.857607 C91.1506383,136.609778 87.3065532,132.36399 87.554383,127.374714 C87.8022128,122.385437 92.048,118.541352 97.0372766,118.789182 Z M128.273362,118.789182 C133.755574,118.746969 137.396766,122.29965 137.425362,127.719224 C137.455319,133.284501 134.003404,136.845352 128.556596,136.868501 C123.017191,136.893012 119.370553,133.389352 119.340596,128.002458 C119.324255,127.727395 119.32017,127.452331 119.32834,127.177267 C119.482213,122.390884 123.486979,118.635309 128.273362,118.789182 Z M193.673191,111.92348 C195.131574,117.370288 197.970723,119.284841 203.704851,119.546288 C204.644426,119.589863 205.579915,119.749182 206.868085,119.892161 L206.868085,135.584416 C206.170894,135.813182 205.456,135.984756 204.730213,136.096416 C197.046128,136.574373 193.54383,139.726714 192.76766,147.431224 C192.272,152.349692 192.312851,157.322629 191.972426,162.258799 C191.829447,167.678373 191.336511,173.082969 190.49634,178.438544 C188.535489,188.142033 182.477277,192.982884 172.467404,193.573863 C169.245617,193.764501 166.000681,193.60382 162.526979,193.60382 L162.526979,179.578288 C164.396596,179.462544 166.046979,179.303224 167.701447,179.263735 C173.682043,179.120756 175.796766,177.192586 176.089532,171.252841 C176.413617,164.727565 176.555234,158.194118 176.846638,151.66748 C177.270128,142.233607 179.853277,133.806033 188.641702,127.922118 C183.612936,124.336756 179.575489,119.994288 178.529702,114.138969 C177.264681,107.041778 176.85617,99.7879903 176.175319,92.5913946 C175.838979,88.9937776 175.855319,85.3648414 175.504,81.7699478 C175.125447,77.8890967 172.459234,76.5464584 168.926979,76.4593095 C166.903489,76.4102882 164.87183,76.4497776 162.284596,76.4497776 L162.284596,62.7537776 C178.793872,60.0126712 190.198128,65.5057776 191.257532,81.3015222 C191.701447,87.9343733 191.636085,94.5985435 192.060936,101.231395 C192.247489,104.839905 192.786723,108.421182 193.673191,111.92348 Z" 
fill="#ffffff"></path>-->
+<!--                  </g>-->
+<!--                </svg>-->
+<!--              </span>-->
+<!--            </ng-template>-->
+<!--          </a>-->
+<!--        </div>-->
       </nav>
     </mat-nav-list>
   </mat-sidenav>
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.ts b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.ts
index 952dfd0..1f33caa 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.ts
@@ -111,7 +111,7 @@
         this.subscriptions.add(this.healthStatusService.statusData.pipe(skip(1)).subscribe(result => {
           this.healthStatus = result;
           result.status && this.checkQuoteUsed(this.healthStatus);
-          result.status && !result.projectAssigned && this.checkAssignment(this.healthStatus);
+          result.status && !result.projectAssigned && !result.admin && this.checkAssignment(this.healthStatus);
         }));
         this.subscriptions.add(timer(0, this.CHECK_ACTIVE_SCHEDULE_TIMEOUT).subscribe(() => this.refreshSchedulerData()));
         this.currentUserName = this.getUserName();
diff --git a/services/self-service/src/main/resources/webapp/src/app/webterminal/webterminal.component.html b/services/self-service/src/main/resources/webapp/src/app/webterminal/webterminal.component.html
index 4aedd1c..6fe2d98 100644
--- a/services/self-service/src/main/resources/webapp/src/app/webterminal/webterminal.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/webterminal/webterminal.component.html
@@ -15,6 +15,6 @@
   ~ KIND, either express or implied.  See the License for the
   ~ specific language governing permissions and limitations
   ~ under the License.
--->
+  -->
 
 <div #terminal id="display" class="guac-display guac-loading"></div>
diff --git a/services/self-service/src/main/resources/webapp/src/assets/styles/_theme.scss b/services/self-service/src/main/resources/webapp/src/assets/styles/_theme.scss
index 7b48bba..43e9c50 100644
--- a/services/self-service/src/main/resources/webapp/src/assets/styles/_theme.scss
+++ b/services/self-service/src/main/resources/webapp/src/assets/styles/_theme.scss
@@ -662,3 +662,8 @@
   }
 }
 
+.filter-row-item, .label-header{
+  box-shadow: inset 0 -1px 0 lightgrey;
+  border-bottom: none !important;
+}
+
diff --git a/services/self-service/src/main/resources/webapp/src/dictionary/gcp.dictionary.ts b/services/self-service/src/main/resources/webapp/src/dictionary/gcp.dictionary.ts
index 0d02f63..d92b7ff 100644
--- a/services/self-service/src/main/resources/webapp/src/dictionary/gcp.dictionary.ts
+++ b/services/self-service/src/main/resources/webapp/src/dictionary/gcp.dictionary.ts
@@ -42,7 +42,7 @@
         'service_filter_key': 'product',
         'type': 'dlab_resource_type',
         'resourceType': 'dlab_resource_type',
-        'instance_size': 'shape',
+        'instance_size': 'shapes',
         'dlabId': 'dlab_id'
     },
     'service': 'Product',
diff --git a/services/self-service/src/main/resources/webapp/src/dictionary/global.dictionary.ts b/services/self-service/src/main/resources/webapp/src/dictionary/global.dictionary.ts
index dfb97b0..26fe456 100644
--- a/services/self-service/src/main/resources/webapp/src/dictionary/global.dictionary.ts
+++ b/services/self-service/src/main/resources/webapp/src/dictionary/global.dictionary.ts
@@ -36,27 +36,27 @@
   }
 
   constructor(
-    public user: Array<string>,
-    public service: Array<string>,
+    public users: Array<string>,
+    public products: Array<string>,
     public resource_type: Array<string>,
-    public status: Array<string>,
-    public shape: Array<string>,
+    public statuses: Array<string>,
+    public shapes: Array<string>,
     public date_start: string,
     public date_end: string,
     public dlab_id: string,
-    public project?: Array<string>
+    public projects: Array<string>
   ) { }
 
   defaultConfigurations(): void {
-    this.user = [];
-    this.service = [];
+    this.users = [];
+    this.products = [];
     this.resource_type = [];
-    this.status = [];
-    this.shape = [];
+    this.statuses = [];
+    this.shapes = [];
     this.date_start = '';
     this.date_end = '';
     this.dlab_id = '';
-    this.project = [];
+    this.projects = [];
   }
 }
 
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/EnvironmentResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/EnvironmentResourceTest.java
index c257995..5ac537b 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/EnvironmentResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/EnvironmentResourceTest.java
@@ -20,7 +20,6 @@
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.resources.dto.UserDTO;
 import com.epam.dlab.backendapi.service.EnvironmentService;
 import com.epam.dlab.exceptions.ResourceConflictException;
 import io.dropwizard.auth.AuthenticationException;
@@ -31,16 +30,15 @@
 import org.junit.Test;
 
 import javax.ws.rs.client.Entity;
-import javax.ws.rs.core.GenericType;
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 import java.util.Collections;
-import java.util.List;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.mockito.Matchers.anyString;
+import static org.mockito.Matchers.eq;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doThrow;
@@ -63,45 +61,9 @@
 	}
 
 	@Test
-	public void getUsersWithActiveEnv() {
-		when(environmentService.getUsers()).thenReturn(Collections.singletonList(new UserDTO("activeUser",
-				null, UserDTO.Status.ACTIVE)));
-		final Response response = resources.getJerseyTest()
-				.target("/environment/user")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.get();
-
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-		assertEquals(Collections.singletonList(new UserDTO("activeUser", null, UserDTO.Status.ACTIVE)),
-				response.readEntity(new GenericType<List<UserDTO>>() {
-				}));
-		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
-
-		verify(environmentService).getUsers();
-		verifyNoMoreInteractions(environmentService);
-	}
-
-	@Test
-	public void getUsersWithActiveEnvWithFailedAuth() throws AuthenticationException {
-		authFailSetup();
-		when(environmentService.getUsers()).thenReturn(Collections.singletonList(new UserDTO("activeUser",
-				null, UserDTO.Status.ACTIVE)));
-		final Response response = resources.getJerseyTest()
-				.target("/environment/user")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.get();
-
-		assertEquals(HttpStatus.SC_FORBIDDEN, response.getStatus());
-		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
-
-		verifyZeroInteractions(environmentService);
-	}
-
-	@Test
 	public void getAllEnv() {
-		when(environmentService.getAllEnv()).thenReturn(Collections.emptyList());
+		UserInfo userInfo = getUserInfo();
+		when(environmentService.getAllEnv(userInfo)).thenReturn(Collections.emptyList());
 		final Response response = resources.getJerseyTest()
 				.target("/environment/all")
 				.request()
@@ -111,14 +73,14 @@
 		assertEquals(HttpStatus.SC_OK, response.getStatus());
 		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(environmentService).getAllEnv();
+		verify(environmentService).getAllEnv(eq(userInfo));
 		verifyNoMoreInteractions(environmentService);
 	}
 
 	@Test
 	public void getAllEnvWithFailedAuth() throws AuthenticationException {
 		authFailSetup();
-		when(environmentService.getAllEnv()).thenReturn(Collections.emptyList());
+		when(environmentService.getAllEnv(getUserInfo())).thenReturn(Collections.emptyList());
 		final Response response = resources.getJerseyTest()
 				.target("/environment/all")
 				.request()
@@ -132,55 +94,6 @@
 	}
 
 	@Test
-	public void stopEnv() {
-		doNothing().when(environmentService).stopEnvironment(any(UserInfo.class), anyString(), anyString());
-		final Response response = resources.getJerseyTest()
-				.target("/environment/stop/projectName")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.post(Entity.text(USER));
-
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-		assertNull(response.getHeaderString(HttpHeaders.CONTENT_TYPE));
-
-		verify(environmentService).stopEnvironment(new UserInfo(USER, TOKEN), USER, "projectName");
-		verifyNoMoreInteractions(environmentService);
-	}
-
-	@Test
-	public void stopEnvWithFailedAuth() throws AuthenticationException {
-		authFailSetup();
-		doNothing().when(environmentService).stopEnvironment(any(UserInfo.class), anyString(), anyString());
-		final Response response = resources.getJerseyTest()
-				.target("/environment/stop/projectName")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.post(Entity.text(USER));
-
-		assertEquals(HttpStatus.SC_FORBIDDEN, response.getStatus());
-		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
-
-		verifyZeroInteractions(environmentService);
-	}
-
-	@Test
-	public void stopEnvWithResourceConflictException() {
-		doThrow(new ResourceConflictException("Can not stop environment because one of the user resources is in " +
-				"status CREATING or STARTING")).when(environmentService).stopEnvironment(any(UserInfo.class), anyString(), anyString());
-		final Response response = resources.getJerseyTest()
-				.target("/environment/stop/projectName")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.post(Entity.text(USER));
-
-		assertEquals(HttpStatus.SC_INTERNAL_SERVER_ERROR, response.getStatus());
-		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
-
-		verify(environmentService).stopEnvironment(new UserInfo(USER, TOKEN), USER, "projectName");
-		verifyNoMoreInteractions(environmentService);
-	}
-
-	@Test
 	public void stopNotebook() {
 		doNothing().when(environmentService).stopExploratory(any(UserInfo.class), anyString(), anyString(), anyString());
 		final Response response = resources.getJerseyTest()
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ImageExploratoryResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ImageExploratoryResourceTest.java
index 38c0e46..d74f94e 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ImageExploratoryResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ImageExploratoryResourceTest.java
@@ -272,7 +272,7 @@
 	}
 
 	private List<ImageInfoRecord> getImageList() {
-		ImageInfoRecord imageInfoRecord = new ImageInfoRecord("someName", "someDescription", "someProject", "someEndpoint", "someApp",
+		ImageInfoRecord imageInfoRecord = new ImageInfoRecord("someName", "someDescription", "someProject", "someEndpoint", "someUser", "someApp",
 				"someFullName", ImageStatus.CREATED);
 		return Collections.singletonList(imageInfoRecord);
 	}
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResourceTest.java
index b92335e..0f63cb9 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/InfrastructureInfoResourceTest.java
@@ -37,7 +37,17 @@
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.anyBoolean;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.eq;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.refEq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
 
 public class InfrastructureInfoResourceTest extends TestBase {
 
@@ -84,7 +94,7 @@
 	@Test
 	public void healthStatus() {
 		HealthStatusPageDTO hspDto = getHealthStatusPageDTO();
-		when(infrastructureInfoService.getHeathStatus(any(UserInfo.class), anyBoolean(), anyBoolean())).thenReturn(hspDto);
+		when(infrastructureInfoService.getHeathStatus(any(UserInfo.class), anyBoolean())).thenReturn(hspDto);
 		final Response response = resources.getJerseyTest()
 				.target("/infrastructure/status")
 				.queryParam("full", "1")
@@ -96,7 +106,7 @@
 		assertEquals(hspDto.getStatus(), response.readEntity(HealthStatusPageDTO.class).getStatus());
 		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(true), anyBoolean());
+		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(true));
 		verifyNoMoreInteractions(infrastructureInfoService);
 	}
 
@@ -104,7 +114,7 @@
 	public void healthStatusWithFailedAuth() throws AuthenticationException {
 		authFailSetup();
 		HealthStatusPageDTO hspDto = getHealthStatusPageDTO();
-		when(infrastructureInfoService.getHeathStatus(any(UserInfo.class), anyBoolean(), anyBoolean())).thenReturn(hspDto);
+		when(infrastructureInfoService.getHeathStatus(any(UserInfo.class), anyBoolean())).thenReturn(hspDto);
 		final Response response = resources.getJerseyTest()
 				.target("/infrastructure/status")
 				.queryParam("full", "1")
@@ -116,14 +126,14 @@
 		assertEquals(hspDto.getStatus(), response.readEntity(HealthStatusPageDTO.class).getStatus());
 		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(true), anyBoolean());
+		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(true));
 		verifyNoMoreInteractions(infrastructureInfoService);
 	}
 
 	@Test
 	public void healthStatusWithDefaultQueryParam() {
 		HealthStatusPageDTO hspDto = getHealthStatusPageDTO();
-		when(infrastructureInfoService.getHeathStatus(any(UserInfo.class), anyBoolean(), anyBoolean())).thenReturn(hspDto);
+		when(infrastructureInfoService.getHeathStatus(any(UserInfo.class), anyBoolean())).thenReturn(hspDto);
 		final Response response = resources.getJerseyTest()
 				.target("/infrastructure/status")
 				.request()
@@ -134,14 +144,14 @@
 		assertEquals(hspDto.getStatus(), response.readEntity(HealthStatusPageDTO.class).getStatus());
 		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(false), anyBoolean());
+		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(false));
 		verifyNoMoreInteractions(infrastructureInfoService);
 	}
 
 	@Test
 	public void healthStatusWithException() {
 		doThrow(new DlabException("Could not return status of resources for user"))
-				.when(infrastructureInfoService).getHeathStatus(any(UserInfo.class), anyBoolean(), anyBoolean());
+				.when(infrastructureInfoService).getHeathStatus(any(UserInfo.class), anyBoolean());
 		final Response response = resources.getJerseyTest()
 				.target("/infrastructure/status")
 				.request()
@@ -151,7 +161,7 @@
 		assertEquals(HttpStatus.SC_INTERNAL_SERVER_ERROR, response.getStatus());
 		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(false), anyBoolean());
+		verify(infrastructureInfoService).getHeathStatus(refEq(getUserInfo()), eq(false));
 		verifyNoMoreInteractions(infrastructureInfoService);
 	}
 
@@ -159,7 +169,7 @@
 	@Test
 	public void getUserResourcesWithException() {
 		doThrow(new DlabException("Could not load list of provisioned resources for user"))
-				.when(infrastructureInfoService).getUserResources(anyString());
+				.when(infrastructureInfoService).getUserResources(any(UserInfo.class));
 		final Response response = resources.getJerseyTest()
 				.target("/infrastructure/info")
 				.request()
@@ -169,7 +179,7 @@
 		assertEquals(HttpStatus.SC_INTERNAL_SERVER_ERROR, response.getStatus());
 		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(infrastructureInfoService).getUserResources(USER.toLowerCase());
+		verify(infrastructureInfoService).getUserResources(any());
 		verifyNoMoreInteractions(infrastructureInfoService);
 	}
 
@@ -191,8 +201,8 @@
 	}
 
 	private HealthStatusPageDTO getHealthStatusPageDTO() {
-		HealthStatusPageDTO hspdto = new HealthStatusPageDTO();
-		hspdto.setStatus("someStatus");
-		return hspdto;
+		return HealthStatusPageDTO.builder()
+				.status("someStatus")
+				.build();
 	}
 }
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/KeycloakResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/KeycloakResourceTest.java
index 37c4c5a..c0be4df 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/KeycloakResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/KeycloakResourceTest.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ProjectResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ProjectResourceTest.java
index b85f631..1f6fc46 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ProjectResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/ProjectResourceTest.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.auth.UserInfo;
@@ -17,13 +36,17 @@
 import javax.ws.rs.core.HttpHeaders;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-
 import java.util.Collections;
 
-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
 import static org.mockito.Matchers.any;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.anyList;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
 import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
 
 
 public class ProjectResourceTest extends TestBase {
@@ -40,20 +63,6 @@
     }
 
     @Test
-    public void getProjectsForManaging() {
-        final Response response = resources.getJerseyTest()
-                .target("project/managing")
-                .request()
-                .header("Authorization", "Bearer " + TOKEN)
-                .get();
-
-        assertEquals(HttpStatus.SC_OK, response.getStatus());
-        assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
-        verify(projectService, times(1)).getProjectsForManaging();
-        verifyNoMoreInteractions(projectService);
-    }
-
-    @Test
     public void stopProject() {
         final Response response = resources.getJerseyTest()
                 .target("project/stop")
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserGroupResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserGroupResourceTest.java
index 1953694..713eda9 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserGroupResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserGroupResourceTest.java
@@ -19,14 +19,13 @@
 
 package com.epam.dlab.backendapi.resources;
 
+import com.epam.dlab.auth.UserInfo;
+import com.epam.dlab.backendapi.dao.ProjectDAO;
 import com.epam.dlab.backendapi.resources.dto.GroupDTO;
-import com.epam.dlab.backendapi.resources.dto.UpdateRoleGroupDto;
-import com.epam.dlab.backendapi.resources.dto.UpdateUserGroupDto;
 import com.epam.dlab.backendapi.resources.dto.UserGroupDto;
 import com.epam.dlab.backendapi.service.UserGroupService;
 import io.dropwizard.auth.AuthenticationException;
 import io.dropwizard.testing.junit.ResourceTestRule;
-import org.apache.commons.lang3.StringUtils;
 import org.apache.http.HttpStatus;
 import org.junit.Before;
 import org.junit.Rule;
@@ -41,26 +40,31 @@
 import java.util.List;
 import java.util.Set;
 
-import static java.util.Collections.singleton;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.verifyZeroInteractions;
+import static org.mockito.Mockito.when;
 
 public class UserGroupResourceTest extends TestBase {
 
-	private static final String USER = "user";
-	private static final String ROLE_ID = "id";
-	private static final String GROUP = "group";
-	private UserGroupService userGroupService = mock(UserGroupService.class);
+    private static final String USER = "user";
+    private static final String ROLE_ID = "id";
+    private static final String GROUP = "group";
+    private UserGroupService userGroupService = mock(UserGroupService.class);
+    private ProjectDAO projectDAO = mock(ProjectDAO.class);
 
-	@Before
-	public void setup() throws AuthenticationException {
-		authSetup();
-	}
+    @Before
+    public void setup() throws AuthenticationException {
+        authSetup();
+    }
 
-	@Rule
-	public final ResourceTestRule resources =
-			getResourceTestRuleInstance(new UserGroupResource(userGroupService));
+    @Rule
+    public final ResourceTestRule resources =
+            getResourceTestRuleInstance(new UserGroupResource(userGroupService));
 
 	@Test
 	public void createGroup() {
@@ -116,77 +120,31 @@
 
 		assertEquals(HttpStatus.SC_OK, response.getStatus());
 
-		verify(userGroupService).updateGroup(GROUP, Collections.singleton(ROLE_ID), Collections.singleton(USER));
+		verify(userGroupService).updateGroup(getUserInfo(), GROUP, Collections.singleton(ROLE_ID), Collections.singleton(USER));
 		verifyNoMoreInteractions(userGroupService);
 	}
 
 	@Test
 	public void getGroups() {
-		when(userGroupService.getAggregatedRolesByGroup()).thenReturn(Collections.singletonList(getUserGroup()));
+        when(userGroupService.getAggregatedRolesByGroup(any(UserInfo.class))).thenReturn(Collections.singletonList(getUserGroup()));
 
-		final Response response = resources.getJerseyTest()
-				.target("/group")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.get();
+        final Response response = resources.getJerseyTest()
+                .target("/group")
+                .request()
+                .header("Authorization", "Bearer " + TOKEN)
+                .get();
 
-		final List<UserGroupDto> actualRoles = response.readEntity(new GenericType<List<UserGroupDto>>() {
-		});
+        final List<UserGroupDto> actualRoles = response.readEntity(new GenericType<List<UserGroupDto>>() {
+        });
 
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-		assertEquals(GROUP, actualRoles.get(0).getGroup());
-		assertTrue(actualRoles.get(0).getRoles().isEmpty());
-		assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
+        assertEquals(HttpStatus.SC_OK, response.getStatus());
+        assertEquals(GROUP, actualRoles.get(0).getGroup());
+        assertTrue(actualRoles.get(0).getRoles().isEmpty());
+        assertEquals(MediaType.APPLICATION_JSON, response.getHeaderString(HttpHeaders.CONTENT_TYPE));
 
-		verify(userGroupService).getAggregatedRolesByGroup();
-		verifyNoMoreInteractions(userGroupService);
-	}
-
-	@Test
-	public void addRolesToGroup() {
-
-		final Response response = resources.getJerseyTest()
-				.target("/group/role")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.put(Entity.json(new UpdateRoleGroupDto(singleton(ROLE_ID), GROUP)));
-
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-
-		verify(userGroupService).updateRolesForGroup(GROUP, singleton(ROLE_ID));
-		verifyNoMoreInteractions(userGroupService);
-	}
-
-	@Test
-	public void addRolesToGroupWithValidationException() {
-
-		final Response response = resources.getJerseyTest()
-				.target("/group/role")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.put(Entity.json(new UpdateRoleGroupDto(singleton(ROLE_ID), StringUtils.EMPTY)));
-
-		assertEquals(HttpStatus.SC_UNPROCESSABLE_ENTITY, response.getStatus());
-
-		verifyZeroInteractions(userGroupService);
-	}
-
-	@Test
-	public void deleteGroupFromRole() {
-		final Response response = resources.getJerseyTest()
-				.target("/group/role")
-				.queryParam("group", GROUP)
-				.queryParam("roleId", ROLE_ID)
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.delete();
-
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-
-
-		verify(userGroupService).removeGroupFromRole(singleton(GROUP), singleton(ROLE_ID));
-		verifyNoMoreInteractions(userGroupService);
-	}
+        verify(userGroupService).getAggregatedRolesByGroup(getUserInfo());
+        verifyNoMoreInteractions(userGroupService);
+    }
 
 	@Test
 	public void deleteGroup() {
@@ -203,89 +161,15 @@
 		verifyNoMoreInteractions(userGroupService);
 	}
 
-	@Test
-	public void deleteGroupFromRoleWithValidationException() {
-		final Response response = resources.getJerseyTest()
-				.target("/group/role")
-				.queryParam("group", GROUP)
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.delete();
-
-		assertEquals(HttpStatus.SC_BAD_REQUEST, response.getStatus());
-
-		verifyZeroInteractions(userGroupService);
-	}
-
-	@Test
-	public void addUserToGroup() {
-		final Response response = resources.getJerseyTest()
-				.target("/group/user")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.put(Entity.json(new UpdateUserGroupDto(GROUP, singleton(USER))));
-
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-
-		verify(userGroupService).addUsersToGroup(GROUP, singleton(USER));
-		verifyNoMoreInteractions(userGroupService);
-	}
-
-	@Test
-	public void addUserToGroupWithValidationException() {
-		final Response response = resources.getJerseyTest()
-				.target("/group/user")
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.put(Entity.json(new UpdateUserGroupDto(StringUtils.EMPTY, singleton(USER))));
-
-		assertEquals(HttpStatus.SC_UNPROCESSABLE_ENTITY, response.getStatus());
-
-		verifyZeroInteractions(userGroupService);
-	}
-
-	@Test
-	public void deleteUserFromGroup() {
-		final Response response = resources.getJerseyTest()
-				.target("/group/user")
-				.queryParam("user", USER)
-				.queryParam("group", GROUP)
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.delete();
-
-		assertEquals(HttpStatus.SC_OK, response.getStatus());
-
-
-		verify(userGroupService).removeUserFromGroup(GROUP, USER);
-		verifyNoMoreInteractions(userGroupService);
-	}
-
-	@Test
-	public void deleteUserFromGroupWithValidationException() {
-		final Response response = resources.getJerseyTest()
-				.target("/group/user")
-				.queryParam("group", GROUP)
-				.request()
-				.header("Authorization", "Bearer " + TOKEN)
-				.delete();
-
-		assertEquals(HttpStatus.SC_BAD_REQUEST, response.getStatus());
-
-		verifyZeroInteractions(userGroupService);
-	}
-
 	private UserGroupDto getUserGroup() {
 		return new UserGroupDto(GROUP, Collections.emptyList(), Collections.emptySet());
-	}
+    }
 
-	private GroupDTO getCreateGroupDto(String group, Set<String> roleIds) {
-		final GroupDTO dto = new GroupDTO();
-		dto.setName(group);
-		dto.setRoleIds(roleIds);
-		dto.setUsers(Collections.singleton(USER));
-		return dto;
-	}
-
-
+    private GroupDTO getCreateGroupDto(String group, Set<String> roleIds) {
+        final GroupDTO dto = new GroupDTO();
+        dto.setName(group);
+        dto.setRoleIds(roleIds);
+        dto.setUsers(Collections.singleton(USER));
+        return dto;
+    }
 }
\ No newline at end of file
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserRoleResourceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserRoleResourceTest.java
index 6c0f5be..c4e2bd6 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserRoleResourceTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/resources/UserRoleResourceTest.java
@@ -35,9 +35,12 @@
 import java.util.Collections;
 import java.util.List;
 
-import static java.util.Collections.singleton;
 import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.refEq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
 
 public class UserRoleResourceTest extends TestBase {
 
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/KeycloakServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/KeycloakServiceImplTest.java
index 3c64dc6..5050391 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/KeycloakServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/KeycloakServiceImplTest.java
@@ -1,3 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.backendapi.conf.SelfServiceApplicationConfiguration;
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/UserRoleServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/UserRoleServiceImplTest.java
index 83fab66..883630c 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/UserRoleServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/UserRoleServiceImplTest.java
@@ -19,6 +19,7 @@
 package com.epam.dlab.backendapi.service;
 
 import com.epam.dlab.backendapi.dao.UserRoleDao;
+import com.epam.dlab.backendapi.resources.TestBase;
 import com.epam.dlab.backendapi.resources.dto.UserRoleDto;
 import com.epam.dlab.exceptions.ResourceNotFoundException;
 import org.junit.Rule;
@@ -29,37 +30,24 @@
 import org.mockito.Mock;
 import org.mockito.runners.MockitoJUnitRunner;
 
-import java.util.Collections;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.any;
+import static org.mockito.Mockito.refEq;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
 
 @RunWith(MockitoJUnitRunner.class)
-public class UserRoleServiceImplTest {
+public class UserRoleServiceImplTest extends TestBase {
 
-	private static final String ROLE_ID = "roleId";
-	@Mock
-	private UserRoleDao dao;
-	@InjectMocks
-	private UserRoleServiceImpl userRoleService;
-	@Rule
-	public ExpectedException expectedException = ExpectedException.none();
+    private static final String ROLE_ID = "roleId";
+    @Mock
+    private UserRoleDao dao;
+    @InjectMocks
+    private UserRoleServiceImpl userRoleService;
+    @Rule
+    public ExpectedException expectedException = ExpectedException.none();
 
-	@Test
-	public void getUserRoles() {
-		when(dao.findAll()).thenReturn(Collections.singletonList(getUserRole()));
-		final List<UserRoleDto> roles = userRoleService.getUserRoles();
-
-		assertEquals(1, roles.size());
-		assertEquals(ROLE_ID, roles.get(0).getId());
-
-		verify(dao).findAll();
-		verifyNoMoreInteractions(dao);
-	}
-
-
-	@Test
+    @Test
 	public void createRole() {
 
 		userRoleService.createRole(getUserRole());
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/aws/AwsBillingServiceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/aws/AwsBillingServiceTest.java
deleted file mode 100644
index 0c9cf26..0000000
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/aws/AwsBillingServiceTest.java
+++ /dev/null
@@ -1,224 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.service.aws;
-
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.dao.aws.AwsBillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.exceptions.DlabException;
-import org.bson.Document;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
-
-import java.text.ParseException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyNoMoreInteractions;
-import static org.mockito.Mockito.when;
-
-@RunWith(MockitoJUnitRunner.class)
-public class AwsBillingServiceTest {
-
-	private UserInfo userInfo;
-	private BillingFilter billingFilter;
-	private Document basicDocument;
-
-	@Mock
-	private AwsBillingDAO billingDAO;
-
-	@InjectMocks
-	private AwsBillingService awsBillingService;
-
-	@Rule
-	public ExpectedException expectedException = ExpectedException.none();
-
-	@Before
-	public void setUp() {
-		userInfo = getUserInfo();
-		billingFilter = new BillingFilter();
-		basicDocument = getBasicDocument();
-	}
-
-	@Test
-	public void getReportWithTheSameInstanceOfDocument() {
-		Document expectedDocument = new Document();
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(expectedDocument);
-
-		Document actualDocument = awsBillingService.getReport(userInfo, billingFilter);
-		assertEquals(expectedDocument, actualDocument);
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void getReportWithAnotherInstanceOfDocument() {
-		Document expectedDocument = new Document().append("someField", "someValue");
-		Document anotherDocument = new Document().append("someField", "anotherValue");
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(anotherDocument);
-
-		Document actualDocument = awsBillingService.getReport(userInfo, billingFilter);
-		assertNotEquals(expectedDocument, actualDocument);
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void getReportWithException() {
-		doThrow(new RuntimeException()).when(billingDAO).getReport(any(UserInfo.class), any(BillingFilter.class));
-
-		try {
-			awsBillingService.getReport(userInfo, billingFilter);
-		} catch (DlabException e) {
-			assertEquals("Cannot load billing report: null", e.getMessage());
-		}
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void downloadReport() {
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(basicDocument);
-
-		byte[] result = awsBillingService.downloadReport(userInfo, billingFilter);
-		assertNotNull(result);
-		assertTrue(result.length > 0);
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void downloadReportWithInapproprietaryDateFormatInDocument() {
-		basicDocument.put("from", "someDateStart");
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(basicDocument);
-
-		try {
-			awsBillingService.downloadReport(userInfo, billingFilter);
-		} catch (DlabException e) {
-			assertEquals("Cannot prepare CSV file", e.getMessage());
-		}
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void downloadReportWhenDocumentHasNotAllRequiredFields() {
-		basicDocument.remove("lines");
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(basicDocument);
-
-		expectedException.expect(NullPointerException.class);
-
-		awsBillingService.downloadReport(userInfo, billingFilter);
-	}
-
-	@Test
-	public void getReportFileName() {
-		String result = awsBillingService.getReportFileName(userInfo, billingFilter);
-		assertEquals("aws-billing-report.csv", result);
-	}
-
-	@Test
-	public void getFirstLine() throws ParseException {
-		String result = awsBillingService.getFirstLine(basicDocument);
-		assertEquals("Service base name: someSBN  Resource tag ID: someTagResourceId  Available reporting " +
-				"period from: Mar 21, 2018 to: Mar 22, 2018", result);
-	}
-
-	@Test
-	public void getFirstLineWithException() throws ParseException {
-		basicDocument.put("from", "someStartDate");
-
-		expectedException.expect(ParseException.class);
-		expectedException.expectMessage("Unparseable date: \"someStartDate\"");
-
-		awsBillingService.getFirstLine(basicDocument);
-
-	}
-
-	@Test
-	public void getHeadersList() {
-		List<String> expectedResult1 =
-				Arrays.asList("USER", "PROJECT", "ENVIRONMENT NAME", "RESOURCE TYPE", "SHAPE", "SERVICE", "SERVICE CHARGES");
-		List<String> expectedResult2 = expectedResult1.subList(1, expectedResult1.size());
-
-		List<String> actualResult1 = awsBillingService.getHeadersList(true);
-		assertEquals(expectedResult1, actualResult1);
-
-		List<String> actualResult2 = awsBillingService.getHeadersList(false);
-		assertEquals(expectedResult2, actualResult2);
-	}
-
-	@Test
-	public void getLine() {
-		String expectedResult1 = "someUser,someProject,someId,someResType,someShape,someProduct,someCost someCode\n";
-		String actualResult1 = awsBillingService.getLine(true, basicDocument);
-		assertEquals(expectedResult1, actualResult1);
-
-		basicDocument.remove("user");
-		String expectedResult2 = "someProject,someId,someResType,someShape,someProduct,someCost someCode\n";
-		String actualResult2 = awsBillingService.getLine(false, basicDocument);
-		assertEquals(expectedResult2, actualResult2);
-	}
-
-	@Test
-	public void getTotal() {
-		String expectedResult1 = ",,,,,,Total: someCostTotal someCode\n";
-		String actualResult1 = awsBillingService.getTotal(true, basicDocument);
-		assertEquals(expectedResult1, actualResult1);
-
-		String expectedResult2 = ",,,,,Total: someCostTotal someCode\n";
-		String actualResult2 = awsBillingService.getTotal(false, basicDocument);
-		assertEquals(expectedResult2, actualResult2);
-	}
-
-	private UserInfo getUserInfo() {
-		return new UserInfo("user", "token");
-	}
-
-	private Document getBasicDocument() {
-		return new Document().append("service_base_name", "someSBN").append("user", "someUser")
-				.append("project", "someProject").append("dlab_id", "someId")
-				.append("dlab_resource_type", "someResType").append("tag_resource_id", "someTagResourceId")
-				.append("from", "2018-03-21").append("to", "2018-03-22").append("full_report", false)
-				.append("shape", "someShape").append("product", "someProduct").append("cost", "someCost")
-				.append("cost_total", "someCostTotal").append("currency_code", "someCode")
-				.append("lines", Collections.singletonList(new Document()));
-	}
-
-}
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/azure/AzureBillingServiceTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/azure/AzureBillingServiceTest.java
deleted file mode 100644
index ebd4b83..0000000
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/azure/AzureBillingServiceTest.java
+++ /dev/null
@@ -1,208 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package com.epam.dlab.backendapi.service.azure;
-
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.dao.BillingDAO;
-import com.epam.dlab.backendapi.resources.dto.BillingFilter;
-import com.epam.dlab.exceptions.DlabException;
-import org.bson.Document;
-import org.junit.Before;
-import org.junit.Rule;
-import org.junit.Test;
-import org.junit.rules.ExpectedException;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
-
-import java.text.ParseException;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.doThrow;
-import static org.mockito.Mockito.verify;
-import static org.mockito.Mockito.verifyNoMoreInteractions;
-import static org.mockito.Mockito.when;
-
-@RunWith(MockitoJUnitRunner.class)
-public class AzureBillingServiceTest {
-
-	private UserInfo userInfo;
-	private BillingFilter billingFilter;
-	private Document basicDocument;
-
-	@Mock
-	private BillingDAO billingDAO;
-
-	@InjectMocks
-	private AzureBillingService azureBillingService;
-
-	@Rule
-	public ExpectedException expectedException = ExpectedException.none();
-
-	@Before
-	public void setUp() {
-		userInfo = getUserInfo();
-		billingFilter = new BillingFilter();
-		basicDocument = getBasicDocument();
-	}
-
-	@Test
-	public void getReportWithTheSameInstanceOfDocument() {
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(new Document());
-
-		Document actualDocument = azureBillingService.getReport(userInfo, billingFilter);
-		assertEquals(new Document(), actualDocument);
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void getReportWithException() {
-		doThrow(new RuntimeException()).when(billingDAO).getReport(any(UserInfo.class), any(BillingFilter.class));
-
-		try {
-			azureBillingService.getReport(userInfo, billingFilter);
-		} catch (DlabException e) {
-			assertEquals("Cannot load billing report: null", e.getMessage());
-		}
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void downloadReport() {
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(basicDocument);
-
-		byte[] result = azureBillingService.downloadReport(userInfo, billingFilter);
-		assertNotNull(result);
-		assertTrue(result.length > 0);
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void downloadReportWithInapproprietaryDateFormatInDocument() {
-		basicDocument.put("from", "someDateStart");
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(basicDocument);
-
-		try {
-			azureBillingService.downloadReport(userInfo, billingFilter);
-		} catch (DlabException e) {
-			assertEquals("Cannot prepare CSV file", e.getMessage());
-		}
-
-		verify(billingDAO).getReport(userInfo, billingFilter);
-		verifyNoMoreInteractions(billingDAO);
-	}
-
-	@Test
-	public void downloadReportWhenDocumentHasNotAllRequiredFields() {
-		basicDocument.remove("lines");
-		when(billingDAO.getReport(any(UserInfo.class), any(BillingFilter.class))).thenReturn(basicDocument);
-
-		expectedException.expect(NullPointerException.class);
-
-		azureBillingService.downloadReport(userInfo, billingFilter);
-	}
-
-	@Test
-	public void getReportFileName() {
-		String result = azureBillingService.getReportFileName(userInfo, billingFilter);
-		assertEquals("azure-billing-report.csv", result);
-	}
-
-	@Test
-	public void getFirstLine() throws ParseException {
-		String result = azureBillingService.getFirstLine(basicDocument);
-		assertEquals("Service base name: someSBN  Available reporting period from: Mar 21, 2018 " +
-				"to: Mar 22, 2018", result);
-	}
-
-	@Test
-	public void getFirstLineWithException() throws ParseException {
-		basicDocument.put("from", "someStartDate");
-
-		expectedException.expect(ParseException.class);
-
-		expectedException.expectMessage("Unparseable date: \"someStartDate\"");
-		azureBillingService.getFirstLine(basicDocument);
-	}
-
-	@Test
-	public void getHeadersList() {
-		List<String> expectedResult1 =
-				Arrays.asList("USER", "PROJECT" ,"ENVIRONMENT NAME", "RESOURCE TYPE", "INSTANCE SIZE", "CATEGORY", "SERVICE " +
-						"CHARGES");
-		List<String> expectedResult2 = expectedResult1.subList(1, expectedResult1.size());
-
-		List<String> actualResult1 = azureBillingService.getHeadersList(true);
-		assertEquals(expectedResult1, actualResult1);
-
-		List<String> actualResult2 = azureBillingService.getHeadersList(false);
-		assertEquals(expectedResult2, actualResult2);
-	}
-
-	@Test
-	public void getLine() {
-		String expectedResult1 = "someUser,someProject,someId,someResType,someSize,someMeterCategory,someCost someCode\n";
-		String actualResult1 = azureBillingService.getLine(true, basicDocument);
-		assertEquals(expectedResult1, actualResult1);
-
-		basicDocument.remove("user");
-		String expectedResult2 = "someProject,someId,someResType,someSize,someMeterCategory,someCost someCode\n";
-		String actualResult2 = azureBillingService.getLine(false, basicDocument);
-		assertEquals(expectedResult2, actualResult2);
-	}
-
-	@Test
-	public void getTotal() {
-		String expectedResult1 = ",,,,,,Total: someCost someCode\n";
-		String actualResult1 = azureBillingService.getTotal(true, basicDocument);
-		assertEquals(expectedResult1, actualResult1);
-
-		String expectedResult2 = ",,,,,Total: someCost someCode\n";
-		String actualResult2 = azureBillingService.getTotal(false, basicDocument);
-		assertEquals(expectedResult2, actualResult2);
-	}
-
-	private UserInfo getUserInfo() {
-		return new UserInfo("user", "token");
-	}
-
-	private Document getBasicDocument() {
-		return new Document().append("service_base_name", "someSBN").append("user", "someUser")
-				.append("project", "someProject").append("dlabId", "someId").append("resourceType", "someResType")
-				.append("from", "2018-03-21").append("size", "someSize").append("to", "2018-03-22")
-				.append("full_report", false).append("meterCategory", "someMeterCategory")
-				.append("costString", "someCost").append("currencyCode", "someCode")
-				.append("lines", Collections.singletonList(new Document()));
-	}
-
-}
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImplTest.java
index dca6e0f..460c2f9 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/EnvironmentServiceImplTest.java
@@ -34,7 +34,6 @@
 import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.dto.base.edge.EdgeInfo;
 import com.epam.dlab.exceptions.DlabException;
-import com.epam.dlab.exceptions.ResourceConflictException;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -47,14 +46,11 @@
 import java.util.Collections;
 import java.util.List;
 import java.util.Optional;
-import java.util.Set;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
 import static org.mockito.Mockito.any;
 import static org.mockito.Mockito.anySet;
 import static org.mockito.Mockito.anyString;
-import static org.mockito.Mockito.anyVararg;
 import static org.mockito.Mockito.doNothing;
 import static org.mockito.Mockito.doReturn;
 import static org.mockito.Mockito.doThrow;
@@ -126,73 +122,6 @@
 		environmentService.getUsers();
 	}
 
-	@Test
-	public void getAllUsers() {
-		doReturn(Collections.singleton(USER)).when(envDAO).fetchAllUsers();
-		final Set<String> users = environmentService.getUserNames();
-
-		assertEquals(1, users.size());
-		assertTrue(users.contains(USER));
-
-		verify(envDAO).fetchAllUsers();
-		verifyNoMoreInteractions(envDAO);
-	}
-
-	@Test
-	public void getAllUsersWithException() {
-		doThrow(new DlabException("Users not found")).when(envDAO).fetchAllUsers();
-
-		expectedException.expect(DlabException.class);
-		expectedException.expectMessage("Users not found");
-
-		environmentService.getUserNames();
-	}
-
-
-	@Test
-	public void stopEnvironment() {
-		final UserInfo userInfo = getUserInfo();
-		when(exploratoryDAO.fetchRunningExploratoryFields(anyString())).thenReturn(getUserInstances());
-		when(exploratoryService.stop(any(UserInfo.class), anyString(), anyString())).thenReturn(UUID);
-
-		environmentService.stopEnvironment(userInfo, USER, PROJECT_NAME);
-
-		verify(exploratoryDAO).fetchRunningExploratoryFields(USER);
-		verify(exploratoryService).stop(refEq(userInfo), eq(PROJECT_NAME), eq(EXPLORATORY_NAME_1));
-		verify(exploratoryService).stop(refEq(userInfo), eq(PROJECT_NAME), eq(EXPLORATORY_NAME_2));
-		verify(exploratoryDAO).fetchUserExploratoriesWhereStatusIn(USER, Arrays.asList(UserInstanceStatus.CREATING,
-				UserInstanceStatus.STARTING, UserInstanceStatus.CREATING_IMAGE),
-				UserInstanceStatus.CREATING,
-				UserInstanceStatus.STARTING, UserInstanceStatus.CREATING_IMAGE);
-		verifyNoMoreInteractions(exploratoryDAO, exploratoryService);
-	}
-
-	@Test
-	@SuppressWarnings("unchecked")
-	public void stopEnvironmentWithWrongResourceState() {
-		when(exploratoryDAO.fetchUserExploratoriesWhereStatusIn(anyString(), any(List.class), anyVararg()))
-				.thenReturn(getUserInstances());
-		expectedException.expect(ResourceConflictException.class);
-
-		environmentService.stopEnvironment(getUserInfo(), USER, PROJECT_NAME);
-	}
-
-	@Test
-	public void stopEnvironmentWithoutEdge() {
-		final UserInfo userInfo = getUserInfo();
-		when(exploratoryDAO.fetchRunningExploratoryFields(anyString())).thenReturn(getUserInstances());
-		when(exploratoryService.stop(any(UserInfo.class), anyString(), anyString())).thenReturn(UUID);
-
-		environmentService.stopEnvironment(userInfo, USER, PROJECT_NAME);
-
-		verify(exploratoryDAO).fetchRunningExploratoryFields(USER);
-		verify(exploratoryService).stop(refEq(userInfo), eq(PROJECT_NAME), eq(EXPLORATORY_NAME_1));
-		verify(exploratoryService).stop(refEq(userInfo), eq(PROJECT_NAME), eq(EXPLORATORY_NAME_2));
-		verify(exploratoryDAO).fetchUserExploratoriesWhereStatusIn(USER, Arrays.asList(UserInstanceStatus.CREATING,
-				UserInstanceStatus.STARTING, UserInstanceStatus.CREATING_IMAGE),
-				UserInstanceStatus.CREATING, UserInstanceStatus.STARTING, UserInstanceStatus.CREATING_IMAGE);
-		verifyNoMoreInteractions(envDAO, exploratoryDAO, exploratoryService);
-	}
 
 	@Test
 	public void stopProjectEnvironment() {
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImplTest.java
index 57d0284..e15044b 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/ImageExploratoryServiceImplTest.java
@@ -24,8 +24,10 @@
 import com.epam.dlab.backendapi.dao.ExploratoryLibDAO;
 import com.epam.dlab.backendapi.dao.ImageExploratoryDao;
 import com.epam.dlab.backendapi.domain.EndpointDTO;
+import com.epam.dlab.backendapi.domain.ProjectDTO;
 import com.epam.dlab.backendapi.resources.dto.ImageInfoRecord;
 import com.epam.dlab.backendapi.service.EndpointService;
+import com.epam.dlab.backendapi.service.ProjectService;
 import com.epam.dlab.backendapi.util.RequestBuilder;
 import com.epam.dlab.cloud.CloudProvider;
 import com.epam.dlab.dto.UserInstanceDTO;
@@ -91,6 +93,8 @@
 	private RequestBuilder requestBuilder;
 	@Mock
 	private EndpointService endpointService;
+	@Mock
+	private ProjectService projectService;
 
 	@InjectMocks
 	private ImageExploratoryServiceImpl imageExploratoryService;
@@ -107,6 +111,7 @@
 
 	@Test
 	public void createImage() {
+		when(projectService.get(anyString())).thenReturn(getProjectDTO());
 		when(exploratoryDAO.fetchRunningExploratoryFields(anyString(), anyString(), anyString())).thenReturn(userInstance);
 		when(imageExploratoryDao.exist(anyString(), anyString())).thenReturn(false);
 
@@ -117,7 +122,7 @@
 		ExploratoryImageDTO eiDto = new ExploratoryImageDTO();
 		when(endpointService.get(anyString())).thenReturn(endpointDTO());
 		when(requestBuilder.newExploratoryImageCreate(any(UserInfo.class), any(UserInstanceDTO.class), anyString(),
-				any(EndpointDTO.class))).thenReturn(eiDto);
+				any(EndpointDTO.class), any(ProjectDTO.class))).thenReturn(eiDto);
 
 		String expectedUuid = "someUuid";
 		when(provisioningService.post(anyString(), anyString(), any(ExploratoryImageDTO.class), any()))
@@ -129,15 +134,16 @@
 		assertNotNull(actualUuid);
 		assertEquals(expectedUuid, actualUuid);
 
+		verify(projectService).get(PROJECT);
 		verify(exploratoryDAO).fetchRunningExploratoryFields(USER, PROJECT, EXPLORATORY_NAME);
 		verify(exploratoryDAO).updateExploratoryStatus(any(ExploratoryStatusDTO.class));
 		verify(imageExploratoryDao).exist(imageName, PROJECT);
 		verify(imageExploratoryDao).save(any(Image.class));
 		verify(libDAO).getLibraries(USER, PROJECT, EXPLORATORY_NAME);
-		verify(requestBuilder).newExploratoryImageCreate(userInfo, userInstance, imageName, endpointDTO());
+		verify(requestBuilder).newExploratoryImageCreate(userInfo, userInstance, imageName, endpointDTO(), getProjectDTO());
 		verify(endpointService).get(anyString());
 		verify(provisioningService).post(endpointDTO().getUrl() + "exploratory/image", TOKEN, eiDto, String.class);
-		verifyNoMoreInteractions(exploratoryDAO, imageExploratoryDao, libDAO, requestBuilder, endpointService, provisioningService);
+		verifyNoMoreInteractions(projectService, exploratoryDAO, imageExploratoryDao, libDAO, requestBuilder, endpointService, provisioningService);
 	}
 
 	@Test
@@ -170,6 +176,7 @@
 
 	@Test
 	public void createImageWhenMethodNewExploratoryImageCreateThrowsException() {
+		when(projectService.get(anyString())).thenReturn(getProjectDTO());
 		when(exploratoryDAO.fetchRunningExploratoryFields(anyString(), anyString(), anyString())).thenReturn(userInstance);
 		when(imageExploratoryDao.exist(anyString(), anyString())).thenReturn(false);
 
@@ -178,7 +185,7 @@
 		when(exploratoryDAO.updateExploratoryStatus(any(ExploratoryStatusDTO.class)))
 				.thenReturn(mock(UpdateResult.class));
 		doThrow(new DlabException("Cannot create instance of resource class")).when(requestBuilder)
-				.newExploratoryImageCreate(any(UserInfo.class), any(UserInstanceDTO.class), anyString(), any(EndpointDTO.class));
+				.newExploratoryImageCreate(any(UserInfo.class), any(UserInstanceDTO.class), anyString(), any(EndpointDTO.class), any(ProjectDTO.class));
 		when(endpointService.get(anyString())).thenReturn(endpointDTO());
 
 		String imageName = "someImageName", imageDescription = "someDescription";
@@ -188,14 +195,15 @@
 			assertEquals("Cannot create instance of resource class", e.getMessage());
 		}
 
+		verify(projectService).get(PROJECT);
 		verify(exploratoryDAO).fetchRunningExploratoryFields(USER, PROJECT, EXPLORATORY_NAME);
 		verify(exploratoryDAO).updateExploratoryStatus(any(ExploratoryStatusDTO.class));
 		verify(imageExploratoryDao).exist(imageName, PROJECT);
 		verify(imageExploratoryDao).save(any(Image.class));
 		verify(libDAO).getLibraries(USER, PROJECT, EXPLORATORY_NAME);
-		verify(requestBuilder).newExploratoryImageCreate(userInfo, userInstance, imageName, endpointDTO());
+		verify(requestBuilder).newExploratoryImageCreate(userInfo, userInstance, imageName, endpointDTO(), getProjectDTO());
 		verify(endpointService).get(anyString());
-		verifyNoMoreInteractions(exploratoryDAO, imageExploratoryDao, libDAO, requestBuilder, endpointService);
+		verifyNoMoreInteractions(projectService, exploratoryDAO, imageExploratoryDao, libDAO, requestBuilder, endpointService);
 	}
 
 	@Test
@@ -300,7 +308,7 @@
 	}
 
 	private ImageInfoRecord getImageInfoRecord() {
-		return new ImageInfoRecord("someName", "someDescription", "someProject", "someEndpoint", "someApp",
+		return new ImageInfoRecord("someName", "someDescription", "someProject", "someEndpoint", "someUser", "someApp",
 				"someFullName", ImageStatus.CREATED);
 	}
 
@@ -337,4 +345,8 @@
 	private EndpointDTO endpointDTO() {
 		return new EndpointDTO("test", "url", "", null, EndpointDTO.EndpointStatus.ACTIVE, CloudProvider.AWS);
 	}
+
+	private ProjectDTO getProjectDTO() {
+		return ProjectDTO.builder().name(PROJECT).build();
+	}
 }
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InfrastructureTemplateServiceBaseTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InfrastructureTemplateServiceBaseTest.java
index e53b78c..56c838a 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InfrastructureTemplateServiceBaseTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InfrastructureTemplateServiceBaseTest.java
@@ -89,7 +89,7 @@
 		emDto2.setExploratoryEnvironmentShapes(shapes2);
 		List<ExploratoryMetadataDTO> expectedEmdDtoList = Arrays.asList(emDto1, emDto2);
 		when(userGroupDao.getUserGroups(anyString())).thenReturn(Collections.emptySet());
-		when(provisioningService.get(anyString(), anyString(), any())).thenReturn(expectedEmdDtoList.toArray());
+		when(provisioningService.get(anyString(), anyString(), any(Class.class))).thenReturn(expectedEmdDtoList.toArray());
 		when(settingsDAO.getConfOsFamily()).thenReturn("someConfOsFamily");
 
 		UserInfo userInfo = new UserInfo("test", "token");
@@ -108,7 +108,7 @@
 	public void getExploratoryTemplatesWithException() {
 		when(endpointService.get(anyString())).thenReturn(endpointDTO());
 		doThrow(new DlabException("Could not load list of exploratory templates for user"))
-				.when(provisioningService).get(anyString(), anyString(), any());
+				.when(provisioningService).get(anyString(), anyString(), any(Class.class));
 
 		UserInfo userInfo = new UserInfo("test", "token");
 		try {
@@ -131,7 +131,7 @@
 		);
 		when(projectDAO.get(anyString())).thenReturn(Optional.of(new ProjectDTO("project", Collections.emptySet(),
 				null, null, null, null, true)));
-		when(provisioningService.get(anyString(), anyString(), any())).thenReturn(expectedCmdDtoList.toArray(new ComputationalMetadataDTO[]{}));
+		when(provisioningService.get(anyString(), anyString(), any(Class.class))).thenReturn(expectedCmdDtoList.toArray(new ComputationalMetadataDTO[]{}));
 
 		List<FullComputationalTemplate> expectedFullCmdDtoList = expectedCmdDtoList.stream()
 				.map(e -> infrastructureTemplateServiceBaseChild.getCloudFullComputationalTemplate(e))
@@ -154,7 +154,7 @@
 	public void getComputationalTemplatesWhenMethodThrowsException() {
 		when(endpointService.get(anyString())).thenReturn(endpointDTO());
 		doThrow(new DlabException("Could not load list of computational templates for user"))
-				.when(provisioningService).get(anyString(), anyString(), any());
+				.when(provisioningService).get(anyString(), anyString(), any(Class.class));
 
 		UserInfo userInfo = new UserInfo("test", "token");
 		try {
@@ -173,7 +173,7 @@
 		final ComputationalMetadataDTO computationalMetadataDTO = new ComputationalMetadataDTO("dataengine-service");
 		computationalMetadataDTO.setComputationResourceShapes(Collections.emptyMap());
 		List<ComputationalMetadataDTO> expectedCmdDtoList = Collections.singletonList(computationalMetadataDTO);
-		when(provisioningService.get(anyString(), anyString(), any())).thenReturn(expectedCmdDtoList.toArray(new ComputationalMetadataDTO[]{}));
+		when(provisioningService.get(anyString(), anyString(), any(Class.class))).thenReturn(expectedCmdDtoList.toArray(new ComputationalMetadataDTO[]{}));
 		when(projectDAO.get(anyString())).thenReturn(Optional.of(new ProjectDTO("project", Collections.emptySet(),
 				null, null, null, null, true)));
 		when(configuration.getMinEmrInstanceCount()).thenReturn(1);
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImplTest.java
index 305e852..4fec7c6 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/UserGroupServiceImplTest.java
@@ -23,10 +23,13 @@
 import com.epam.dlab.backendapi.dao.UserGroupDao;
 import com.epam.dlab.backendapi.dao.UserRoleDao;
 import com.epam.dlab.backendapi.domain.ProjectDTO;
+import com.epam.dlab.backendapi.resources.TestBase;
 import com.epam.dlab.backendapi.resources.dto.UserGroupDto;
 import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.exceptions.DlabException;
 import com.epam.dlab.exceptions.ResourceNotFoundException;
+import io.dropwizard.auth.AuthenticationException;
+import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -36,39 +39,50 @@
 import org.mockito.runners.MockitoJUnitRunner;
 
 import java.util.Collections;
+import java.util.HashSet;
 import java.util.List;
 
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.mockito.Mockito.*;
+import static org.mockito.Mockito.anySet;
+import static org.mockito.Mockito.anyString;
+import static org.mockito.Mockito.doNothing;
+import static org.mockito.Mockito.never;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
+import static org.mockito.Mockito.when;
 
 @RunWith(MockitoJUnitRunner.class)
-public class UserGroupServiceImplTest {
+public class UserGroupServiceImplTest extends TestBase {
 
-	private static final String ROLE_ID = "Role id";
-	private static final String USER = "test";
-	private static final String GROUP = "admin";
-	@Mock
-	private UserRoleDao userRoleDao;
-	@Mock
-	private UserGroupDao userGroupDao;
-	@Mock
-	private ProjectDAO projectDAO;
-	@InjectMocks
-	private UserGroupServiceImpl userGroupService;
+    private static final String ROLE_ID = "Role id";
+    private static final String USER = "test";
+    private static final String GROUP = "admin";
+    @Mock
+    private UserRoleDao userRoleDao;
+    @Mock
+    private UserGroupDao userGroupDao;
+    @Mock
+    private ProjectDAO projectDAO;
+    @InjectMocks
+    private UserGroupServiceImpl userGroupService;
 
-	@Rule
-	public ExpectedException expectedException = ExpectedException.none();
+    @Rule
+    public ExpectedException expectedException = ExpectedException.none();
 
-	@Test
-	public void createGroup() {
-		when(userRoleDao.addGroupToRole(anySet(), anySet())).thenReturn(true);
+    @Before
+    public void setup() throws AuthenticationException {
+        authSetup();
+    }
 
-		userGroupService.createGroup(GROUP, Collections.singleton(ROLE_ID), Collections.singleton(USER));
+    @Test
+    public void createGroup() {
+        when(userRoleDao.addGroupToRole(anySet(), anySet())).thenReturn(true);
 
-		verify(userRoleDao).addGroupToRole(Collections.singleton(GROUP), Collections.singleton(ROLE_ID));
-		verify(userGroupDao).addUsers(GROUP, Collections.singleton(USER));
-	}
+        userGroupService.createGroup(GROUP, Collections.singleton(ROLE_ID), Collections.singleton(USER));
+
+        verify(userRoleDao).addGroupToRole(Collections.singleton(GROUP), Collections.singleton(ROLE_ID));
+        verify(userGroupDao).addUsers(GROUP, Collections.singleton(USER));
+    }
 
 	@Test
 	public void createGroupWithNoUsers() {
@@ -77,7 +91,7 @@
 		userGroupService.createGroup(GROUP, Collections.singleton(ROLE_ID), Collections.emptySet());
 
 		verify(userRoleDao).addGroupToRole(Collections.singleton(GROUP), Collections.singleton(ROLE_ID));
-		verify(userGroupDao, never()).addUsers(anyString(), anySet());
+		verify(userGroupDao).addUsers(anyString(), anySet());
 	}
 
 	@Test
@@ -89,70 +103,6 @@
 	}
 
 	@Test
-	public void getAggregatedRoles() {
-		when(userRoleDao.aggregateRolesByGroup()).thenReturn(Collections.singletonList(getUserGroup()));
-
-		final List<UserGroupDto> aggregatedRolesByGroup = userGroupService.getAggregatedRolesByGroup();
-
-		assertEquals(1, aggregatedRolesByGroup.size());
-		assertEquals(GROUP, aggregatedRolesByGroup.get(0).getGroup());
-		assertTrue(aggregatedRolesByGroup.get(0).getRoles().isEmpty());
-
-		verify(userRoleDao).aggregateRolesByGroup();
-		verifyNoMoreInteractions(userRoleDao);
-	}
-
-	@Test
-	public void addUserToGroup() {
-		userGroupService.addUsersToGroup(GROUP, Collections.singleton(USER));
-
-		verify(userGroupDao).addUsers(eq(GROUP), refEq(Collections.singleton(USER)));
-		verifyNoMoreInteractions(userRoleDao, userGroupDao);
-	}
-
-	@Test
-	public void addRolesToGroup() {
-		when(userRoleDao.addGroupToRole(anySetOf(String.class), anySetOf(String.class))).thenReturn(true);
-
-		userGroupService.updateRolesForGroup(GROUP, Collections.singleton(ROLE_ID));
-
-		verify(userRoleDao).addGroupToRole(refEq(Collections.singleton(GROUP)), refEq(Collections.singleton(ROLE_ID)));
-		verify(userRoleDao).removeGroupWhenRoleNotIn(GROUP, Collections.singleton(ROLE_ID));
-		verifyNoMoreInteractions(userRoleDao);
-	}
-
-	@Test
-	public void removeUserFromGroup() {
-
-		userGroupService.removeUserFromGroup(GROUP, USER);
-
-		verify(userGroupDao).removeUser(GROUP, USER);
-		verifyNoMoreInteractions(userGroupDao);
-	}
-
-	@Test
-	public void removeGroupFromRole() {
-
-		when(userRoleDao.removeGroupFromRole(anySetOf(String.class), anySetOf(String.class))).thenReturn(true);
-
-		userGroupService.removeGroupFromRole(Collections.singleton(GROUP), Collections.singleton(ROLE_ID));
-
-		verify(userRoleDao).removeGroupFromRole(refEq(Collections.singleton(GROUP)),
-				refEq(Collections.singleton(ROLE_ID)));
-		verifyNoMoreInteractions(userRoleDao);
-	}
-
-	@Test
-	public void removeGroupFromRoleWithException() {
-		when(userRoleDao.removeGroupFromRole(anySetOf(String.class), anySetOf(String.class))).thenReturn(false);
-
-		expectedException.expectMessage("Any of role : [" + ROLE_ID + "] were not found");
-		expectedException.expect(ResourceNotFoundException.class);
-
-		userGroupService.removeGroupFromRole(Collections.singleton(GROUP), Collections.singleton(ROLE_ID));
-	}
-
-	@Test
 	public void removeGroup() {
 
 		when(userRoleDao.removeGroup(anyString())).thenReturn(true);
@@ -220,17 +170,13 @@
 		userGroupService.removeGroup(GROUP);
 	}
 
-	@Test
-	public void updateGroup() {
-		userGroupService.updateGroup(GROUP, Collections.singleton(ROLE_ID), Collections.singleton(USER));
+    private UserGroupDto getUserGroup() {
+        return new UserGroupDto(GROUP, Collections.emptyList(), Collections.emptySet());
+    }
 
-		verify(userGroupDao).updateUsers(GROUP, Collections.singleton(USER));
-		verify(userRoleDao).removeGroupWhenRoleNotIn(GROUP, Collections.singleton(ROLE_ID));
-		verify(userRoleDao).addGroupToRole(Collections.singleton(GROUP), Collections.singleton(ROLE_ID));
-		verifyNoMoreInteractions(userRoleDao, userGroupDao);
-	}
-
-	private UserGroupDto getUserGroup() {
-		return new UserGroupDto(GROUP, Collections.emptyList(), Collections.emptySet());
-	}
+    private List<ProjectDTO> getProjects() {
+        return Collections.singletonList(ProjectDTO.builder()
+                .groups(new HashSet<>(Collections.singletonList(GROUP)))
+                .build());
+    }
 }
\ No newline at end of file