Merge remote-tracking branch 'remotes/origin/develop' into feature-DLAB-2

# Conflicts:
#	services/self-service/self-service.yml
diff --git a/README.md b/README.md
index 08fb730..88d49de 100644
--- a/README.md
+++ b/README.md
@@ -1727,7 +1727,17 @@
 ...
 docker logs <container_id> – to get log for particular Docker container.
 ```
-To change Docker images on existing environment, execute following steps:
+
+To change Docker images on an existing environment, you can run a script on the SSN node that rebuilds the Docker images:
+```
+docker-build all  # to rebuild all images
+```
+or
+```
+docker-build <notebook_name>  # to rebuild a particular image
+```
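+For example, `docker-build jupyter` rebuilds only the Jupyter notebook image.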
+
+You can also rebuild images manually by executing the following steps:
 
 1.  SSH to SSN instance
 2.  go to */opt/dlab/sources/*
diff --git a/infrastructure-provisioning/src/base/entrypoint.py b/infrastructure-provisioning/src/base/entrypoint.py
index 7bb848d..4780acf 100644
--- a/infrastructure-provisioning/src/base/entrypoint.py
+++ b/infrastructure-provisioning/src/base/entrypoint.py
@@ -178,3 +178,7 @@
     elif args.action == 'reconfigure_spark':
         with hide('running'):
             local("/bin/reconfigure_spark.py")
+
+    elif args.action == 'check_inactivity':
+        with hide('running'):
+            local("/bin/check_inactivity.py")
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/deeplearning/fabfile.py b/infrastructure-provisioning/src/deeplearning/fabfile.py
index b1f8df8..cd9fbb5 100644
--- a/infrastructure-provisioning/src/deeplearning/fabfile.py
+++ b/infrastructure-provisioning/src/deeplearning/fabfile.py
@@ -223,3 +223,19 @@
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
         sys.exit(1)
+
+# Main function for checking inactivity status
+def check_inactivity():
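+    # Build the per-request log file name from the resource type, user name and request id passed via environment variables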
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
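+    # Delegate the check to ~/scripts/notebook_inactivity_check.py; any failure is reported and fails the request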
+    try:
+        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to check inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
index 3903bbb..098cb33 100644
--- a/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
+++ b/infrastructure-provisioning/src/deeplearning/scripts/configure_deep_learning_node.py
@@ -41,6 +41,7 @@
 parser.add_argument('--spark_version', type=str, default='')
 parser.add_argument('--hadoop_version', type=str, default='')
 parser.add_argument('--r_mirror', type=str, default='')
+parser.add_argument('--ip_adress', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
 
@@ -158,6 +159,10 @@
     if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
+    # INSTALL INACTIVITY CHECKER
+    print("Install inactivity checker")
+    install_inactivity_checker(args.os_user, args.ip_adress)
+
     # INSTALL OPTIONAL PACKAGES
     print("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
diff --git a/infrastructure-provisioning/src/general/api/check_inactivity.py b/infrastructure-provisioning/src/general/api/check_inactivity.py
new file mode 100644
index 0000000..5bacfee
--- /dev/null
+++ b/infrastructure-provisioning/src/general/api/check_inactivity.py
@@ -0,0 +1,66 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import os
+import json
+import sys
+from fabric.api import local
+
+
+if __name__ == "__main__":
+    success = True
+    try:
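+        # Invoke the 'check_inactivity' Fabric task from the resource's /root/fabfile.py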
+        local('cd /root; fab check_inactivity')
+    except:
+        success = False
+
+    reply = dict()
+    reply['request_id'] = os.environ['request_id']
+    if success:
+        reply['status'] = 'ok'
+    else:
+        reply['status'] = 'err'
+
+    reply['response'] = dict()
+
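+    # The Fabric task is expected to leave its result in /root/result.json; relay it in the response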
+    try:
+        with open("/root/result.json") as f:
+            reply['response']['result'] = json.loads(f.read())
+    except:
+        reply['response']['result'] = {"error": "Failed to open result.json"}
+
+    reply['response']['log'] = "/var/log/dlab/{0}/{0}_{1}_{2}.log".format(os.environ['conf_resource'],
+                                                                          os.environ['edge_user_name'],
+                                                                          os.environ['request_id'])
+
+    with open("/response/{}_{}_{}.json".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id']), 'w') as response_file:
+        response_file.write(json.dumps(reply))
+
+    try:
+        local('chmod 666 /response/*')
+    except:
+        success = False
+
+    if not success:
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile b/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile
index c78ad9f..6e0c7bc 100644
--- a/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/dataengine-service_Dockerfile
@@ -32,6 +32,9 @@
 COPY general/scripts/os/common_* /root/scripts/
 COPY general/lib/os/redhat/common_lib.py /usr/lib/python2.7/dlab/common_lib.py
 COPY general/lib/os/redhat/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile b/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile
index cbc5e74..a0be3e1 100644
--- a/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/dataengine_Dockerfile
@@ -25,6 +25,7 @@
 
 COPY dataengine/ /root/
 COPY general/scripts/os/dataengine_* /root/scripts/
+COPY general/scripts/os/update_inactivity_on_start.py /root/scripts/
 COPY general/scripts/os/reconfigure_spark.py /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
diff --git a/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile b/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile
index 20bfe77..e460e39 100644
--- a/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/deeplearning_Dockerfile
@@ -37,6 +37,9 @@
 COPY general/templates/os/r_template.json /root/templates/
 COPY general/templates/os/run_template.sh /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/files/os/toree-assembly-0.2.0.jar /root/files/
 COPY general/files/os/toree_kernel.tar.gz /root/files/
 COPY general/templates/os/pyspark_dataengine-service_template.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile b/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile
index a7f4093..1f401fc 100644
--- a/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/jupyter_Dockerfile
@@ -38,6 +38,9 @@
 COPY general/templates/os/r_template.json /root/templates/
 COPY general/templates/os/run_template.sh /root/templates/
 COPY general/templates/os/toree_dataengine-service_* /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/files/os/toree-assembly-0.2.0.jar /root/files/
 COPY general/files/os/toree_kernel.tar.gz /root/files/
 COPY general/templates/os/pyspark_dataengine_template.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile
index db55b45..aafe294 100644
--- a/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/rstudio_Dockerfile
@@ -30,6 +30,9 @@
 COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
+COPY general/templates/os/inactive_rs.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile
index 620d99c..7efbb3c 100644
--- a/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_Dockerfile
@@ -32,6 +32,9 @@
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile b/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile
index 321d57c..74b14f2 100644
--- a/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/tensor_Dockerfile
@@ -37,6 +37,9 @@
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
 COPY general/templates/os/pyspark_dataengine-service_template.json /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile b/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile
index 9d9b7e9..6c0705f 100644
--- a/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/aws/zeppelin_Dockerfile
@@ -35,6 +35,9 @@
 COPY general/templates/os/dataengine_interpreter_spark.json /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile b/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile
index fbc0bdd..8e394cc 100644
--- a/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/dataengine_Dockerfile
@@ -26,6 +26,7 @@
 COPY dataengine/ /root/
 COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
 COPY general/scripts/os/dataengine_* /root/scripts/
+COPY general/scripts/os/update_inactivity_on_start.py /root/scripts/
 COPY general/scripts/os/reconfigure_spark.py /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
diff --git a/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile b/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile
index dc8c371..220086e 100644
--- a/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/deeplearning_Dockerfile
@@ -37,6 +37,9 @@
 COPY general/templates/os/r_template.json /root/templates/
 COPY general/templates/os/run_template.sh /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/files/os/toree-assembly-0.2.0.jar /root/files/
 COPY general/files/os/toree_kernel.tar.gz /root/files/
 COPY general/templates/azure/core-site* /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile b/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile
index 550aefc..acbb7a8 100644
--- a/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/jupyter_Dockerfile
@@ -40,8 +40,12 @@
 COPY general/templates/os/pyspark_dataengine_template.json /root/templates/
 COPY general/templates/os/r_dataengine_template.json /root/templates/
 COPY general/templates/os/toree_dataengine_template.json /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/templates/azure/core-site* /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
 
diff --git a/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile
index eb2da2d..dc77e72 100644
--- a/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/rstudio_Dockerfile
@@ -30,6 +30,9 @@
 COPY general/lib/os/${OS}/notebook_lib.py /usr/lib/python2.7/dlab/notebook_lib.py
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
+COPY general/templates/os/inactive_rs.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/templates/azure/core-site* /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
diff --git a/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile b/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile
index daccbf6..5b2ac3c 100644
--- a/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/tensor_Dockerfile
@@ -36,6 +36,9 @@
 COPY general/templates/os/pyspark_dataengine_template.json /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/templates/azure/core-site* /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
diff --git a/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile b/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile
index 272f106..d86ebb7 100644
--- a/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/azure/zeppelin_Dockerfile
@@ -34,6 +34,9 @@
 COPY general/templates/os/dataengine_interpreter_spark.json /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/templates/azure/core-site* /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
diff --git a/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile
index a53ef18..00cad0d 100644
--- a/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/dataengine-service_Dockerfile
@@ -32,6 +32,9 @@
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
 COPY general/templates/gcp/dataengine-service_cluster.json /root/templates/dataengine-service_cluster.json
 COPY general/templates/gcp/dataengine-service_job.json /root/templates/dataengine-service_job.json
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile
index 9f71acc..0f4f14a 100644
--- a/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/dataengine_Dockerfile
@@ -25,6 +25,7 @@
 
 COPY dataengine/ /root/
 COPY general/scripts/os/dataengine_* /root/scripts/
+COPY general/scripts/os/update_inactivity_on_start.py /root/scripts/
 COPY general/scripts/os/reconfigure_spark.py /root/scripts/
 COPY general/scripts/os/install_additional_libs.py /root/scripts/install_additional_libs.py
 COPY general/scripts/os/get_list_available_pkgs.py /root/scripts/get_list_available_pkgs.py
diff --git a/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile
index 6979f41..0a3ad63 100644
--- a/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/deeplearning_Dockerfile
@@ -37,6 +37,9 @@
 COPY general/templates/os/r_template.json /root/templates/
 COPY general/templates/os/run_template.sh /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/files/os/toree-assembly-0.2.0.jar /root/files/
 COPY general/files/os/toree_kernel.tar.gz /root/files/
 COPY general/templates/os/pyspark_dataengine-service_template.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile
index 89cf37c..8e5dabd 100644
--- a/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/jupyter_Dockerfile
@@ -38,6 +38,9 @@
 COPY general/templates/os/r_template.json /root/templates/
 COPY general/templates/os/run_template.sh /root/templates/
 COPY general/templates/os/toree_dataengine-service_* /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/files/os/toree-assembly-0.2.0.jar /root/files/
 COPY general/files/os/toree_kernel.tar.gz /root/files/
 COPY general/templates/os/pyspark_dataengine_template.json /root/templates/
diff --git a/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile
index 5c13de5..0a07470 100644
--- a/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/rstudio_Dockerfile
@@ -31,6 +31,9 @@
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
 COPY general/templates/gcp/core-site.xml /root/templates/
+COPY general/templates/os/inactive_rs.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
     chmod a+x /root/scripts/*
diff --git a/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile
index ed15d89..9f1e594 100644
--- a/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/tensor_Dockerfile
@@ -36,6 +36,9 @@
 COPY general/templates/os/pyspark_dataengine_template.json /root/templates/
 COPY general/templates/os/tensorboard.service /root/templates/
 COPY general/templates/os/pyspark_dataengine-service_template.json /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/templates/gcp/core-site.xml /root/templates/
 
 RUN chmod a+x /root/fabfile.py; \
diff --git a/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile b/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile
index 0f6bd7a..ab5b22e 100644
--- a/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile
+++ b/infrastructure-provisioning/src/general/files/gcp/zeppelin_Dockerfile
@@ -34,6 +34,9 @@
 COPY general/templates/os/dataengine_interpreter_spark.json /root/templates/
 COPY general/templates/os/${OS}/ungit.service /root/templates/
 COPY general/templates/os/notebook_spark-defaults_local.conf /root/templates/
+COPY general/templates/os/inactive.sh /root/templates/
+COPY general/templates/os/inactive.service /root/templates/
+COPY general/templates/os/inactive.timer /root/templates/
 COPY general/templates/gcp/dataengine-service_interpreter_* /root/templates/
 COPY general/templates/gcp/core-site.xml /root/templates/
 
diff --git a/infrastructure-provisioning/src/general/lib/os/fab.py b/infrastructure-provisioning/src/general/lib/os/fab.py
index fc287c9..ff38fec 100644
--- a/infrastructure-provisioning/src/general/lib/os/fab.py
+++ b/infrastructure-provisioning/src/general/lib/os/fab.py
@@ -449,6 +449,31 @@
     run('git config --global https.proxy $https_proxy')
 
 
+def install_inactivity_checker(os_user, ip_adress, rstudio=False):
+    if not exists('/home/{}/.ensure_dir/inactivity_ensured'.format(os_user)):
+        try:
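+            # Deploy the systemd timer/service pair; the service is assumed to run /opt/inactivity/inactive.sh on each trigger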
+            if not exists('/opt/inactivity'):
+                sudo('mkdir /opt/inactivity')
+            put('/root/templates/inactive.service', '/etc/systemd/system/inactive.service', use_sudo=True)
+            put('/root/templates/inactive.timer', '/etc/systemd/system/inactive.timer', use_sudo=True)
+            if rstudio:
+                put('/root/templates/inactive_rs.sh', '/opt/inactivity/inactive.sh', use_sudo=True)
+            else:
+                put('/root/templates/inactive.sh', '/opt/inactivity/inactive.sh', use_sudo=True)
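+            # Substitute the template's IP_ADRESS placeholder (spelling matches the template) with the given IP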
+            sudo("sed -i 's|IP_ADRESS|{}|g' /opt/inactivity/inactive.sh".format(ip_adress))
+            sudo("chmod 755 /opt/inactivity/inactive.sh")
+            sudo("chown root:root /etc/systemd/system/inactive.service")
+            sudo("chown root:root /etc/systemd/system/inactive.timer")
+            sudo("date +%s > /opt/inactivity/local_inactivity")
+            sudo('systemctl daemon-reload')
+            sudo('systemctl enable inactive.timer')
+            sudo('systemctl start inactive.timer')
+            sudo('touch /home/{}/.ensure_dir/inactivity_ensured'.format(os_user))
+        except Exception as err:
+            print('Failed to setup inactivity check service!', str(err))
+            sys.exit(1)
+
+
 def set_git_proxy(os_user, hostname, keyfile, proxy_host):
     env['connection_attempts'] = 100
     env.key_filename = [keyfile]
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
index f84520b..9610083 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_notebook_configure_dataengine.py
@@ -86,10 +86,12 @@
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        params = "--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --keyfile {} --notebook_ip {}".\
+        params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
+                 " --keyfile {5} --notebook_ip {6} --spark_master_ip {7}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['dlab_ssh_user'],
-                   notebook_config['spark_master_url'], notebook_config['key_path'], notebook_config['notebook_ip'])
+                   notebook_config['spark_master_url'], notebook_config['key_path'],
+                   notebook_config['notebook_ip'], notebook_config['spark_master_ip'])
         try:
             local("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
index bfef46a..3bd537a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_start_notebook.py
@@ -78,6 +78,20 @@
     except:
         sys.exit(1)
 
+    try:
+        logging.info('[UPDATE LAST ACTIVITY TIME]')
+        print('[UPDATE LAST ACTIVITY TIME]')
+        params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
+            .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
+        try:
+            local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
+        except Exception as err:
+            traceback.print_exc()
+            append_result("Failed to update last activity time.", str(err))
+            raise Exception
+    except:
+        sys.exit(1)
+
 
     try:
         ip_address = get_instance_ip_address(notebook_config['tag_name'], notebook_config['notebook_name']).get('Private')
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
index 02e6475..6f12119 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine_start.py
@@ -49,37 +49,61 @@
     # generating variables dictionary
     create_aws_config_files()
     print('Generating infrastructure names and tags')
-    data_engine_config = dict()
+    data_engine = dict()
     
     try:
-        data_engine_config['exploratory_name'] = os.environ['exploratory_name']
+        data_engine['exploratory_name'] = os.environ['exploratory_name']
     except:
-        data_engine_config['exploratory_name'] = ''
+        data_engine['exploratory_name'] = ''
     try:
-        data_engine_config['computational_name'] = os.environ['computational_name']
+        data_engine['computational_name'] = os.environ['computational_name']
     except:
-        data_engine_config['computational_name'] = ''
-    data_engine_config['service_base_name'] = os.environ['conf_service_base_name']
-    data_engine_config['user_name'] = os.environ['edge_user_name']
-    data_engine_config['cluster_name'] = \
-        data_engine_config['service_base_name'] + '-' + \
-        data_engine_config['user_name'] + '-de-' + \
-        data_engine_config['exploratory_name'] + '-' + \
-        data_engine_config['computational_name']
+        data_engine['computational_name'] = ''
+    data_engine['service_base_name'] = os.environ['conf_service_base_name']
+    data_engine['user_name'] = os.environ['edge_user_name']
+    data_engine['cluster_name'] = \
+        data_engine['service_base_name'] + '-' + \
+        data_engine['user_name'] + '-de-' + \
+        data_engine['exploratory_name'] + '-' + \
+        data_engine['computational_name']
 
     logging.info('[START DATA ENGINE CLUSTER]')
     print('[START DATA ENGINE CLUSTER]')
     try:
         start_data_engine("{}:{}".format(os.environ['conf_service_base_name'],
-                                         data_engine_config['cluster_name']))
+                                         data_engine['cluster_name']))
     except Exception as err:
         print('Error: {0}'.format(err))
         append_result("Failed to start Data Engine.", str(err))
         sys.exit(1)
 
     try:
+        logging.info('[UPDATE LAST ACTIVITY TIME]')
+        print('[UPDATE LAST ACTIVITY TIME]')
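+        # The master node of the data engine cluster carries the '-m' suffix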
+        data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
+        data_engine['tag_name'] = data_engine['service_base_name'] + '-Tag'
+        data_engine['notebook_ip'] = get_instance_ip_address(data_engine['tag_name'],
+                                                             os.environ['notebook_instance_name']).get('Private')
+        data_engine['computational_ip'] = get_instance_ip_address(data_engine['tag_name'],
+                                                                  data_engine['computational_id']).get('Private')
+        data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+        params = '--os_user {0} --notebook_ip {1} --keyfile "{2}" --cluster_ip {3}' \
+            .format(os.environ['conf_os_user'], data_engine['notebook_ip'], data_engine['keyfile'],
+                    data_engine['computational_ip'])
+        try:
+            local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
+        except Exception as err:
+            traceback.print_exc()
+            append_result("Failed to update last activity time.", str(err))
+            raise Exception
+    except:
+        sys.exit(1)
+
+    try:
         with open("/root/result.json", 'w') as result:
-            res = {"service_base_name": data_engine_config['service_base_name'],
+            res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
             print(json.dumps(res))
             result.write(json.dumps(res))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index 069ebd9..f98ae4d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -66,6 +66,7 @@
     notebook_config['tag_name'] = '{}-Tag'.format(notebook_config['service_base_name'])
     notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
     notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+    notebook_config['ip_address'] = get_instance_ip_address(notebook_config['tag_name'],
+                                                            notebook_config['instance_name']).get('Private')
 
     # generating variables regarding EDGE proxy on Notebook instance
     instance_hostname = get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -160,16 +161,16 @@
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} " \
-                 "--os_user {} --jupyter_version {} " \
-                 "--scala_version {} --spark_version {} " \
-                 "--hadoop_version {} --region {} " \
-                 "--r_mirror {} --exploratory_name {}" \
+        params = "--hostname {0} --keyfile {1} " \
+                 "--os_user {2} --jupyter_version {3} " \
+                 "--scala_version {4} --spark_version {5} " \
+                 "--hadoop_version {6} --region {7} " \
+                 "--r_mirror {8} --ip_adress {9} --exploratory_name {10}" \
                  .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
                          os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                          os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
                          os.environ['aws_region'], os.environ['notebook_r_mirror'],
-                         notebook_config['exploratory_name'])
+                         notebook_config['ip_address'], notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_deep_learning_node', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py b/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py
index 5831410..ef7660a 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/edge_prepare.py
@@ -266,7 +266,7 @@
                 "PrefixListIds": [],
                 "FromPort": 4040,
                 "IpRanges": [{"CidrIp": edge_conf['private_subnet_cidr']}],
-                "ToPort": 4045, "IpProtocol": "tcp", "UserIdGroupPairs": []
+                "ToPort": 4140, "IpProtocol": "tcp", "UserIdGroupPairs": []
             },
             {
                 "PrefixListIds": [],
diff --git a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
index 7f32c24..2b390b3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/jupyter_configure.py
@@ -66,6 +66,7 @@
     notebook_config['tag_name'] = '{}-Tag'.format(notebook_config['service_base_name'])
     notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
     notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+    notebook_config['ip_address'] = get_instance_ip_address(notebook_config['tag_name'],
+                                                            notebook_config['instance_name']).get('Private')
 
     # generating variables regarding EDGE proxy on Notebook instance
     instance_hostname = get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -144,15 +145,16 @@
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        params = "--hostname {} " \
-                 "--keyfile {} " \
-                 "--region {} " \
-                 "--spark_version {} " \
-                 "--hadoop_version {} " \
-                 "--os_user {} " \
-                 "--scala_version {} " \
-                 "--r_mirror {} " \
-                 "--exploratory_name {}".\
+        params = "--hostname {0} " \
+                 "--keyfile {1} " \
+                 "--region {2} " \
+                 "--spark_version {3} " \
+                 "--hadoop_version {4} " \
+                 "--os_user {5} " \
+                 "--scala_version {6} " \
+                 "--r_mirror {7} " \
+                 "--ip_adress {8} " \
+                 "--exploratory_name {9}".\
             format(instance_hostname,
                    keyfile_name,
                    os.environ['aws_region'],
@@ -161,6 +163,7 @@
                    notebook_config['dlab_ssh_user'],
                    os.environ['notebook_scala_version'],
                    os.environ['notebook_r_mirror'],
+                   notebook_config['ip_address'],
                    notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_jupyter_node', params))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
index c35380a..e0b046c 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/rstudio_configure.py
@@ -68,6 +68,7 @@
     notebook_config['rstudio_pass'] = id_generator()
     notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
     notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+    notebook_config['ip_address'] = get_instance_ip_address(notebook_config['tag_name'],
+                                                            notebook_config['instance_name']).get('Private')
 
     # generating variables regarding EDGE proxy on Notebook instance
     instance_hostname = get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -146,14 +147,15 @@
     try:
         logging.info('[CONFIGURE R_STUDIO NOTEBOOK INSTANCE]')
         print('[CONFIGURE R_STUDIO NOTEBOOK INSTANCE]')
-        params = "--hostname {}  --keyfile {} " \
-                 "--region {} --rstudio_pass {} " \
-                 "--rstudio_version {} --os_user {} " \
-                 "--r_mirror {} --exploratory_name {}" \
+        params = "--hostname {0}  --keyfile {1} " \
+                 "--region {2} --rstudio_pass {3} " \
+                 "--rstudio_version {4} --os_user {5} " \
+                 "--r_mirror {6} --ip_adress {7} --exploratory_name {8}" \
             .format(instance_hostname, keyfile_name,
                     os.environ['aws_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['dlab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['exploratory_name'])
+                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_rstudio_node', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
index a32be53..96b4cc3 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor-rstudio_configure.py
@@ -68,6 +68,8 @@
     notebook_config['tag_name'] = '{}-Tag'.format(notebook_config['service_base_name'])
     notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
     notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+    notebook_config['ip_address'] = get_instance_ip_address(notebook_config['tag_name'],
+                                                            notebook_config['instance_name']).get('Private')
     tag = {"Key": notebook_config['tag_name'],
            "Value": "{}-{}-subnet".format(notebook_config['service_base_name'], os.environ['edge_user_name'])}
     notebook_config['subnet_cidr'] = get_subnet_by_tag(tag)
@@ -150,14 +152,15 @@
     try:
         logging.info('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
         print('[CONFIGURE TENSORFLOW-RSTUDIO NOTEBOOK INSTANCE]')
-        params = "--hostname {}  --keyfile {} " \
-                 "--region {} --rstudio_pass {} " \
-                 "--rstudio_version {} --os_user {} " \
-                 "--r_mirror {} --exploratory_name {}" \
+        params = "--hostname {0}  --keyfile {1} " \
+                 "--region {2} --rstudio_pass {3} " \
+                 "--rstudio_version {4} --os_user {5} " \
+                 "--r_mirror {6} --ip_adress {7} --exploratory_name {8}" \
             .format(instance_hostname, keyfile_name,
                     os.environ['aws_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['dlab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['exploratory_name'])
+                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_tensor-rstudio_node', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
index 046aab7..1230ce1 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/tensor_configure.py
@@ -71,6 +71,7 @@
     tag = {"Key": notebook_config['tag_name'],
            "Value": "{}-{}-subnet".format(notebook_config['service_base_name'], os.environ['edge_user_name'])}
     notebook_config['subnet_cidr'] = get_subnet_by_tag(tag)
+    notebook_config['ip_address'] = get_instance_ip_address(notebook_config['tag_name'],
+                                                            notebook_config['instance_name']).get('Private')
 
     # generating variables regarding EDGE proxy on Notebook instance
     instance_hostname = get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name'])
@@ -149,12 +150,12 @@
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} " \
-                 "--region {} --os_user {} " \
-                 "--exploratory_name {}" \
+        params = "--hostname {0} --keyfile {1} " \
+                 "--region {2} --os_user {3} " \
+                 "--ip_adress {4} --exploratory_name {5}" \
                  .format(instance_hostname, keyfile_name,
                          os.environ['aws_region'], notebook_config['dlab_ssh_user'],
-                         notebook_config['exploratory_name'])
+                         notebook_config['ip_address'], notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_tensor_node', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
index c17ed5b..b59cf37 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/zeppelin_configure.py
@@ -68,6 +68,7 @@
     notebook_config['tag_name'] = '{}-Tag'.format(notebook_config['service_base_name'])
     notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
     notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+    notebook_config['ip_address'] = get_instance_ip_address(notebook_config['tag_name'],
+                                                            notebook_config['instance_name']).get('Private')
 
     region = os.environ['aws_region']
     if region == 'us-east-1':
@@ -158,21 +159,22 @@
                              "backend_hostname": get_instance_hostname(notebook_config['tag_name'], notebook_config['instance_name']),
                              "backend_port": "8080",
                              "nginx_template_dir": "/root/templates/"}
-        params = "--hostname {} --instance_name {} " \
-                 "--keyfile {} --region {} " \
-                 "--additional_config '{}' --os_user {} " \
-                 "--spark_version {} --hadoop_version {} " \
-                 "--edge_hostname {} --proxy_port {} " \
-                 "--zeppelin_version {} --scala_version {} " \
-                 "--livy_version {} --multiple_clusters {} " \
-                 "--r_mirror {} --endpoint_url {} " \
-                 "--exploratory_name {}" \
+        params = "--hostname {0} --instance_name {1} " \
+                 "--keyfile {2} --region {3} " \
+                 "--additional_config '{4}' --os_user {5} " \
+                 "--spark_version {6} --hadoop_version {7} " \
+                 "--edge_hostname {8} --proxy_port {9} " \
+                 "--zeppelin_version {10} --scala_version {11} " \
+                 "--livy_version {12} --multiple_clusters {13} " \
+                 "--r_mirror {14} --endpoint_url {15} " \
+                 "--ip_adress {16} --exploratory_name {17}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, os.environ['aws_region'],
                     json.dumps(additional_config), notebook_config['dlab_ssh_user'], os.environ['notebook_spark_version'],
                     os.environ['notebook_hadoop_version'], edge_instance_hostname, '3128',
                     os.environ['notebook_zeppelin_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'],
-                    os.environ['notebook_r_mirror'], endpoint_url, notebook_config['exploratory_name'])
+                    os.environ['notebook_r_mirror'], endpoint_url, notebook_config['ip_address'],
+                    notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_zeppelin_node', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
index 9547532..8b8c626 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_notebook_configure_dataengine.py
@@ -85,11 +85,12 @@
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        params = "--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --keyfile {} --notebook_ip {} --datalake_enabled {}".\
+        params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4}" \
+                 " --keyfile {5} --notebook_ip {6} --datalake_enabled {7} --spark_master_ip {8}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['dlab_ssh_user'],
                    notebook_config['spark_master_url'], notebook_config['key_path'], notebook_config['notebook_ip'],
-                   os.environ['azure_datalake_enable'])
+                   os.environ['azure_datalake_enable'], notebook_config['spark_master_ip'])
         try:
             local("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
index 48fe5ab..32e1283 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/common_start_notebook.py
@@ -100,6 +100,20 @@
             sys.exit(1)
 
     try:
+        logging.info('[UPDATE LAST ACTIVITY TIME]')
+        print('[UPDATE LAST ACTIVITY TIME]')
+        params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
+            .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
+        try:
+            local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
+        except Exception as err:
+            traceback.print_exc()
+            append_result("Failed to update last activity time.", str(err))
+            raise Exception
+    except:
+        sys.exit(1)
+
+    try:
         ip_address = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
                                                                  notebook_config['notebook_name'])
         print('[SUMMARY]')
diff --git a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
index 6a8a1f2..52e2716 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/dataengine_start.py
@@ -82,6 +82,26 @@
         sys.exit(1)
 
     try:
+        logging.info('[UPDATE LAST ACTIVITY TIME]')
+        print('[UPDATE LAST ACTIVITY TIME]')
+        data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
+        data_engine['notebook_ip'] = AzureMeta().get_private_ip_address(data_engine['resource_group_name'],
+                                                                        os.environ['notebook_instance_name'])
+        data_engine['computational_ip'] = AzureMeta().get_private_ip_address(data_engine['resource_group_name'],
+                                                                             data_engine['computational_id'])
+        data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+        params = '--os_user {0} --notebook_ip {1} --keyfile "{2}" --cluster_ip {3}' \
+            .format(os.environ['conf_os_user'], data_engine['notebook_ip'], data_engine['keyfile'],
+                    data_engine['computational_ip'])
+        try:
+            local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
+        except Exception as err:
+            traceback.print_exc()
+            append_result("Failed to update last activity time.", str(err))
+            raise Exception
+    except:
+        sys.exit(1)
+
+    try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
                    "Action": "Start Data Engine"}
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index 38c2fb1..a8f7492 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -65,6 +65,8 @@
                                    "Exploratory": notebook_config['exploratory_name'],
                                    os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
         notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+        notebook_config['ip_address'] = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
+                                                                           notebook_config['instance_name'])
 
         # generating variables regarding EDGE proxy on Notebook instance
         instance_hostname = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
@@ -167,16 +169,16 @@
     try:
         logging.info('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
         print('[CONFIGURE DEEP LEARNING NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} " \
-                 "--os_user {} --jupyter_version {} " \
-                 "--scala_version {} --spark_version {} " \
-                 "--hadoop_version {} --region {} " \
-                 "--r_mirror {} --exploratory_name {}" \
+        params = "--hostname {0} --keyfile {1} " \
+                 "--os_user {2} --jupyter_version {3} " \
+                 "--scala_version {4} --spark_version {5} " \
+                 "--hadoop_version {6} --region {7} " \
+                 "--r_mirror {8} --ip_adress {9} --exploratory_name {10}" \
                  .format(instance_hostname, keyfile_name, notebook_config['dlab_ssh_user'],
                          os.environ['notebook_jupyter_version'], os.environ['notebook_scala_version'],
                          os.environ['notebook_spark_version'], os.environ['notebook_hadoop_version'],
                          os.environ['azure_region'], os.environ['notebook_r_mirror'],
-                         notebook_config['exploratory_name'])
+                         notebook_config['ip_address'], notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_deep_learning_node', params))
             remount_azure_disk(True, notebook_config['dlab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
index a4bf9e4..0177ec3 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/edge_prepare.py
@@ -299,7 +299,7 @@
                 "name": "out-12",
                 "protocol": "Tcp",
                 "source_port_range": "*",
-                "destination_port_range": "4040-4045",
+                "destination_port_range": "4040-4140",
                 "source_address_prefix": "*",
                 "destination_address_prefix": edge_conf['private_subnet_cidr'],
                 "access": "Allow",
diff --git a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
index f4aeea6..2bce792 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/jupyter_configure.py
@@ -64,6 +64,8 @@
                                    "Exploratory": notebook_config['exploratory_name'],
                                    os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
         notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+        notebook_config['ip_address'] = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
+                                                        notebook_config['instance_name'])
 
         # generating variables regarding EDGE proxy on Notebook instance
         instance_hostname = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
@@ -150,16 +152,16 @@
     try:
         logging.info('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
         print('[CONFIGURE JUPYTER NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} " \
-                 "--region {} --spark_version {} " \
-                 "--hadoop_version {} --os_user {} " \
-                 "--scala_version {} --r_mirror {} " \
-                 "--exploratory_name {}".\
+        params = "--hostname {0} --keyfile {1} " \
+                 "--region {2} --spark_version {3} " \
+                 "--hadoop_version {4} --os_user {5} " \
+                 "--scala_version {6} --r_mirror {7} " \
+                 "--ip_adress {8} --exploratory_name {9}".\
             format(instance_hostname, keyfile_name,
                    os.environ['azure_region'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['dlab_ssh_user'],
                    os.environ['notebook_scala_version'], os.environ['notebook_r_mirror'],
-                   notebook_config['exploratory_name'])
+                   notebook_config['ip_address'], notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_jupyter_node', params))
             remount_azure_disk(True, notebook_config['dlab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
index d75455a..4f466ce 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/rstudio_configure.py
@@ -65,6 +65,8 @@
                                    "Exploratory": notebook_config['exploratory_name'],
                                    os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
         notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+        notebook_config['ip_address'] = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
+                                                                           notebook_config['instance_name'])
 
         # generating variables regarding EDGE proxy on Notebook instance
         instance_hostname = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
@@ -152,14 +154,15 @@
     try:
         logging.info('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
         print('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
-        params = "--hostname {}  --keyfile {} " \
-                 "--region {} --rstudio_pass {} " \
-                 "--rstudio_version {} --os_user {} " \
-                 "--r_mirror {} --exploratory_name {}" \
+        params = "--hostname {0}  --keyfile {1} " \
+                 "--region {2} --rstudio_pass {3} " \
+                 "--rstudio_version {4} --os_user {5} " \
+                 "--r_mirror {6} --ip_adress {7} --exploratory_name {8}" \
             .format(instance_hostname, keyfile_name,
                     os.environ['azure_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['dlab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['exploratory_name'])
+                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_rstudio_node', params))
             remount_azure_disk(True, notebook_config['dlab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
index 704f372..c6d51eb 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/tensor_configure.py
@@ -67,6 +67,8 @@
                                    "Exploratory": notebook_config['exploratory_name'],
                                    os.environ['conf_billing_tag_key']: os.environ['conf_billing_tag_value']}
         notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+        notebook_config['ip_address'] = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
+                                                        notebook_config['instance_name'])
 
         # generating variables regarding EDGE proxy on Notebook instance
         instance_hostname = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
@@ -151,12 +153,12 @@
     try:
         logging.info('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
         print('[CONFIGURE TENSORFLOW NOTEBOOK INSTANCE]')
-        params = "--hostname {} --keyfile {} " \
-                 "--region {} --os_user {} " \
-                 "--exploratory_name {}" \
+        params = "--hostname {0} --keyfile {1} " \
+                 "--region {2} --os_user {3} " \
+                 "--ip_adress {4} --exploratory_name {5}" \
                  .format(instance_hostname, keyfile_name,
                          os.environ['azure_region'], notebook_config['dlab_ssh_user'],
-                         notebook_config['exploratory_name'])
+                         notebook_config['ip_address'], notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_tensor_node', params))
             remount_azure_disk(True, notebook_config['dlab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
index 367e9f7..5917ee2 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/zeppelin_configure.py
@@ -66,6 +66,8 @@
                                    "Exploratory": notebook_config['exploratory_name'],
                                    "product": "dlab"}
         notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
+        notebook_config['ip_address'] = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
+                                                        notebook_config['instance_name'])
 
         # generating variables regarding EDGE proxy on Notebook instance
         instance_hostname = AzureMeta().get_private_ip_address(notebook_config['resource_group_name'],
@@ -156,22 +158,22 @@
                              "backend_hostname": instance_hostname,
                              "backend_port": "8080",
                              "nginx_template_dir": "/root/templates/"}
-        params = "--hostname {} --instance_name {} " \
-                 "--keyfile {} --region {} " \
-                 "--additional_config '{}' --os_user {} " \
-                 "--spark_version {} --hadoop_version {} " \
-                 "--edge_hostname {} --proxy_port {} " \
-                 "--zeppelin_version {} --scala_version {} " \
-                 "--livy_version {} --multiple_clusters {} " \
-                 "--r_mirror {} --endpoint_url {} " \
-                 "--exploratory_name {}" \
+        params = "--hostname {0} --instance_name {1} " \
+                 "--keyfile {2} --region {3} " \
+                 "--additional_config '{4}' --os_user {5} " \
+                 "--spark_version {6} --hadoop_version {7} " \
+                 "--edge_hostname {8} --proxy_port {9} " \
+                 "--zeppelin_version {10} --scala_version {11} " \
+                 "--livy_version {12} --multiple_clusters {13} " \
+                 "--r_mirror {14} --endpoint_url {15} " \
+                 "--ip_adress {16} --exploratory_name {17}" \
             .format(instance_hostname, notebook_config['instance_name'], keyfile_name, os.environ['azure_region'],
                     json.dumps(additional_config), notebook_config['dlab_ssh_user'], os.environ['notebook_spark_version'],
                     os.environ['notebook_hadoop_version'], edge_instance_private_hostname, '3128',
                     os.environ['notebook_zeppelin_version'], os.environ['notebook_scala_version'],
                     os.environ['notebook_livy_version'], os.environ['notebook_multiple_clusters'],
                     os.environ['notebook_r_mirror'], 'null',
-                    notebook_config['exploratory_name'])
+                    notebook_config['ip_address'], notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_zeppelin_node', params))
             remount_azure_disk(True, notebook_config['dlab_ssh_user'], instance_hostname,
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
index 9bd7ab4..acb80e8 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_notebook_configure_dataengine.py
@@ -84,10 +84,12 @@
     try:
         logging.info('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
         print('[INSTALLING KERNELS INTO SPECIFIED NOTEBOOK]')
-        params = "--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --keyfile {} --notebook_ip {}".\
+        params = "--cluster_name {0} --spark_version {1} --hadoop_version {2} --os_user {3} --spark_master {4} --keyfile {5}" \
+                 " --notebook_ip {6} --spark_master_ip {7}".\
             format(notebook_config['cluster_name'], os.environ['notebook_spark_version'],
                    os.environ['notebook_hadoop_version'], notebook_config['dlab_ssh_user'],
-                   notebook_config['spark_master_url'], notebook_config['key_path'], notebook_config['notebook_ip'])
+                   notebook_config['spark_master_url'], notebook_config['key_path'],
+                   notebook_config['notebook_ip'], notebook_config['spark_master_ip'])
         try:
             local("~/scripts/{}_{}.py {}".format(os.environ['application'], 'install_dataengine_kernels', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
index d61584a..1459f9e 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/common_start_notebook.py
@@ -76,6 +76,20 @@
         sys.exit(1)
 
     try:
+        logging.info('[UPDATE LAST ACTIVITY TIME]')
+        print('[UPDATE LAST ACTIVITY TIME]')
+        params = '--os_user {} --notebook_ip {} --keyfile "{}"' \
+            .format(os.environ['conf_os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'])
+        try:
+            local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
+        except Exception as err:
+            traceback.print_exc()
+            append_result("Failed to update last activity time.", str(err))
+            raise Exception
+    except:
+        sys.exit(1)
+
+    try:
         print('[SUMMARY]')
         logging.info('[SUMMARY]')
         print("Instance name: {}".format(notebook_config['notebook_name']))
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
index 001de6d..04097e9 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine_start.py
@@ -78,6 +78,28 @@
             raise Exception
     except:
         sys.exit(1)
+
+    try:
+        logging.info('[UPDATE LAST ACTIVITY TIME]')
+        print('[UPDATE LAST ACTIVITY TIME]')
+        data_engine['computational_id'] = data_engine['cluster_name'] + '-m'
+        data_engine['tag_name'] = data_engine['service_base_name'] + '-Tag'
+        data_engine['notebook_ip'] = GCPMeta().get_private_ip_address(os.environ['notebook_instance_name'])
+        data_engine['computational_ip'] = GCPMeta().get_private_ip_address(data_engine['computational_id'])
+        data_engine['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+        params = '--os_user {0} --notebook_ip {1} --keyfile "{2}" --cluster_ip {3}' \
+            .format(os.environ['conf_os_user'], data_engine['notebook_ip'], data_engine['keyfile'],
+                    data_engine['computational_ip'])
+        try:
+            local("~/scripts/{}.py {}".format('update_inactivity_on_start', params))
+        except Exception as err:
+            traceback.print_exc()
+            append_result("Failed to update last activity time.", str(err))
+            raise Exception
+    except:
+        sys.exit(1)
+
     try:
         with open("/root/result.json", 'w') as result:
             res = {"service_base_name": data_engine['service_base_name'],
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
index 1c23f38..8d9da73 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/rstudio_configure.py
@@ -63,6 +63,7 @@
     notebook_config['ssh_key_path'] = '{0}{1}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
     notebook_config['dlab_ssh_user'] = os.environ['conf_os_user']
     notebook_config['zone'] = os.environ['gcp_zone']
+    notebook_config['ip_address'] = GCPMeta().get_private_ip_address(notebook_config['instance_name'])
     notebook_config['rstudio_pass'] = id_generator()
     notebook_config['shared_image_enabled'] = os.environ['conf_shared_image_enabled']
     try:
@@ -132,14 +133,15 @@
     try:
         logging.info('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
         print('[CONFIGURE RSTUDIO NOTEBOOK INSTANCE]')
-        params = "--hostname {}  --keyfile {} " \
-                 "--region {} --rstudio_pass {} " \
-                 "--rstudio_version {} --os_user {} " \
-                 "--r_mirror {} --exploratory_name {}" \
+        params = "--hostname {0}  --keyfile {1} " \
+                 "--region {2} --rstudio_pass {3} " \
+                 "--rstudio_version {4} --os_user {5} " \
+                 "--r_mirror {6} --ip_adress {7} --exploratory_name {8}" \
             .format(instance_hostname, notebook_config['ssh_key_path'],
                     os.environ['gcp_region'], notebook_config['rstudio_pass'],
                     os.environ['notebook_rstudio_version'], notebook_config['dlab_ssh_user'],
-                    os.environ['notebook_r_mirror'], notebook_config['exploratory_name'])
+                    os.environ['notebook_r_mirror'], notebook_config['ip_address'],
+                    notebook_config['exploratory_name'])
         try:
             local("~/scripts/{}.py {}".format('configure_rstudio_node', params))
         except:
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
index 0830ff0..1b3602b 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/zeppelin_configure.py
@@ -9,9 +9,9 @@
 # to you under the Apache License, Version 2.0 (the
 # "License"); you may not use this file except in compliance
 # with the License.  You may obtain a copy of the License at
-# 
+#
 #   http://www.apache.org/licenses/LICENSE-2.0
-# 
+#
 # Unless required by applicable law or agreed to in writing,
 # software distributed under the License is distributed on an
 # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
diff --git a/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py b/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
new file mode 100644
index 0000000..f6f4ea0
--- /dev/null
+++ b/infrastructure-provisioning/src/general/scripts/os/check_inactivity.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import sys
+import argparse
+from dlab.notebook_lib import *
+from dlab.fab import *
+from dlab.actions_lib import *
+from fabric.api import *
+import json
+import os
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--instance_ip', type=str, default='')
+parser.add_argument('--resource_type', type=str, default='')
+parser.add_argument('--keyfile', type=str, default='')
+parser.add_argument('--dataengine_ip', type=str, default='')
+args = parser.parse_args()
+
+
+if __name__ == "__main__":
+    env['connection_attempts'] = 100
+    env.key_filename = [args.keyfile]
+    env.host_string = '{}@{}'.format(args.os_user, args.instance_ip)
+
+    inactivity_dir = '/opt/inactivity/'
+    if args.resource_type == 'dataengine':
+        reworked_ip = args.dataengine_ip.replace('.', '-')
+        inactivity_file = '{}_inactivity'.format(reworked_ip)
+    else:
+        inactivity_file = 'local_inactivity'
+
+    if exists('{}{}'.format(inactivity_dir, inactivity_file)):
+        timestamp = sudo('cat {}{}'.format(inactivity_dir, inactivity_file))
+    else:
+        timestamp = '0000000000'
+
+    with open('/root/result.json', 'w') as outfile:
+        json.dump(timestamp, outfile)
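
The timestamp persisted into result.json is the raw Unix epoch written by the inactivity templates via `date +%s`, with '0000000000' as the sentinel when no marker file exists yet. A small sketch, with illustrative names only, of how a consumer could turn that value into idle time:
```
import time

def idle_seconds(timestamp_str):
    # Seconds since the last recorded activity; '0000000000' means "never active".
    return int(time.time()) - int(timestamp_str)

print(idle_seconds('0000000000') > 0)  # True: treated as inactive since the epoch
```
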
diff --git a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
index 4564d8c..5cee0bc 100644
--- a/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
+++ b/infrastructure-provisioning/src/general/scripts/os/common_clean_instance.py
@@ -38,6 +38,10 @@
 def general_clean():
     try:
         sudo('systemctl stop ungit')
+        sudo('systemctl stop inactive.timer')
+        sudo('rm -f /etc/systemd/system/inactive.service')
+        sudo('rm -f /etc/systemd/system/inactive.timer')
+        sudo('rm -rf /opt/inactivity')
         sudo('npm -g uninstall ungit')
         sudo('rm -f /etc/systemd/system/ungit.service')
         sudo('systemctl daemon-reload')
diff --git a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
index 44c6762..c01a907 100644
--- a/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/deeplearning_install_dataengine_kernels.py
@@ -37,6 +37,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--notebook_ip', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='false')
+parser.add_argument('--spark_master_ip', type=str, default='')
 args = parser.parse_args()
 
 
@@ -62,6 +63,9 @@
         if exists('/usr/lib64'):
             sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
 
+def create_inactivity_log(master_ip, hoststring):
+    reworked_ip = master_ip.replace('.', '-')
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -73,6 +77,7 @@
     except:
         region = ''
     configure_notebook(args.keyfile, env.host_string)
+    create_inactivity_log(args.spark_master_ip, env.host_string)
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     sudo('/usr/bin/python /usr/local/bin/deeplearning_dataengine_create_configs.py '
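
The `create_inactivity_log` helper duplicated into each *_install_dataengine_kernels.py script seeds a per-cluster marker named after the Spark master IP with dots swapped for dashes, which is exactly the name check_inactivity.py reconstructs from --dataengine_ip. For example (hypothetical IP):
```
master_ip = '10.0.1.15'  # hypothetical Spark master address
print('/opt/inactivity/{}_inactivity'.format(master_ip.replace('.', '-')))
# -> /opt/inactivity/10-0-1-15_inactivity
```
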
diff --git a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
index 7a5d372..9b984af 100644
--- a/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/jupyter_install_dataengine_kernels.py
@@ -37,6 +37,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--notebook_ip', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='false')
+parser.add_argument('--spark_master_ip', type=str, default='')
 args = parser.parse_args()
 
 
@@ -65,6 +66,9 @@
         if exists('/usr/lib64'):
             sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
 
+def create_inactivity_log(master_ip, hoststring):
+    reworked_ip = master_ip.replace('.', '-')
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -79,6 +83,7 @@
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
+    create_inactivity_log(args.spark_master_ip, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/jupyter_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} \
          --spark_master {} --region {} --datalake_enabled {} --r_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
new file mode 100644
index 0000000..85f3b8e
--- /dev/null
+++ b/infrastructure-provisioning/src/general/scripts/os/notebook_inactivity_check.py
@@ -0,0 +1,73 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+
+import json
+from dlab.fab import *
+from dlab.meta_lib import *
+import sys, time, os
+from dlab.actions_lib import *
+
+
+if __name__ == "__main__":
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'], os.environ['request_id'])
+    local_log_filepath = "/logs/edge/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    try:
+        logging.info('[ASK INACTIVITY STATUS]')
+        print('[ASK INACTIVITY STATUS]')
+        notebook_config = dict()
+        try:
+            notebook_config['notebook_name'] = os.environ['notebook_instance_name']
+            notebook_config['os_user'] = os.environ['conf_os_user']
+            notebook_config['resource_type'] = os.environ['conf_resource']
+            notebook_config['service_base_name'] = os.environ['conf_service_base_name']
+            notebook_config['tag_name'] = notebook_config['service_base_name'] + '-Tag'
+            notebook_config['notebook_ip'] = get_instance_private_ip_address(
+                notebook_config['tag_name'], notebook_config['notebook_name'])
+            notebook_config['keyfile'] = '{}{}.pem'.format(os.environ['conf_key_dir'], os.environ['conf_key_name'])
+            if notebook_config['resource_type'] == 'dataengine':
+                notebook_config['dataengine_name'] = '{}-m'.format(os.environ['computational_id'])
+                notebook_config['dataengine_ip'] = get_instance_private_ip_address(
+                    notebook_config['tag_name'], notebook_config['dataengine_name'])
+            else:
+                notebook_config['dataengine_ip'] = '0.0.0.0'
+        except Exception as err:
+            print('Error: {0}'.format(err))
+            append_result("Failed to get parameter.", str(err))
+            sys.exit(1)
+        params = "--os_user {0} --instance_ip {1} --keyfile '{2}' --resource_type {3} --dataengine_ip {4}" \
+            .format(notebook_config['os_user'], notebook_config['notebook_ip'], notebook_config['keyfile'], notebook_config['resource_type'], notebook_config['dataengine_ip'])
+        try:
+            # Run script to fetch the inactivity timestamp
+            local("~/scripts/{}.py {}".format('check_inactivity', params))
+        except:
+            traceback.print_exc()
+            raise Exception
+    except Exception as err:
+        print('Error: {0}'.format(err))
+        append_result("Failed to ask inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
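
For a dataengine resource the script resolves the cluster master as `<computational_id>-m` and forwards its private IP; any other resource type gets the 0.0.0.0 placeholder, so check_inactivity.py falls back to the local_inactivity marker. With hypothetical values, the assembled call looks like:
```
# Illustrative values only; the real ones come from os.environ above.
params = ("--os_user {0} --instance_ip {1} --keyfile '{2}' "
          "--resource_type {3} --dataengine_ip {4}").format(
    'dlab-user', '10.0.1.10', '/root/keys/key.pem', 'dataengine', '10.0.1.15')
print("~/scripts/check_inactivity.py " + params)
```
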
diff --git a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
index cb1a82e..ef071c3 100644
--- a/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/rstudio_install_dataengine_kernels.py
@@ -37,6 +37,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--notebook_ip', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='false')
+parser.add_argument('--spark_master_ip', type=str, default='')
 args = parser.parse_args()
 
 
@@ -58,6 +59,9 @@
         if exists('/usr/lib64'):
             sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
 
+def create_inactivity_log(master_ip, hoststring):
+    reworked_ip = master_ip.replace('.', '-')
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -71,6 +75,7 @@
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
+    create_inactivity_log(args.spark_master_ip, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
index 7294f08..dd466c5 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor-rstudio_install_dataengine_kernels.py
@@ -37,6 +37,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--notebook_ip', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='false')
+parser.add_argument('--spark_master_ip', type=str, default='')
 args = parser.parse_args()
 
 
@@ -58,6 +59,9 @@
         if exists('/usr/lib64'):
             sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
 
+def create_inactivity_log(master_ip, hoststring):
+    reworked_ip = master_ip.replace('.', '-')
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -71,6 +75,7 @@
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
+    create_inactivity_log(args.spark_master_ip, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/tensor-rstudio_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
index e8c6565..40024ac 100644
--- a/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/tensor_install_dataengine_kernels.py
@@ -37,6 +37,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--notebook_ip', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='false')
+parser.add_argument('--spark_master_ip', type=str, default='')
 args = parser.parse_args()
 
 
@@ -59,6 +60,9 @@
         if exists('/usr/lib64'):
             sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
 
+def create_inactivity_log(master_ip, hoststring):
+    reworked_ip = master_ip.replace('.', '-')
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -72,6 +76,7 @@
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
+    create_inactivity_log(args.spark_master_ip, env.host_string)
     sudo('/usr/bin/python /usr/local/bin/tensor_dataengine_create_configs.py '
          '--cluster_name {} --spark_version {} --hadoop_version {} --os_user {} --spark_master {} --region {} '
          '--datalake_enabled {} --spark_configurations "{}"'.
diff --git a/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py b/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
new file mode 100644
index 0000000..6d496c7
--- /dev/null
+++ b/infrastructure-provisioning/src/general/scripts/os/update_inactivity_on_start.py
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+import argparse
+from dlab.notebook_lib import *
+from dlab.actions_lib import *
+from dlab.fab import *
+from fabric.api import *
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('--keyfile', type=str, default='')
+parser.add_argument('--notebook_ip', type=str, default='')
+parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--cluster_ip', type=str, default='none')
+args = parser.parse_args()
+
+
+if __name__ == "__main__":
+    env.hosts = "{}".format(args.notebook_ip)
+    env['connection_attempts'] = 100
+    env.user = args.os_user
+    env.key_filename = "{}".format(args.keyfile)
+    env.host_string = env.user + "@" + env.hosts
+
+    if args.cluster_ip == "none":
+        kernel = 'local'
+    else:
+        kernel = args.cluster_ip.replace('.', '-')
+
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(kernel))
diff --git a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
index 2b10a5b..7d851cd 100644
--- a/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
+++ b/infrastructure-provisioning/src/general/scripts/os/zeppelin_install_dataengine_kernels.py
@@ -38,6 +38,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--notebook_ip', type=str, default='')
 parser.add_argument('--datalake_enabled', type=str, default='false')
+parser.add_argument('--spark_master_ip', type=str, default='')
 args = parser.parse_args()
 
 
@@ -64,6 +65,9 @@
         if exists('/usr/lib64'):
             sudo('ln -fs /usr/lib/python2.7/dlab /usr/lib64/python2.7/dlab')
 
+def create_inactivity_log(master_ip, hoststring):
+    reworked_ip = master_ip.replace('.', '-')
+    sudo("date +%s > /opt/inactivity/{}_inactivity".format(reworked_ip))
 
 if __name__ == "__main__":
     env.hosts = "{}".format(args.notebook_ip)
@@ -77,6 +81,7 @@
     if 'spark_configurations' not in os.environ:
         os.environ['spark_configurations'] = '[]'
     configure_notebook(args.keyfile, env.host_string)
+    create_inactivity_log(args.spark_master_ip, env.host_string)
     livy_version = os.environ['notebook_livy_version']
     r_enabled = os.environ['notebook_r_enabled']
     sudo('/usr/bin/python /usr/local/bin/zeppelin_dataengine_create_configs.py '
diff --git a/infrastructure-provisioning/src/general/templates/os/inactive.service b/infrastructure-provisioning/src/general/templates/os/inactive.service
new file mode 100644
index 0000000..445e724
--- /dev/null
+++ b/infrastructure-provisioning/src/general/templates/os/inactive.service
@@ -0,0 +1,29 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+[Unit]
+Description=inactivity check
+
+[Service]
+User=root
+Type=oneshot
+ExecStart=/bin/bash /opt/inactivity/inactive.sh
+
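
The oneshot service is driven by the timer unit below rather than enabled on its own. The actual installation happens in `install_inactivity_checker`, which this diff does not show; a hedged sketch of what such a helper plausibly does, in the fabric style used elsewhere in this patch:
```
# Assumption: illustrative only, not the real body of install_inactivity_checker.
from fabric.api import put, sudo

def install_inactivity_units():
    sudo('mkdir -p /opt/inactivity')
    put('/root/templates/inactive.service', '/etc/systemd/system/inactive.service', use_sudo=True)
    put('/root/templates/inactive.timer', '/etc/systemd/system/inactive.timer', use_sudo=True)
    sudo('systemctl daemon-reload')
    sudo('systemctl enable inactive.timer')
    sudo('systemctl start inactive.timer')
```
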
diff --git a/infrastructure-provisioning/src/general/templates/os/inactive.sh b/infrastructure-provisioning/src/general/templates/os/inactive.sh
new file mode 100644
index 0000000..b3598f4
--- /dev/null
+++ b/infrastructure-provisioning/src/general/templates/os/inactive.sh
@@ -0,0 +1,49 @@
+#!/bin/bash
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+ip="IP_ADRESS"
+jps -m | grep spark | \
+while read i
+do
+  if [[ $i == *"--master spark"* ]]
+  then
+	master="$(echo $i | sed -n 's/.*spark\:\/\/\([0-9.]*\):7077 .*/\1/p' | sed -n 's/\./\-/gp')"
+	pid="$(echo $i | sed -n 's/\(^[0-9]*\) .*/\1/p')"
+	port="$(ss -tlpn | cat | grep ${pid} | grep ':40.. ' | sed -n 's/.*:\(40..\) .*/\1/p')"
+	app="$(curl http://${ip}:${port}/api/v1/applications/ 2>&1 | sed -n 's/\.*  "id" : "\(.*\)",/\1/p')"
+	check="$(curl http://${ip}:${port}/api/v1/applications/${app}/jobs 2>&1 | grep RUNNING > /dev/null && echo 1 || echo 0)"
+	if [[ $check == "1" ]]
+	then
+		date +%s > /opt/inactivity/${master}_inactivity
+	fi
+  else
+	pid="$(echo $i | sed -n 's/\(^[0-9]*\) .*/\1/p')"
+	port="$(ss -tlpn | cat | grep ${pid} | grep ':40.. ' | sed -n 's/.*:\(40..\) .*/\1/p')"
+	app="$(curl http://${ip}:${port}/api/v1/applications/ 2>&1 | sed -n 's/\.*  "id" : "\(.*\)",/\1/p')"
+	check="$(curl http://${ip}:${port}/api/v1/applications/${app}/jobs 2>&1 | grep RUNNING > /dev/null && echo 1 || echo 0)"
+	if [[ $check == "1" ]]
+	then
+		date +%s > /opt/inactivity/local_inactivity
+	fi
+  fi
+done
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/general/templates/os/inactive.timer b/infrastructure-provisioning/src/general/templates/os/inactive.timer
new file mode 100644
index 0000000..c04a73e
--- /dev/null
+++ b/infrastructure-provisioning/src/general/templates/os/inactive.timer
@@ -0,0 +1,31 @@
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+[Unit]
+Description=inactive service
+
+[Timer]
+OnUnitActiveSec=3s
+OnBootSec=120s
+AccuracySec=5s
+
+[Install]
+WantedBy=timers.target
\ No newline at end of file
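
OnBootSec=120s delays the first probe until two minutes after boot, and OnUnitActiveSec=3s re-triggers the oneshot service three seconds after each activation, so the Spark job check runs near-continuously; AccuracySec=5s lets systemd coalesce wakeups within a five-second window. On a provisioned node the schedule can be inspected with:
```
from fabric.api import sudo

sudo('systemctl list-timers inactive.timer --no-pager')
```
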
diff --git a/infrastructure-provisioning/src/general/templates/os/inactive_rs.sh b/infrastructure-provisioning/src/general/templates/os/inactive_rs.sh
new file mode 100644
index 0000000..bbee021
--- /dev/null
+++ b/infrastructure-provisioning/src/general/templates/os/inactive_rs.sh
@@ -0,0 +1,48 @@
+#!/bin/bash
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+ip="IP_ADRESS"
+jps -m | grep spark | \
+while read i
+do
+  pid="$(echo $i | sed -n 's/\(^[0-9]*\) .*/\1/p')"
+  port="$(ss -tlpn | cat | grep ${pid} | grep ':40.. ' | sed -n 's/.*:\(40..\) .*/\1/p')"
+  app_master_check="$(curl http://${ip}:${port}/environment/ 2>&1 | grep spark.master > /dev/null && echo check || echo emr)"
+  if [[ $app_master_check == "check" ]]
+  then
+  	parse_master="$(curl http://${ip}:${port}/environment/ 2>&1 | sed -n 's/.*spark\.master<\/td><td>\([^<]*\)<\/td>.*/\1/p')"
+    if [[ $parse_master == *"local"* ]]
+    then
+      master="local"
+    elif [[ $parse_master == *"spark://"* ]]
+    then
+      master="$(curl http://${ip}:${port}/environment/ 2>&1 | sed -n 's/.*spark\.master<\/td><td>\([^<]*\)<\/td>.*/\1/p' | sed -n 's/spark\:\/\/\([0-9.]*\):7077/\1/p' | sed -n 's/\./\-/gp')"
+    fi
+    app="$(curl http://${ip}:${port}/api/v1/applications/ 2>&1 | sed -n 's/\.*  "id" : "\(.*\)",/\1/p')"
+    check="$(curl http://${ip}:${port}/api/v1/applications/${app}/jobs 2>&1 | grep RUNNING > /dev/null && echo 1 || echo 0)"
+    if [[ $check == "1" ]]
+    then
+  	  date +%s > /opt/inactivity/${master}_inactivity
+    fi
+  fi
+done
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/jupyter/fabfile.py b/infrastructure-provisioning/src/jupyter/fabfile.py
index 181b79b..7b3f23d 100644
--- a/infrastructure-provisioning/src/jupyter/fabfile.py
+++ b/infrastructure-provisioning/src/jupyter/fabfile.py
@@ -224,3 +224,19 @@
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
         sys.exit(1)
+
+# Main function for checking inactivity status
+def check_inactivity():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    try:
+        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to check inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
index 124a2ba..96bebb9 100644
--- a/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
+++ b/infrastructure-provisioning/src/jupyter/scripts/configure_jupyter_node.py
@@ -38,6 +38,7 @@
 parser.add_argument('--os_user', type=str, default='')
 parser.add_argument('--scala_version', type=str, default='')
 parser.add_argument('--r_mirror', type=str, default='')
+parser.add_argument('--ip_adress', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
 
@@ -129,6 +130,10 @@
     if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
+    # INSTALL INACTIVITY CHECKER
+    print("Install inactivity checker")
+    install_inactivity_checker(args.os_user, args.ip_adress)
+
     # INSTALL OPTIONAL PACKAGES
     print("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
diff --git a/infrastructure-provisioning/src/rstudio/fabfile.py b/infrastructure-provisioning/src/rstudio/fabfile.py
index 508d0e1..35ef372 100644
--- a/infrastructure-provisioning/src/rstudio/fabfile.py
+++ b/infrastructure-provisioning/src/rstudio/fabfile.py
@@ -223,3 +223,19 @@
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
         sys.exit(1)
+
+# Main function for checking inactivity status
+def check_inactivity():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    try:
+        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to check inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
index f831375..de1b7a4 100644
--- a/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
+++ b/infrastructure-provisioning/src/rstudio/scripts/configure_rstudio_node.py
@@ -40,6 +40,7 @@
 parser.add_argument('--rstudio_pass', type=str, default='')
 parser.add_argument('--rstudio_version', type=str, default='')
 parser.add_argument('--r_mirror', type=str, default='')
+parser.add_argument('--ip_adress', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
 
@@ -108,7 +109,11 @@
     install_ungit(args.os_user, args.exploratory_name)
     if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
-    
+
+    # INSTALL INACTIVITY CHECKER
+    print("Install inactivity checker")
+    install_inactivity_checker(args.os_user, args.ip_adress, True)
+
     #POST INSTALLATION PROCESS
     print("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
index b9247b8..6f55ab7 100644
--- a/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
+++ b/infrastructure-provisioning/src/ssn/scripts/configure_ssn_node.py
@@ -136,6 +136,15 @@
         print('Failed to configure SSL certificates: ', str(err))
         sys.exit(1)
 
+def docker_build_script():
+    try:
+        put('/root/templates/docker_build.py', 'docker_build')
+        sudo('chmod +x docker_build')
+        sudo('mv docker_build /usr/bin/docker-build')
+    except Exception as err:
+        traceback.print_exc()
+        print('Failed to configure docker_build script: ', str(err))
+        sys.exit(1)
 
 ##############
 # Run script #
@@ -192,4 +201,7 @@
     cp_gitlab_scripts(args.dlab_path)
 
     print("Ensuring safest ssh ciphers")
-    ensure_ciphers()
\ No newline at end of file
+    ensure_ciphers()
+
+    print("Configuring docker_build script")
+    docker_build_script()
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/ssn/templates/docker_build.py b/infrastructure-provisioning/src/ssn/templates/docker_build.py
new file mode 100644
index 0000000..fd44b42
--- /dev/null
+++ b/infrastructure-provisioning/src/ssn/templates/docker_build.py
@@ -0,0 +1,73 @@
+#!/usr/bin/python
+
+# *****************************************************************************
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+# ******************************************************************************
+
+
+from fabric.api import *
+import sys
+import os
+import traceback
+
+src_path = '/opt/dlab/sources/infrastructure-provisioning/src/'
+if sys.argv[1] == 'all':
+    node = [
+            'edge',
+            'jupyter',
+            'rstudio',
+            'zeppelin',
+            'tensor',
+            'tensor-rstudio',
+            'deeplearning',
+            'dataengine'
+            ]
+else:
+    node = sys.argv[1:]
+
+def image_build(src_path, node):
+    try:
+        if local("cat /etc/lsb-release | grep DISTRIB_ID | awk -F '=' '{print $2}'", capture=True).stdout == 'Ubuntu':
+            os_family = 'debian'
+        else:
+            os_family = 'redhat'
+        if local("uname -r | awk -F '-' '{print $3}'", capture=True).stdout == 'aws':
+            cloud_provider = 'aws'
+        elif local("uname -r | awk -F '-' '{print $3}'", capture=True).stdout == 'azure':
+            cloud_provider = 'azure'
+            if not os.path.exists('{}base/azure_auth.json'.format(src_path)):
+                local('cp /home/dlab-user/keys/azure_auth.json {}base/azure_auth.json'.format(src_path))
+        else:
+            cloud_provider = 'gcp'
+        with lcd(src_path):
+            local('docker build --build-arg OS={0} --build-arg SRC_PATH= --file general/files/{1}/base_Dockerfile -t docker.dlab-base:latest .'.format(os_family, cloud_provider))
+            try:
+                for i in range(len(node)):
+                    local('docker build --build-arg OS={0} --file general/files/{1}/{2}_Dockerfile -t docker.dlab-{2} .'.format(os_family, cloud_provider, node[i]))
+            except Exception as err:
+                print("Failed to build {} image".format(node[i]), str(err))
+                raise Exception
+    except Exception as err:
+        traceback.print_exc()
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    image_build(src_path, node)
\ No newline at end of file
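
The provider detection above keys off the kernel release suffix: Ubuntu cloud images typically ship kernels such as 4.15.0-1021-aws or 4.15.0-1021-azure, so the third dash-separated field names the cloud, with gcp as the fallback. The same heuristic outside fabric (illustrative only):
```
import subprocess

def detect_cloud():
    release = subprocess.check_output(['uname', '-r']).decode().strip()
    fields = release.split('-')
    suffix = fields[2] if len(fields) > 2 else ''
    if suffix in ('aws', 'azure'):
        return suffix
    return 'gcp'  # same fallback as docker_build.py
```
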
diff --git a/infrastructure-provisioning/src/tensor-rstudio/fabfile.py b/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
index e69838b..f7a1ad9 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/fabfile.py
@@ -223,3 +223,19 @@
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
         sys.exit(1)
+
+# Main function for checking inactivity status
+def check_inactivity():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    try:
+        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to check inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
index 3a801be..09bc542 100644
--- a/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
+++ b/infrastructure-provisioning/src/tensor-rstudio/scripts/configure_tensor-rstudio_node.py
@@ -40,6 +40,7 @@
 parser.add_argument('--rstudio_pass', type=str, default='')
 parser.add_argument('--rstudio_version', type=str, default='')
 parser.add_argument('--r_mirror', type=str, default='')
+parser.add_argument('--ip_adress', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
 
@@ -127,6 +128,10 @@
     if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
+    # INSTALL INACTIVITY CHECKER
+    print("Install inactivity checker")
+    install_inactivity_checker(args.os_user, args.ip_adress)
+
     # POST INSTALLATION PROCESS
     print("Updating pyOpenSSL library")
     update_pyopenssl_lib(args.os_user)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/tensor/fabfile.py b/infrastructure-provisioning/src/tensor/fabfile.py
index 074ec68..4509ad4 100644
--- a/infrastructure-provisioning/src/tensor/fabfile.py
+++ b/infrastructure-provisioning/src/tensor/fabfile.py
@@ -223,3 +223,19 @@
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
         sys.exit(1)
+
+# Main function for checking inactivity status
+def check_inactivity():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    try:
+        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to check inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
index 94abbe3..85934c2 100644
--- a/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
+++ b/infrastructure-provisioning/src/tensor/scripts/configure_tensor_node.py
@@ -37,6 +37,7 @@
 parser.add_argument('--keyfile', type=str, default='')
 parser.add_argument('--region', type=str, default='')
 parser.add_argument('--os_user', type=str, default='')
+parser.add_argument('--ip_adress', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
 
@@ -130,6 +131,10 @@
     if exists('/home/{0}/{1}'.format(args.os_user, gitlab_certfile)):
         install_gitlab_cert(args.os_user, gitlab_certfile)
 
+    # INSTALL INACTIVITY CHECKER
+    print("Install inactivity checker")
+    install_inactivity_checker(args.os_user, args.ip_adress)
+
     # INSTALL OPTIONAL PACKAGES
     print("Installing additional Python packages")
     ensure_additional_python_libs(args.os_user)
diff --git a/infrastructure-provisioning/src/zeppelin/fabfile.py b/infrastructure-provisioning/src/zeppelin/fabfile.py
index 3fba5c4..5806e45 100644
--- a/infrastructure-provisioning/src/zeppelin/fabfile.py
+++ b/infrastructure-provisioning/src/zeppelin/fabfile.py
@@ -224,3 +224,19 @@
         traceback.print_exc()
         append_result("Failed to reconfigure Spark for Notebook node.", str(err))
         sys.exit(1)
+
+# Main function for checking inactivity status
+def check_inactivity():
+    local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
+                                               os.environ['request_id'])
+    local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
+    logging.basicConfig(format='%(levelname)-8s [%(asctime)s]  %(message)s',
+                        level=logging.DEBUG,
+                        filename=local_log_filepath)
+
+    try:
+        local("~/scripts/{}.py".format('notebook_inactivity_check'))
+    except Exception as err:
+        traceback.print_exc()
+        append_result("Failed to check inactivity status.", str(err))
+        sys.exit(1)
\ No newline at end of file
diff --git a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
index 17540ff..c803622 100644
--- a/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
+++ b/infrastructure-provisioning/src/zeppelin/scripts/configure_zeppelin_node.py
@@ -50,6 +50,7 @@
 parser.add_argument('--multiple_clusters', type=str, default='')
 parser.add_argument('--r_mirror', type=str, default='')
 parser.add_argument('--endpoint_url', type=str, default='')
+parser.add_argument('--ip_adress', type=str, default='')
 parser.add_argument('--exploratory_name', type=str, default='')
 args = parser.parse_args()
 
@@ -248,6 +249,10 @@
     # COPY PRE-COMMIT SCRIPT TO ZEPPELIN
     sudo('cp /home/{}/.git/templates/hooks/pre-commit /opt/zeppelin/notebook/.git/hooks/'.format(args.os_user))
 
+    # INSTALL INACTIVITY CHECKER
+    print("Install inactivity checker")
+    install_inactivity_checker(args.os_user, args.ip_adress)
+
     # INSTALL OPTIONAL PACKAGES
     if os.environ['notebook_r_enabled'] == 'true':
         print("Install additional R packages")
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/SchedulerJobDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/SchedulerJobDTO.java
index 52d56a0..8e15a59 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/SchedulerJobDTO.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/SchedulerJobDTO.java
@@ -27,6 +27,7 @@
 import java.time.*;
 import java.util.Collections;
 import java.util.List;
+import java.util.Objects;
 
 /**
  * Stores info about a scheduler job (general duration, days to repeat, time to start and finish).
@@ -71,5 +72,11 @@
 	private Long maxInactivity;
 	@JsonProperty("check_inactivity_required")
 	private boolean checkInactivityRequired;
+	@JsonProperty("consider_inactivity")
+	private boolean considerInactivity = true;
+
+	public boolean inactivityScheduler() {
+		return Objects.nonNull(maxInactivity);
+	}
 
 }
\ No newline at end of file
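The new `consider_inactivity` flag (defaulting to `true`) and the `inactivityScheduler()` helper split scheduler configurations into two flavours: stop by idle timeout when `max_inactivity` is set, and stop by time window when it is not. A minimal sketch of the distinction, assuming Lombok generates the usual setters on `SchedulerJobDTO`:

```java
import com.epam.dlab.dto.SchedulerJobDTO;

public class SchedulerFlavourDemo {
	public static void main(String[] args) {
		// Idle-timeout flavour: max_inactivity is set, so inactivityScheduler() is true.
		SchedulerJobDTO byIdleTimeout = new SchedulerJobDTO();
		byIdleTimeout.setMaxInactivity(120L);                     // setter assumed (Lombok)
		System.out.println(byIdleTimeout.inactivityScheduler()); // true

		// Time-window flavour: maxInactivity stays null, so the helper returns false;
		// consider_inactivity still defaults to true either way.
		SchedulerJobDTO byTimeWindow = new SchedulerJobDTO();
		System.out.println(byTimeWindow.inactivityScheduler());  // false
	}
}
```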
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/base/DataEngineType.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/base/DataEngineType.java
index 39b1c63..a9d8b03 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/base/DataEngineType.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/base/DataEngineType.java
@@ -25,38 +25,42 @@
 import java.util.Map;
 
 public enum DataEngineType {
-    CLOUD_SERVICE("dataengine-service"), SPARK_STANDALONE("dataengine");
+	CLOUD_SERVICE("dataengine-service"), SPARK_STANDALONE("dataengine");
 
-    private static final String DOCKER_IMAGE_PREFIX = "docker.dlab-";
+	private static final String DOCKER_IMAGE_PREFIX = "docker.dlab-";
 
-    private static final Map<String, DataEngineType> INTERNAL_MAP = new HashMap<>();
+	private static final Map<String, DataEngineType> INTERNAL_MAP = new HashMap<>();
 
-    static {
-        for (DataEngineType dataEngineType : DataEngineType.values()) {
-            INTERNAL_MAP.put(dataEngineType.getName(), dataEngineType);
-        }
-    }
+	static {
+		for (DataEngineType dataEngineType : DataEngineType.values()) {
+			INTERNAL_MAP.put(dataEngineType.getName(), dataEngineType);
+		}
+	}
 
-    private String name;
+	private String name;
 
-    DataEngineType(String name) {
-        this.name = name;
-    }
+	DataEngineType(String name) {
+		this.name = name;
+	}
 
-    public static DataEngineType fromString(String name) {
-        return INTERNAL_MAP.get(name);
-    }
+	public String getImage() {
+		return DOCKER_IMAGE_PREFIX + this.name;
+	}
 
-    public static DataEngineType fromDockerImageName(String name) {
-        return INTERNAL_MAP.get(name.replace(DOCKER_IMAGE_PREFIX, ""));
-    }
+	public static DataEngineType fromString(String name) {
+		return INTERNAL_MAP.get(name);
+	}
 
-    public static String getDockerImageName(DataEngineType dataEngineType) {
-        return DOCKER_IMAGE_PREFIX + dataEngineType.getName();
-    }
+	public static DataEngineType fromDockerImageName(String name) {
+		return INTERNAL_MAP.get(name.replace(DOCKER_IMAGE_PREFIX, ""));
+	}
 
-    @JsonValue
-    public String getName() {
-        return name;
-    }
+	public static String getDockerImageName(DataEngineType dataEngineType) {
+		return DOCKER_IMAGE_PREFIX + dataEngineType.getName();
+	}
+
+	@JsonValue
+	public String getName() {
+		return name;
+	}
 }
\ No newline at end of file
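Besides the retabbing, the enum gains an instance-level `getImage()` that mirrors the static `getDockerImageName()`. A quick round-trip using only the members shown above:

```java
import com.epam.dlab.dto.base.DataEngineType;

public class DataEngineTypeDemo {
	public static void main(String[] args) {
		// fromDockerImageName() strips the "docker.dlab-" prefix before the lookup.
		DataEngineType spark = DataEngineType.fromDockerImageName("docker.dlab-dataengine");
		System.out.println(spark);            // SPARK_STANDALONE
		System.out.println(spark.getImage()); // docker.dlab-dataengine

		// The new instance method agrees with the existing static helper.
		System.out.println(DataEngineType.getDockerImageName(spark)); // docker.dlab-dataengine
	}
}
```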
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/CheckInactivityStatusDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/CheckInactivityStatusDTO.java
index 04ac54d..80e57cd 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/CheckInactivityStatusDTO.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/CheckInactivityStatusDTO.java
@@ -19,32 +19,12 @@
 package com.epam.dlab.dto.computational;
 
 import com.epam.dlab.dto.StatusBaseDTO;
-import com.epam.dlab.dto.status.EnvResource;
-import com.google.common.base.MoreObjects;
-import lombok.Getter;
+import lombok.Data;
 
-import java.util.List;
-
-@Getter
+@Data
 public class CheckInactivityStatusDTO extends StatusBaseDTO<CheckInactivityStatusDTO> {
 
-	private List<EnvResource> resources;
-	private CheckInactivityStatus checkInactivityStatus;
-
-	public CheckInactivityStatusDTO withResources(List<EnvResource> clusters) {
-		this.resources = clusters;
-		return this;
-	}
-
-	public CheckInactivityStatusDTO withStatus(CheckInactivityStatus checkInactivityStatus) {
-		this.checkInactivityStatus = checkInactivityStatus;
-		return this;
-	}
-
-	@Override
-	public MoreObjects.ToStringHelper toStringHelper(Object self) {
-		return super.toStringHelper(self)
-				.add("resources", resources)
-				.add("checkInactivityStatus", checkInactivityStatus);
-	}
+	private String exploratoryName;
+	private String computationalName;
+	private long lastActivityUnixTime;
 }
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/ComputationalCheckInactivityDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/ComputationalCheckInactivityDTO.java
new file mode 100644
index 0000000..1fc3291
--- /dev/null
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/computational/ComputationalCheckInactivityDTO.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.dto.computational;
+
+import com.epam.dlab.dto.base.computational.ComputationalBase;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+public class ComputationalCheckInactivityDTO extends ComputationalBase<ComputationalCheckInactivityDTO> {
+	private String notebookImage;
+	@JsonProperty("computational_id")
+	private String computationalId;
+	private String image;
+
+	public ComputationalCheckInactivityDTO withNotebookImageName(String imageName) {
+		this.notebookImage = imageName;
+		return this;
+	}
+
+	public ComputationalCheckInactivityDTO withComputationalId(String computationalId) {
+		this.computationalId = computationalId;
+		return this;
+	}
+
+	public ComputationalCheckInactivityDTO withImage(String image) {
+		this.image = image;
+		return this;
+	}
+
+	public String getNotebookImage() {
+		return notebookImage;
+	}
+
+	public String getComputationalId() {
+		return computationalId;
+	}
+
+	public String getImage() {
+		return image;
+	}
+}
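`CheckInactivityServiceImpl` (added below) consumes these fields: `notebookImage` selects the docker image the one-shot container is started from, while `image` decides whether cluster logs land in the dataengine or dataengine-service directory. A construction sketch; the image names and id are hypothetical:

```java
import com.epam.dlab.dto.computational.ComputationalCheckInactivityDTO;

public class CheckInactivityDtoDemo {
	public static void main(String[] args) {
		ComputationalCheckInactivityDTO dto = new ComputationalCheckInactivityDTO()
				.withNotebookImageName("docker.dlab-jupyter") // hypothetical notebook image
				.withComputationalId("cluster-123")           // hypothetical, serialized as computational_id
				.withImage("docker.dlab-dataengine");         // drives the log-directory choice
		System.out.println(dto.getNotebookImage() + " / " + dto.getImage());
	}
}
```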
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryCheckInactivityAction.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryCheckInactivityAction.java
new file mode 100644
index 0000000..28b6e66
--- /dev/null
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryCheckInactivityAction.java
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.dto.exploratory;
+
+public class ExploratoryCheckInactivityAction extends ExploratoryActionDTO<ExploratoryCheckInactivityAction> {
+}
diff --git a/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryStatusDTO.java b/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryStatusDTO.java
index 4303bf6..ac2c9ed 100644
--- a/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryStatusDTO.java
+++ b/services/dlab-model/src/main/java/com/epam/dlab/dto/exploratory/ExploratoryStatusDTO.java
@@ -25,6 +25,7 @@
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.google.common.base.MoreObjects.ToStringHelper;
 
+import java.util.Date;
 import java.util.List;
 
 public class ExploratoryStatusDTO extends StatusEnvBaseDTO<ExploratoryStatusDTO> {
@@ -36,6 +37,8 @@
 	private String exploratoryPassword;
 	@JsonProperty("private_ip")
 	private String privateIp;
+	@JsonProperty("last_activity")
+	private Date lastActivity;
 	@JsonProperty
 	private List<ClusterConfig> config;
 
@@ -100,6 +103,15 @@
 		return config;
 	}
 
+	public ExploratoryStatusDTO withLastActivity(Date lastActivity) {
+		this.lastActivity = lastActivity;
+		return this;
+	}
+
+	public Date getLastActivity() {
+		return lastActivity;
+	}
+
 	@Override
 	public ToStringHelper toStringHelper(Object self) {
 		return super.toStringHelper(self)
diff --git a/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/ApiCallbacks.java b/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/ApiCallbacks.java
index 209d49d..a1dd121 100644
--- a/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/ApiCallbacks.java
+++ b/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/ApiCallbacks.java
@@ -36,7 +36,9 @@
 	public static final String IMAGE_STATUS_URI = IMAGE + "/image_status";
 	public static final String BACKUP_URI = API + "/infrastructure/backup" + STATUS_URI;
 	public static final String REUPLOAD_KEY_URI = API + "/infrastructure/reupload_key/callback";
-	public static final String CHECK_INACTIVITY_CLUSTERS_URI = API + "/infrastructure/inactivity/callback";
+	public static final String CHECK_INACTIVITY_EXPLORATORY_URI = API + "/infrastructure/inactivity/callback/exploratory";
+	public static final String CHECK_INACTIVITY_COMPUTATIONAL_URI =
+			API + "/infrastructure/inactivity/callback/computational";
 
 	private ApiCallbacks() {
 	}
diff --git a/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/InfrasctructureAPI.java b/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/InfrasctructureAPI.java
index ebd8a01..57fb6e0 100644
--- a/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/InfrasctructureAPI.java
+++ b/services/dlab-webapp-common/src/main/java/com/epam/dlab/rest/contracts/InfrasctructureAPI.java
@@ -21,7 +21,8 @@
 public final class InfrasctructureAPI {
 	public static final String INFRASTRUCTURE = "/infrastructure";
 	public static final String INFRASTRUCTURE_STATUS = INFRASTRUCTURE + "/status";
-	public static final String INFRASTRUCTURE_CHECK_INACTIVITY = INFRASTRUCTURE + "/check_inactivity";
+	public static final String EXPLORATORY_CHECK_INACTIVITY = INFRASTRUCTURE + "/exploratory/check_inactivity";
+	public static final String COMPUTATIONAL_CHECK_INACTIVITY = INFRASTRUCTURE + "/computational/check_inactivity";
 
 	private InfrasctructureAPI() {
 	}
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/commands/RunDockerCommand.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/commands/RunDockerCommand.java
index 19dcccf..9cab93a 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/commands/RunDockerCommand.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/commands/RunDockerCommand.java
@@ -255,6 +255,11 @@
         return this;
     }
 
+    public RunDockerCommand withRemove() {
+        options.add("--rm");
+        return this;
+    }
+
     @Override
     public String toCMD() {
         StringBuilder sb = new StringBuilder(command);
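`withRemove()` only appends docker's `--rm` flag, so the short-lived check_inactivity containers are cleaned up by the daemon as soon as they exit instead of accumulating on the node. A small sketch, assuming `toCMD()` renders the accumulated options as the rest of this class suggests; the container name is hypothetical:

```java
import com.epam.dlab.backendapi.core.commands.RunDockerCommand;

public class WithRemoveDemo {
	public static void main(String[] args) {
		RunDockerCommand cmd = new RunDockerCommand()
				.withRemove()                       // appends "--rm" to the option list
				.withName("check_inactivity_demo"); // hypothetical container name
		System.out.println(cmd.toCMD());            // rendered command line includes --rm
	}
}
```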
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/CheckInactivityCallbackHandler.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/CheckInactivityCallbackHandler.java
index 4f25c72..2a2e9e7 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/CheckInactivityCallbackHandler.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/core/response/handlers/CheckInactivityCallbackHandler.java
@@ -18,34 +18,33 @@
  */
 package com.epam.dlab.backendapi.core.response.handlers;
 
+import com.epam.dlab.auth.SystemUserInfoService;
 import com.epam.dlab.backendapi.core.FileHandlerCallback;
-import com.epam.dlab.dto.computational.CheckInactivityStatus;
+import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.dto.computational.CheckInactivityStatusDTO;
-import com.epam.dlab.dto.status.EnvResource;
 import com.epam.dlab.exceptions.DlabException;
 import com.epam.dlab.rest.client.RESTService;
 import com.fasterxml.jackson.annotation.JacksonInject;
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import com.fasterxml.jackson.core.JsonParser;
-import com.fasterxml.jackson.core.type.TypeReference;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.ObjectMapper;
-import com.fasterxml.jackson.databind.ObjectReader;
+import com.google.inject.Singleton;
 import lombok.extern.slf4j.Slf4j;
 
 import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.List;
 
 @Slf4j
+@Singleton
 public class CheckInactivityCallbackHandler implements FileHandlerCallback {
 	private static final ObjectMapper MAPPER = new ObjectMapper()
 			.configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, true);
 	private static final String STATUS_FIELD = "status";
-	private static final String RESOURCES_FIELD = "resources";
 	private static final String ERROR_MESSAGE_FIELD = "error_message";
+	private static final String RESPONSE = "response";
+	private static final String OK_STATUS_STRING = "ok";
+	private static final String RESULT_NODE = "result";
 	@JsonProperty
 	private final String uuid;
 	private final RESTService selfService;
@@ -53,15 +52,30 @@
 	private final String callbackUrl;
 	@JsonProperty
 	private final String user;
+	@JsonProperty
+	private final String exploratoryName;
+	@JsonProperty
+	private final String computationalName;
+	private SystemUserInfoService systemUserInfoService;
 
 	@JsonCreator
 	public CheckInactivityCallbackHandler(@JacksonInject RESTService selfService,
+										  @JacksonInject SystemUserInfoService systemUserInfoService,
 										  @JsonProperty("callbackUrl") String callbackUrl,
-										  @JsonProperty("user") String user, String uuid) {
+										  @JsonProperty("user") String user, String uuid, String exploratoryName,
+										  String computationalName) {
 		this.selfService = selfService;
 		this.uuid = uuid;
 		this.callbackUrl = callbackUrl;
 		this.user = user;
+		this.exploratoryName = exploratoryName;
+		this.computationalName = computationalName;
+		this.systemUserInfoService = systemUserInfoService;
+	}
+
+	public CheckInactivityCallbackHandler(RESTService selfService, SystemUserInfoService systemUserInfoService,
+										  String callbackUrl, String user, String uuid, String exploratoryName) {
+		this(selfService, systemUserInfoService, callbackUrl, user, uuid, exploratoryName, null);
 	}
 
 	@Override
@@ -83,41 +97,16 @@
 
 		final JsonNode treeNode = MAPPER.readTree(fileContent);
 		final String status = treeNode.get(STATUS_FIELD).textValue();
-		CheckInactivityStatusDTO checkInactivityStatusDTO = "ok".equals(status) ?
-				getOkStatusDto(treeNode) : getFailedStatusDto(treeNode);
+		CheckInactivityStatusDTO checkInactivityStatusDTO = OK_STATUS_STRING.equals(status) ?
+				getOkStatusDto(treeNode) : getFailedStatusDto(treeNode.get(ERROR_MESSAGE_FIELD).textValue());
 		selfServicePost(checkInactivityStatusDTO);
-		return "ok".equals(status);
-	}
-
-	private CheckInactivityStatusDTO getOkStatusDto(JsonNode jsonNode) throws IOException {
-		final JsonNode clustersNode = jsonNode.get(RESOURCES_FIELD);
-		ObjectReader reader = MAPPER.readerFor(new TypeReference<List<EnvResource>>() {
-		});
-		List<EnvResource> clusters = reader.readValue(clustersNode);
-		return buildCheckInactivityClustersStatusDTO(CheckInactivityStatus.COMPLETED, clusters);
-	}
-
-	private CheckInactivityStatusDTO getFailedStatusDto(JsonNode jsonNode) {
-		return buildCheckInactivityClustersStatusDTO(CheckInactivityStatus.FAILED,
-				Collections.emptyList())
-				.withErrorMessage(jsonNode.get(ERROR_MESSAGE_FIELD).textValue());
-	}
-
-	private void selfServicePost(CheckInactivityStatusDTO statusDTO) {
-		log.debug("Send post request to self service for UUID {}, object is {}", uuid, statusDTO);
-		try {
-			selfService.post(callbackUrl, statusDTO, Response.class);
-		} catch (Exception e) {
-			log.error("Send request or response error for UUID {}: {}", uuid, e.getLocalizedMessage(), e);
-			throw new DlabException("Send request or response error for UUID " + uuid + ": "
-					+ e.getLocalizedMessage(), e);
-		}
+		return OK_STATUS_STRING.equals(status);
 	}
 
 	@Override
 	public void handleError(String errorMessage) {
-		buildCheckInactivityClustersStatusDTO(CheckInactivityStatus.FAILED, Collections.emptyList())
-				.withErrorMessage(errorMessage);
+		log.error(errorMessage);
+		selfServicePost(getFailedStatusDto(errorMessage).withErrorMessage(errorMessage));
 	}
 
 	@Override
@@ -125,13 +114,32 @@
 		return user;
 	}
 
-	private CheckInactivityStatusDTO buildCheckInactivityClustersStatusDTO(CheckInactivityStatus status,
-																		   List<EnvResource> clusters) {
-		return new CheckInactivityStatusDTO()
+	private CheckInactivityStatusDTO getOkStatusDto(JsonNode jsonNode) {
+		final CheckInactivityStatusDTO statusDTO = new CheckInactivityStatusDTO().withStatus(OK_STATUS_STRING)
+				.withRequestId(uuid);
+		statusDTO.setComputationalName(computationalName);
+		statusDTO.setExploratoryName(exploratoryName);
+		final long lastActivity = Long.parseLong(jsonNode.get(RESPONSE).get(RESULT_NODE).textValue());
+		statusDTO.setLastActivityUnixTime(lastActivity);
+		return statusDTO;
+	}
+
+	private CheckInactivityStatusDTO getFailedStatusDto(String errorMessage) {
+		return new CheckInactivityStatusDTO().withStatus(UserInstanceStatus.FAILED)
 				.withRequestId(uuid)
-				.withResources(clusters)
-				.withStatus(status)
-				.withUser(user);
+				.withErrorMessage(errorMessage);
+	}
+
+	private void selfServicePost(CheckInactivityStatusDTO statusDTO) {
+		log.debug("Send post request to self service for UUID {}, object is {}", uuid, statusDTO);
+		try {
+			selfService.post(callbackUrl, systemUserInfoService.create(user).getAccessToken(), statusDTO,
+					Response.class);
+		} catch (Exception e) {
+			log.error("Send request or response error for UUID {}: {}", uuid, e.getLocalizedMessage(), e);
+			throw new DlabException("Send request or response error for UUID " + uuid + ": "
+					+ e.getLocalizedMessage(), e);
+		}
 	}
 
 }
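The reworked handler no longer deserializes a `resources` array; it pulls a single unix-seconds timestamp out of `response.result`, the shape the mock responses below also switch to. A self-contained Jackson sketch of the same happy path:

```java
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class InactivityResponseDemo {
	public static void main(String[] args) throws Exception {
		// Payload shape taken from the mock_response files added in this change.
		String fileContent =
				"{\"status\":\"ok\",\"response\":{\"result\":\"1549464798\"},\"request_id\":\"uuid-1\"}";
		JsonNode tree = new ObjectMapper().readTree(fileContent);
		if ("ok".equals(tree.get("status").textValue())) {
			// Same extraction as getOkStatusDto(): unix time in seconds, sent as a string.
			long lastActivity = Long.parseLong(tree.get("response").get("result").textValue());
			System.out.println(lastActivity); // 1549464798
		} else {
			System.out.println(tree.get("error_message").textValue());
		}
	}
}
```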
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java
index 95ecc8f..3ba4d42 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProductionModule.java
@@ -30,6 +30,8 @@
 import com.epam.dlab.backendapi.core.response.handlers.dao.CallbackHandlerDao;
 import com.epam.dlab.backendapi.core.response.handlers.dao.FileSystemCallbackHandlerDao;
 import com.epam.dlab.backendapi.service.RestoreCallbackHandlerService;
+import com.epam.dlab.backendapi.service.CheckInactivityService;
+import com.epam.dlab.backendapi.service.impl.CheckInactivityServiceImpl;
 import com.epam.dlab.backendapi.service.impl.RestoreCallbackHandlerServiceImpl;
 import com.epam.dlab.constants.ServiceConsts;
 import com.epam.dlab.mongo.MongoService;
@@ -72,5 +74,6 @@
 		bind(ObjectMapper.class).toInstance(new ObjectMapper().configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, true));
 		bind(CallbackHandlerDao.class).to(FileSystemCallbackHandlerDao.class);
 		bind(RestoreCallbackHandlerService.class).to(RestoreCallbackHandlerServiceImpl.class);
+		bind(CheckInactivityService.class).to(CheckInactivityServiceImpl.class);
 	}
 }
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProvisioningDevModule.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProvisioningDevModule.java
index c070a76..3ec1035 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProvisioningDevModule.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/modules/ProvisioningDevModule.java
@@ -21,7 +21,6 @@
 
 import com.epam.dlab.ModuleBase;
 import com.epam.dlab.auth.SystemUserInfoService;
-import com.epam.dlab.auth.SystemUserInfoServiceImpl;
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.auth.contract.SecurityAPI;
 import com.epam.dlab.auth.dto.UserCredentialDTO;
@@ -33,6 +32,8 @@
 import com.epam.dlab.backendapi.core.response.handlers.dao.CallbackHandlerDao;
 import com.epam.dlab.backendapi.core.response.handlers.dao.FileSystemCallbackHandlerDao;
 import com.epam.dlab.backendapi.service.RestoreCallbackHandlerService;
+import com.epam.dlab.backendapi.service.CheckInactivityService;
+import com.epam.dlab.backendapi.service.impl.CheckInactivityServiceImpl;
 import com.epam.dlab.backendapi.service.impl.RestoreCallbackHandlerServiceImpl;
 import com.epam.dlab.constants.ServiceConsts;
 import com.epam.dlab.mongo.MongoService;
@@ -44,6 +45,7 @@
 import io.dropwizard.setup.Environment;
 
 import javax.ws.rs.core.Response;
+import java.util.Optional;
 
 /**
  * Mock class for an application configuration of Provisioning Service for tests.
@@ -73,11 +75,22 @@
 				.SELF_SERVICE_NAME));
 		bind(MetadataHolder.class).to(DockerWarmuper.class);
 		bind(ICommandExecutor.class).toInstance(new CommandExecutorMock(configuration.getCloudProvider()));
-		bind(SystemUserInfoService.class).to(SystemUserInfoServiceImpl.class);
+		bind(SystemUserInfoService.class).toInstance(new SystemUserInfoService() {
+			@Override
+			public Optional<UserInfo> getUser(String token) {
+				return Optional.of(getUserInfo());
+			}
+
+			@Override
+			public UserInfo create(String name) {
+				return getUserInfo();
+			}
+		});
 		bind(MongoService.class).toInstance(configuration.getMongoFactory().build(environment));
 		bind(ObjectMapper.class).toInstance(new ObjectMapper().configure(JsonParser.Feature.AUTO_CLOSE_SOURCE, true));
 		bind(CallbackHandlerDao.class).to(FileSystemCallbackHandlerDao.class);
 		bind(RestoreCallbackHandlerService.class).to(RestoreCallbackHandlerServiceImpl.class);
+		bind(CheckInactivityService.class).to(CheckInactivityServiceImpl.class);
 	}
 
 	/**
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureResource.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureResource.java
index 8f1aadf..91519ab 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureResource.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/resources/InfrastructureResource.java
@@ -20,9 +20,9 @@
 package com.epam.dlab.backendapi.resources;
 
 import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.core.commands.DockerAction;
-import com.epam.dlab.backendapi.service.impl.CheckInactivityService;
-import com.epam.dlab.dto.status.EnvResource;
+import com.epam.dlab.backendapi.service.CheckInactivityService;
+import com.epam.dlab.dto.computational.ComputationalCheckInactivityDTO;
+import com.epam.dlab.dto.exploratory.ExploratoryCheckInactivityAction;
 import com.epam.dlab.rest.contracts.InfrasctructureAPI;
 import com.google.inject.Inject;
 import io.dropwizard.auth.Auth;
@@ -30,7 +30,6 @@
 import javax.ws.rs.*;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.util.List;
 
 @Path(InfrasctructureAPI.INFRASTRUCTURE)
 @Consumes(MediaType.APPLICATION_JSON)
@@ -49,8 +48,14 @@
 	}
 
 	@POST
-	@Path("check_inactivity")
-	public String checkClusterInactivity(@Auth UserInfo ui, List<EnvResource> resources) {
-		return checkInactivityService.checkClusterAction(ui.getName(), resources, DockerAction.CHECK_INACTIVITY);
+	@Path("/exploratory/check_inactivity")
+	public String checkExploratoryInactivity(@Auth UserInfo ui, ExploratoryCheckInactivityAction dto) {
+		return checkInactivityService.checkExploratoryInactivity(ui.getName(), dto);
+	}
+
+	@POST
+	@Path("/computational/check_inactivity")
+	public String checkComputationalInactivity(@Auth UserInfo ui, ComputationalCheckInactivityDTO dto) {
+		return checkInactivityService.checkComputationalInactivity(ui.getName(), dto);
 	}
 }
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/CheckInactivityService.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/CheckInactivityService.java
new file mode 100644
index 0000000..50d8e00
--- /dev/null
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/CheckInactivityService.java
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package com.epam.dlab.backendapi.service;
+
+import com.epam.dlab.dto.computational.ComputationalCheckInactivityDTO;
+import com.epam.dlab.dto.exploratory.ExploratoryCheckInactivityAction;
+
+public interface CheckInactivityService {
+	String checkComputationalInactivity(String userName, ComputationalCheckInactivityDTO dto);
+
+	String checkExploratoryInactivity(String userName, ExploratoryCheckInactivityAction dto);
+}
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/CheckInactivityService.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/CheckInactivityService.java
deleted file mode 100644
index 19484ff..0000000
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/CheckInactivityService.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.epam.dlab.backendapi.service.impl;
-
-import com.epam.dlab.backendapi.core.Directories;
-import com.epam.dlab.backendapi.core.commands.DockerAction;
-import com.epam.dlab.backendapi.core.commands.DockerCommands;
-import com.epam.dlab.backendapi.core.commands.RunDockerCommand;
-import com.epam.dlab.backendapi.core.response.handlers.CheckInactivityCallbackHandler;
-import com.epam.dlab.dto.computational.CheckInactivityCallbackDTO;
-import com.epam.dlab.dto.status.EnvResource;
-import com.epam.dlab.rest.contracts.ApiCallbacks;
-import com.google.inject.Singleton;
-import lombok.extern.slf4j.Slf4j;
-
-import java.util.List;
-
-@Slf4j
-@Singleton
-public class CheckInactivityService extends DockerService implements DockerCommands {
-
-	private static final String CHECK_INACTIVITY_CLUSTERS_ACTION = "check_inactivity_clusters";
-
-	public String checkClusterAction(String userName, List<EnvResource> clusters, DockerAction action) {
-		log.debug("Admin {} is checking inactivity for resources...", userName);
-		log.debug("Obtained {} resources: {}", clusters.size(), clusters);
-		String uuid = getUuid();
-		CheckInactivityCallbackDTO dto = buildCallbackDTO(uuid, clusters);
-		startCallbackListener(userName, dto);
-		RunDockerCommand runDockerCommand = buildRunDockerCommand(dto, action);
-		runDockerCmd(userName, uuid, runDockerCommand, dto);
-		return uuid;
-	}
-
-	private String getUuid() {
-		return DockerCommands.generateUUID();
-	}
-
-	private void runDockerCmd(String userName, String uuid, RunDockerCommand runDockerCommand,
-							  CheckInactivityCallbackDTO callbackDto) {
-		try {
-			final String command = commandBuilder.buildCommand(runDockerCommand, callbackDto);
-			log.trace("Docker command: {}", command);
-			commandExecutor.executeAsync(userName, uuid, command);
-		} catch (Exception e) {
-			log.error("Exception occured during reuploading key: {} for command {}", e.getLocalizedMessage(),
-					runDockerCommand.toCMD());
-		}
-	}
-
-	private void startCallbackListener(String userName, CheckInactivityCallbackDTO dto) {
-		folderListenerExecutor.start(configuration.getKeyLoaderDirectory(),
-				configuration.getKeyLoaderPollTimeout(),
-				new CheckInactivityCallbackHandler(
-						selfService, ApiCallbacks.CHECK_INACTIVITY_CLUSTERS_URI, userName, dto.getId()));
-	}
-
-	@Override
-	public String getResourceType() {
-		return Directories.EDGE_LOG_DIRECTORY;
-	}
-
-	private RunDockerCommand buildRunDockerCommand(CheckInactivityCallbackDTO callbackDto,
-												   DockerAction action) {
-		return new RunDockerCommand()
-				.withInteractive()
-				.withName(getContainerName(callbackDto))
-				.withVolumeForRootKeys(configuration.getKeyDirectory())
-				.withVolumeForResponse(configuration.getKeyLoaderDirectory())
-				.withVolumeForLog(configuration.getDockerLogDirectory(), getResourceType())
-				.withResource("computational")
-				.withRequestId(callbackDto.getId())
-				.withConfKeyName(configuration.getAdminKey())
-				.withImage(configuration.getEdgeImage())
-				.withAction(action);
-	}
-
-	private CheckInactivityCallbackDTO buildCallbackDTO(String uuid, List<EnvResource> clusters) {
-		return new CheckInactivityCallbackDTO()
-				.withId(uuid)
-				.withClusters(clusters);
-	}
-
-	private String getContainerName(CheckInactivityCallbackDTO callbackDto) {
-		return nameContainer(callbackDto.getId(), CHECK_INACTIVITY_CLUSTERS_ACTION);
-	}
-}
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/CheckInactivityServiceImpl.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/CheckInactivityServiceImpl.java
new file mode 100644
index 0000000..9677005
--- /dev/null
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/CheckInactivityServiceImpl.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package com.epam.dlab.backendapi.service.impl;
+
+import com.epam.dlab.backendapi.core.Directories;
+import com.epam.dlab.backendapi.core.commands.DockerAction;
+import com.epam.dlab.backendapi.core.commands.DockerCommands;
+import com.epam.dlab.backendapi.core.commands.RunDockerCommand;
+import com.epam.dlab.backendapi.core.response.handlers.CheckInactivityCallbackHandler;
+import com.epam.dlab.backendapi.service.CheckInactivityService;
+import com.epam.dlab.dto.ResourceBaseDTO;
+import com.epam.dlab.dto.base.DataEngineType;
+import com.epam.dlab.dto.computational.ComputationalCheckInactivityDTO;
+import com.epam.dlab.dto.exploratory.ExploratoryCheckInactivityAction;
+import com.epam.dlab.rest.contracts.ApiCallbacks;
+import com.google.inject.Singleton;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+@Singleton
+public class CheckInactivityServiceImpl extends DockerService implements CheckInactivityService, DockerCommands {
+
+
+	@Override
+	public String checkComputationalInactivity(String userName, ComputationalCheckInactivityDTO dto) {
+		String uuid = DockerCommands.generateUUID();
+		startComputationalCallbackListener(userName, dto, uuid);
+		final RunDockerCommand dockerCommand = new RunDockerCommand()
+				.withInteractive()
+				.withRemove()
+				.withName(nameContainer(uuid, DockerAction.CHECK_INACTIVITY.toString()))
+				.withVolumeForRootKeys(configuration.getKeyDirectory())
+				.withVolumeForResponse(configuration.getKeyLoaderDirectory())
+				.withVolumeForLog(configuration.getDockerLogDirectory(), getResourceType())
+				.withResource(DataEngineType.fromDockerImageName(dto.getImage()) == DataEngineType.SPARK_STANDALONE ?
+						Directories.DATA_ENGINE_LOG_DIRECTORY :
+						Directories.DATA_ENGINE_SERVICE_LOG_DIRECTORY)
+				.withRequestId(uuid)
+				.withConfKeyName(configuration.getAdminKey())
+				.withImage(dto.getNotebookImage())
+				.withAction(DockerAction.CHECK_INACTIVITY);
+		runDockerCmd(userName, uuid, dockerCommand, dto);
+		return uuid;
+	}
+
+	@Override
+	public String checkExploratoryInactivity(String userName, ExploratoryCheckInactivityAction dto) {
+		String uuid = DockerCommands.generateUUID();
+		startExploratoryCallbackListener(userName, dto, uuid);
+		final RunDockerCommand dockerCommand = new RunDockerCommand()
+				.withInteractive()
+				.withRemove()
+				.withName(nameContainer(uuid, DockerAction.CHECK_INACTIVITY.toString()))
+				.withVolumeForRootKeys(configuration.getKeyDirectory())
+				.withVolumeForResponse(configuration.getKeyLoaderDirectory())
+				.withVolumeForLog(configuration.getDockerLogDirectory(), getResourceType())
+				.withResource(Directories.NOTEBOOK_LOG_DIRECTORY)
+				.withRequestId(uuid)
+				.withConfKeyName(configuration.getAdminKey())
+				.withImage(dto.getNotebookImage())
+				.withAction(DockerAction.CHECK_INACTIVITY);
+		runDockerCmd(userName, uuid, dockerCommand, dto);
+		return uuid;
+	}
+
+	private void startComputationalCallbackListener(String userName, ComputationalCheckInactivityDTO dto,
+													String uuid) {
+		final CheckInactivityCallbackHandler handler = new CheckInactivityCallbackHandler(
+				selfService, sysUserService, ApiCallbacks.CHECK_INACTIVITY_COMPUTATIONAL_URI, userName, uuid,
+				dto.getExploratoryName(), dto.getComputationalName());
+		folderListenerExecutor.start(configuration.getKeyLoaderDirectory(),
+				configuration.getKeyLoaderPollTimeout(), handler);
+	}
+
+	private void startExploratoryCallbackListener(String userName, ExploratoryCheckInactivityAction dto, String uuid) {
+		final CheckInactivityCallbackHandler handler = new CheckInactivityCallbackHandler(
+				selfService, sysUserService, ApiCallbacks.CHECK_INACTIVITY_EXPLORATORY_URI, userName, uuid,
+				dto.getExploratoryName());
+		folderListenerExecutor.start(configuration.getKeyLoaderDirectory(),
+				configuration.getKeyLoaderPollTimeout(), handler);
+	}
+
+	private void runDockerCmd(String userName, String uuid, RunDockerCommand dockerCmd, ResourceBaseDTO<?> dto) {
+		try {
+			final String command = commandBuilder.buildCommand(dockerCmd, dto);
+			log.trace("Docker command: {}", command);
+			commandExecutor.executeAsync(userName, uuid, command);
+		} catch (Exception e) {
+			log.error("Exception occured during reuploading key: {} for command {}", e.getLocalizedMessage(),
+					dockerCmd.toCMD());
+		}
+	}
+
+	@Override
+	public String getResourceType() {
+		return Directories.NOTEBOOK_LOG_DIRECTORY;
+	}
+}
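One detail worth noting in `checkComputationalInactivity()`: the cluster's docker image name, not a separate flag, decides which log directory the container mounts. The same ternary in isolation; the image value is hypothetical:

```java
import com.epam.dlab.backendapi.core.Directories;
import com.epam.dlab.dto.base.DataEngineType;

public class LogDirRoutingDemo {
	public static void main(String[] args) {
		String image = "docker.dlab-dataengine"; // hypothetical cluster image
		// SPARK_STANDALONE clusters log under the dataengine directory,
		// everything else under the dataengine-service directory.
		String resource = DataEngineType.fromDockerImageName(image) == DataEngineType.SPARK_STANDALONE
				? Directories.DATA_ENGINE_LOG_DIRECTORY
				: Directories.DATA_ENGINE_SERVICE_LOG_DIRECTORY;
		System.out.println(resource);
	}
}
```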
diff --git a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/DockerService.java b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/DockerService.java
index 98e89b6..8e984de 100644
--- a/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/DockerService.java
+++ b/services/provisioning-service/src/main/java/com/epam/dlab/backendapi/service/impl/DockerService.java
@@ -19,6 +19,7 @@
 
 package com.epam.dlab.backendapi.service.impl;
 
+import com.epam.dlab.auth.SystemUserInfoService;
 import com.epam.dlab.backendapi.ProvisioningServiceApplicationConfiguration;
 import com.epam.dlab.backendapi.core.commands.CommandBuilder;
 import com.epam.dlab.backendapi.core.commands.ICommandExecutor;
@@ -38,5 +39,7 @@
     protected CommandBuilder commandBuilder;
     @Inject
     protected RESTService selfService;
+    @Inject
+    protected SystemUserInfoService sysUserService;
 
 }
diff --git a/services/provisioning-service/src/main/resources/mock_response/aws/dataengine-service_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/aws/dataengine-service_check_inactivity.json
new file mode 100644
index 0000000..7b28898
--- /dev/null
+++ b/services/provisioning-service/src/main/resources/mock_response/aws/dataengine-service_check_inactivity.json
@@ -0,0 +1,8 @@
+{
+  "status": "ok",
+  "response": {
+    "result": "1549464798"
+  },
+  "request_id" : "${REQUEST_ID}"
+}
+
diff --git a/services/provisioning-service/src/main/resources/mock_response/aws/dataengine_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/aws/dataengine_check_inactivity.json
new file mode 100644
index 0000000..39bb182
--- /dev/null
+++ b/services/provisioning-service/src/main/resources/mock_response/aws/dataengine_check_inactivity.json
@@ -0,0 +1,8 @@
+{
+  "status": "ok",
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id" : "${REQUEST_ID}"
+}
+
diff --git a/services/provisioning-service/src/main/resources/mock_response/aws/notebook_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/aws/notebook_check_inactivity.json
index ba941c6..af3320b 100644
--- a/services/provisioning-service/src/main/resources/mock_response/aws/notebook_check_inactivity.json
+++ b/services/provisioning-service/src/main/resources/mock_response/aws/notebook_check_inactivity.json
@@ -1,14 +1,8 @@
 {
   "status": "ok",
-  "request_id": "${REQUEST_ID}",
-  "resources": [
-    {
-      "id": "i-e649463cf6fd461bb",
-      "status": "running",
-      "name": "jup_sc",
-      "resourceType": "COMPUTATIONAL",
-      "lastActivity": 1575842400000
-    }
-  ]
+  "response": {
+    "result": "1549464798",
+    "log": "/var/log/dlab/notebook/notebook_bohdan_hliva_eb30e2bb-28db-4b07-bf16-e634d448952f.log"
+  },
+  "request_id": "${REQUEST_ID}"
 }
-
diff --git a/services/provisioning-service/src/main/resources/mock_response/azure/dataengine-service_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/azure/dataengine-service_check_inactivity.json
new file mode 100644
index 0000000..39bb182
--- /dev/null
+++ b/services/provisioning-service/src/main/resources/mock_response/azure/dataengine-service_check_inactivity.json
@@ -0,0 +1,8 @@
+{
+  "status": "ok",
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id" : "${REQUEST_ID}"
+}
+
diff --git a/services/provisioning-service/src/main/resources/mock_response/azure/dataengine_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/azure/dataengine_check_inactivity.json
new file mode 100644
index 0000000..39bb182
--- /dev/null
+++ b/services/provisioning-service/src/main/resources/mock_response/azure/dataengine_check_inactivity.json
@@ -0,0 +1,8 @@
+{
+  "status": "ok",
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id" : "${REQUEST_ID}"
+}
+
diff --git a/services/provisioning-service/src/main/resources/mock_response/azure/notebook_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/azure/notebook_check_inactivity.json
index 0cf2f32..e90176b 100644
--- a/services/provisioning-service/src/main/resources/mock_response/azure/notebook_check_inactivity.json
+++ b/services/provisioning-service/src/main/resources/mock_response/azure/notebook_check_inactivity.json
@@ -1,21 +1,7 @@
 {
   "status": "ok",
-  "request_id": "${REQUEST_ID}",
-  "resources": [
-    {
-      "id": "dev-test-dataengine-jup_1-sp_1",
-      "status": "running",
-      "name": "sp_1",
-      "resourceType": "COMPUTATIONAL",
-      "lastActivity": 1533652097582
-    },
-    {
-      "id": "dev-test-des-jup_1-emr_1-da3cd",
-      "status": "running",
-      "name": "emr_1",
-      "resourceType": "COMPUTATIONAL",
-      "lastActivity": 1533652114629
-    }
-  ]
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id": "${REQUEST_ID}"
 }
-
diff --git a/services/provisioning-service/src/main/resources/mock_response/gcp/dataengine-service_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/gcp/dataengine-service_check_inactivity.json
new file mode 100644
index 0000000..39bb182
--- /dev/null
+++ b/services/provisioning-service/src/main/resources/mock_response/gcp/dataengine-service_check_inactivity.json
@@ -0,0 +1,8 @@
+{
+  "status": "ok",
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id" : "${REQUEST_ID}"
+}
+
diff --git a/services/provisioning-service/src/main/resources/mock_response/gcp/dataengine_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/gcp/dataengine_check_inactivity.json
new file mode 100644
index 0000000..39bb182
--- /dev/null
+++ b/services/provisioning-service/src/main/resources/mock_response/gcp/dataengine_check_inactivity.json
@@ -0,0 +1,8 @@
+{
+  "status": "ok",
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id" : "${REQUEST_ID}"
+}
+
diff --git a/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_check_inactivity.json b/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_check_inactivity.json
index 0cf2f32..6b96209 100644
--- a/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_check_inactivity.json
+++ b/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_check_inactivity.json
@@ -1,21 +1,7 @@
 {
   "status": "ok",
-  "request_id": "${REQUEST_ID}",
-  "resources": [
-    {
-      "id": "dev-test-dataengine-jup_1-sp_1",
-      "status": "running",
-      "name": "sp_1",
-      "resourceType": "COMPUTATIONAL",
-      "lastActivity": 1533652097582
-    },
-    {
-      "id": "dev-test-des-jup_1-emr_1-da3cd",
-      "status": "running",
-      "name": "emr_1",
-      "resourceType": "COMPUTATIONAL",
-      "lastActivity": 1533652114629
-    }
-  ]
+  "response": {
+    "result": "1548253841"
+  },
+  "request_id" : "${REQUEST_ID}"
 }
-
diff --git a/services/self-service/self-service.yml b/services/self-service/self-service.yml
index 8ae3c02..809d04d 100644
--- a/services/self-service/self-service.yml
+++ b/services/self-service/self-service.yml
@@ -78,8 +78,8 @@
       archivedFileCount: 10
   rootPath: "/api"
   applicationConnectors:
-  #    - type: http
-  #      port: 8080
+#    - type: http
+#      port: 8080
   - type: https
     port: 8443
     certAlias: dlab
@@ -89,8 +89,8 @@
     trustStorePath: ${TRUST_STORE_PATH}
     trustStorePassword: ${TRUST_STORE_PASSWORD}
   adminConnectors:
-  #    - type: http
-  #      port: 8081
+#    - type: http
+#      port: 8081
   - type: https
     port: 8444
     certAlias: dlab
@@ -107,7 +107,6 @@
   loggers:
     com.epam: TRACE
     com.novemberain: ERROR
-    org.apache.guacamole: TRACE
   appenders:
 <#if DEV_MODE == "true">
   - type: console
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ComputationalDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ComputationalDAO.java
index b91e204..907be1b 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ComputationalDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ComputationalDAO.java
@@ -69,10 +69,9 @@
 		return COMPUTATIONAL_RESOURCES + FIELD_SET_DELIMETER + fieldName;
 	}
 
-	private static Bson computationalCondition(String user, String exploratoryFieldValue,
-											   String compResourceFieldValue) {
-		return and(eq(USER, user), eq(EXPLORATORY_NAME, exploratoryFieldValue),
-				eq(COMPUTATIONAL_RESOURCES + "." + COMPUTATIONAL_NAME, compResourceFieldValue));
+	private static Bson computationalCondition(String user, String exploratoryName, String compName) {
+		return and(eq(USER, user), eq(EXPLORATORY_NAME, exploratoryName),
+				eq(COMPUTATIONAL_RESOURCES + "." + COMPUTATIONAL_NAME, compName));
 	}
 
 	/**
@@ -427,9 +426,10 @@
 				Objects.isNull(dto) ? null : convertToBson(dto));
 	}
 
-
-	public void updateLastActivityDateForInstanceId(String instanceId, LocalDateTime lastActivity) {
-		updateOne(USER_INSTANCES, eq(computationalFieldFilter(COMPUTATIONAL_ID), instanceId),
+	public void updateLastActivity(String user, String exploratoryName,
+								   String computationalName, LocalDateTime lastActivity) {
+		updateOne(USER_INSTANCES,
+				computationalCondition(user, exploratoryName, computationalName),
 				set(computationalFieldFilter(COMPUTATIONAL_LAST_ACTIVITY),
 						Date.from(lastActivity.atZone(ZoneId.systemDefault()).toInstant())));
 	}
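`updateLastActivity()` is now keyed by the (user, exploratory, computational) triple rather than by instance id, with the positional `$` in `computationalFieldFilter` targeting the matched array element. A sketch of the equivalent filter; the field literals are assumed values of the constants used above:

```java
import static com.mongodb.client.model.Filters.and;
import static com.mongodb.client.model.Filters.eq;

import org.bson.conversions.Bson;

public class ComputationalConditionDemo {
	public static void main(String[] args) {
		// Mirrors computationalCondition(); names and field literals are assumptions.
		Bson condition = and(eq("user", "test_user"),
				eq("exploratory_name", "jup_1"),
				eq("computational_resources.computational_name", "sp_1"));
		System.out.println(condition);
	}
}
```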
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java
index 703586a..2fb7b9a 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/EnvDAO.java
@@ -28,6 +28,7 @@
 import com.epam.dlab.backendapi.resources.dto.HealthStatusPageDTO;
 import com.epam.dlab.backendapi.resources.dto.HealthStatusResource;
 import com.epam.dlab.cloud.CloudProvider;
+import com.epam.dlab.dto.UserInstanceDTO;
 import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.dto.base.DataEngineType;
 import com.epam.dlab.dto.status.EnvResource;
@@ -105,10 +106,10 @@
 	}
 
 	@SuppressWarnings("unchecked")
-	public List<EnvResource> findRunningResourcesForCheckInactivity() {
+	public List<UserInstanceDTO> findRunningResourcesForCheckInactivity() {
 		return stream(find(USER_INSTANCES, or(eq(STATUS, RUNNING.toString()),
 				elemMatch(COMPUTATIONAL_RESOURCES, eq(STATUS, RUNNING.toString())))))
-				.flatMap(ui -> getRunningEnvResources(ui).stream())
+				.map(d -> convertFromDocument(d, UserInstanceDTO.class))
 				.collect(Collectors.toList());
 	}
 
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
index becfb04..ea3cef0 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/ExploratoryDAO.java
@@ -203,32 +203,13 @@
 				.collect(Collectors.toList());
 	}
 
-	/**
-	 * Finds and returns the info about all exploratories in database.
-	 **/
-	public List<UserInstanceDTO> getInstancesByComputationalIdsAndStatus(List<String> ids, UserInstanceStatus status) {
-		return instancesByCompResourceIds(and(in(INSTANCE_ID, ids), eq(STATUS, status.toString())));
+	public void updateLastActivity(String user, String exploratoryName, LocalDateTime lastActivity) {
+		updateOne(USER_INSTANCES, and(eq(USER, user), eq(EXPLORATORY_NAME, exploratoryName)),
+				set(EXPLORATORY_LAST_ACTIVITY, toDate(lastActivity)));
 	}
 
-	public List<UserInstanceDTO> getInstancesByIdsAndStatus(List<String> ids, UserInstanceStatus status) {
-		return stream(getCollection(USER_INSTANCES)
-				.find(and(in(INSTANCE_ID, ids), eq(STATUS, status.toString())))
-				.projection(fields(exclude(COMPUTATIONAL_RESOURCES))))
-				.map(d -> convertFromDocument(d, UserInstanceDTO.class))
-				.collect(Collectors.toList());
-	}
-
-	public void updateLastActivityDateForInstanceId(String instanceId, LocalDateTime lastActivity) {
-		updateOne(USER_INSTANCES, eq(INSTANCE_ID, instanceId),
-				set(EXPLORATORY_LAST_ACTIVITY, Date.from(lastActivity.atZone(ZoneId.systemDefault()).toInstant())));
-	}
-
-	private List<UserInstanceDTO> instancesByCompResourceIds(Bson compCondition) {
-		return stream(getCollection(USER_INSTANCES)
-				.find(com.mongodb.client.model.Filters.elemMatch(COMPUTATIONAL_RESOURCES, compCondition))
-				.projection(include(COMPUTATIONAL_RESOURCES + ".$", EXPLORATORY_NAME, USER)))
-				.map(d -> convertFromDocument(d, UserInstanceDTO.class))
-				.collect(Collectors.toList());
+	private Date toDate(LocalDateTime lastActivity) {
+		return Date.from(lastActivity.atZone(ZoneId.systemDefault()).toInstant());
 	}
 
 	/**
@@ -407,6 +388,10 @@
 			values.append(EXPLORATORY_ID, dto.getExploratoryId());
 		}
 
+		if (dto.getLastActivity() != null) {
+			values.append(EXPLORATORY_LAST_ACTIVITY, dto.getLastActivity());
+		}
+
 		if (dto.getResourceUrl() != null) {
 			values.append(EXPLORATORY_URL, dto.getResourceUrl().stream()
 					.map(url -> {
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/SchedulerJobDAO.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/SchedulerJobDAO.java
index 0277ff8..99f6b1b 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/SchedulerJobDAO.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/dao/SchedulerJobDAO.java
@@ -53,9 +53,13 @@
 public class SchedulerJobDAO extends BaseDAO {
 
 	static final String SCHEDULER_DATA = "scheduler_data";
+	private static final String CONSIDER_INACTIVITY_FLAG = SCHEDULER_DATA + ".consider_inactivity";
+	public static final String TIMEZONE_PREFIX = "UTC";
+	private static final String LAST_ACTIVITY = "last_activity";
 	private static final String CHECK_INACTIVITY_REQUIRED = "check_inactivity_required";
 	private static final String CHECK_INACTIVITY_FLAG = SCHEDULER_DATA + "." + CHECK_INACTIVITY_REQUIRED;
 
+
 	public SchedulerJobDAO() {
 		log.info("{} is initialized", getClass().getSimpleName());
 	}
@@ -121,6 +125,23 @@
 				.collect(toList());
 	}
 
+	public List<SchedulerJobData> getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(UserInstanceStatus status,
+																								  Date lastActivity) {
+		return stream(find(USER_INSTANCES,
+				and(
+						eq(STATUS, status.toString()),
+						schedulerNotNullCondition(),
+						eq(CONSIDER_INACTIVITY_FLAG, true),
+						or(eq(COMPUTATIONAL_RESOURCES, Collections.emptyList()),
+								and(ne(COMPUTATIONAL_RESOURCES, Collections.emptyList()),
+										Filters.elemMatch(COMPUTATIONAL_RESOURCES,
+												lte(LAST_ACTIVITY, lastActivity))))
+				),
+				fields(excludeId(), include(USER, EXPLORATORY_NAME, SCHEDULER_DATA))))
+				.map(d -> convertFromDocument(d, SchedulerJobData.class))
+				.collect(toList());
+	}
+
 	public List<SchedulerJobData> getExploratorySchedulerDataWithOneOfStatus(UserInstanceStatus... statuses) {
 		FindIterable<Document> userInstances = userInstancesWithScheduler(in(STATUS,
 				Arrays.stream(statuses).map(UserInstanceStatus::toString).collect(toList())));
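The new query selects exploratories that are in the given status, carry scheduler data, have opted into `consider_inactivity`, and either own no clusters at all or have at least one cluster whose `last_activity` is not newer than the cutoff. A hedged call-site sketch; `maxInactivityMinutes` and the return element type are assumptions, since the actual caller sits outside this hunk:

```java
import com.epam.dlab.backendapi.dao.SchedulerJobDAO;
import com.epam.dlab.dto.UserInstanceStatus;

import java.time.LocalDateTime;
import java.time.ZoneId;
import java.util.Date;
import java.util.List;

public class IdleCandidatesDemo {
	private final SchedulerJobDAO schedulerJobDAO = new SchedulerJobDAO(); // normally injected

	List<?> idleCandidates(long maxInactivityMinutes) {
		// "Idle for at least maxInactivityMinutes" expressed as a last_activity cutoff.
		Date cutoff = Date.from(LocalDateTime.now().minusMinutes(maxInactivityMinutes)
				.atZone(ZoneId.systemDefault()).toInstant());
		return schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(
				UserInstanceStatus.RUNNING, cutoff);
	}
}
```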
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java
index 3e3819c..bd4a5ab 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/modules/DevModule.java
@@ -21,7 +21,6 @@
 
 import com.epam.dlab.ModuleBase;
 import com.epam.dlab.auth.SystemUserInfoService;
-import com.epam.dlab.auth.SystemUserInfoServiceImpl;
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.auth.contract.SecurityAPI;
 import com.epam.dlab.auth.dto.UserCredentialDTO;
@@ -41,6 +40,7 @@
 
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import java.util.Optional;
 
 /**
  * Mock class for an application configuration of SelfService for developer mode.
@@ -77,7 +77,18 @@
 		bind(BackupDao.class).to(BackupDaoImpl.class);
 		bind(ExploratoryService.class).to(ExploratoryServiceImpl.class);
 		bind(InactivityService.class).to(InactivityServiceImpl.class);
-		bind(SystemUserInfoService.class).to(SystemUserInfoServiceImpl.class);
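+		// Dev-mode stub: any token or user name resolves to the same predefined user info.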
+		bind(SystemUserInfoService.class).toInstance(new SystemUserInfoService() {
+			@Override
+			public Optional<UserInfo> getUser(String token) {
+				return Optional.of(getUserInfo());
+			}
+
+			@Override
+			public UserInfo create(String name) {
+				return getUserInfo();
+			}
+		});
 		bind(Authorizer.class).to(SelfServiceSecurityAuthorizer.class);
 		bind(AccessKeyService.class).to(AccessKeyServiceImpl.class);
 		bind(GitCredentialService.class).to(GitCredentialServiceImpl.class);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/CheckInactivityCallback.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/CheckInactivityCallback.java
index b5eeb15..f7a9bd9 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/CheckInactivityCallback.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/CheckInactivityCallback.java
@@ -18,12 +18,12 @@
  */
 package com.epam.dlab.backendapi.resources.callback;
 
+import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.domain.RequestId;
 import com.epam.dlab.backendapi.service.InactivityService;
 import com.epam.dlab.dto.computational.CheckInactivityStatusDTO;
-import com.epam.dlab.dto.status.EnvResource;
-import com.epam.dlab.model.ResourceType;
 import com.google.inject.Inject;
+import io.dropwizard.auth.Auth;
 import lombok.extern.slf4j.Slf4j;
 
 import javax.ws.rs.Consumes;
@@ -31,8 +31,10 @@
 import javax.ws.rs.Path;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
-import java.util.List;
-import java.util.stream.Collectors;
+import java.time.LocalDateTime;
+
+import static java.time.Instant.ofEpochSecond;
+import static java.time.ZoneId.systemDefault;
 
 @Path("/infrastructure/inactivity/callback")
 @Consumes(MediaType.APPLICATION_JSON)
@@ -45,28 +47,25 @@
 	private InactivityService inactivityService;
 
 	@POST
-	public Response checkInactiveClusterResponse(CheckInactivityStatusDTO dto) {
+	@Path("exploratory")
+	public Response updateExploratoryLastActivity(@Auth UserInfo userInfo, CheckInactivityStatusDTO dto) {
 		requestId.checkAndRemove(dto.getRequestId());
-		stopClustersByInactivity(dto);
-		stopExploratoryByInactivity(dto);
+		inactivityService.updateLastActivityForExploratory(userInfo, dto.getExploratoryName(),
+				toLocalDateTime(dto.getLastActivityUnixTime()));
 		return Response.ok().build();
 	}
 
-	private void stopClustersByInactivity(CheckInactivityStatusDTO dto) {
-		final List<EnvResource> clusters = getResources(dto, ResourceType.COMPUTATIONAL);
-		inactivityService.stopClustersByInactivity(clusters.stream().map(EnvResource::getId).collect(Collectors.toList()));
-		inactivityService.updateLastActivityForClusters(clusters);
+	@POST
+	@Path("computational")
+	public Response updateComputationalLastActivity(@Auth UserInfo userInfo, CheckInactivityStatusDTO dto) {
+		requestId.checkAndRemove(dto.getRequestId());
+		inactivityService.updateLastActivityForComputational(userInfo, dto.getExploratoryName(),
+				dto.getComputationalName(), toLocalDateTime(dto.getLastActivityUnixTime()));
+		return Response.ok().build();
 	}
 
-	private void stopExploratoryByInactivity(CheckInactivityStatusDTO dto) {
-		final List<EnvResource> exploratories = getResources(dto, ResourceType.EXPLORATORY);
-		inactivityService.stopByInactivity(exploratories);
-		inactivityService.updateLastActivity(exploratories);
-	}
-
-	private List<EnvResource> getResources(CheckInactivityStatusDTO dto, ResourceType resourceType) {
-		return dto.getResources().stream()
-				.filter(r -> r.getResourceType() == resourceType)
-				.collect(Collectors.toList());
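+	/** Converts epoch seconds reported by the provisioning service to a LocalDateTime in the system default zone. */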
+	private LocalDateTime toLocalDateTime(long unixTime) {
+		return ofEpochSecond(unixTime).atZone(systemDefault()).toLocalDateTime();
 	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ComputationalCallback.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ComputationalCallback.java
index 105ec08..1f2294a 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ComputationalCallback.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ComputationalCallback.java
@@ -23,9 +23,9 @@
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.dao.ComputationalDAO;
 import com.epam.dlab.backendapi.domain.RequestId;
-import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.backendapi.service.ComputationalService;
 import com.epam.dlab.backendapi.service.ReuploadKeyService;
+import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.dto.computational.ComputationalStatusDTO;
 import com.epam.dlab.dto.computational.UserComputationalResource;
 import com.epam.dlab.exceptions.DlabException;
@@ -41,6 +41,7 @@
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import java.util.Date;
 
 import static com.epam.dlab.dto.UserInstanceStatus.RUNNING;
 
@@ -50,8 +51,8 @@
 @Slf4j
 public class ComputationalCallback {
 
-    @Inject
-    private ComputationalDAO computationalDAO;
+	@Inject
+	private ComputationalDAO computationalDAO;
 	@Inject
 	private RequestId requestId;
 	@Inject
@@ -61,18 +62,18 @@
 	@Inject
 	private ComputationalService computationalService;
 
-    /**
-     * Updates the status of the computational resource for user.
-     *
-     * @param dto DTO info about the status of the computational resource.
-     * @return 200 OK - if request success otherwise throws exception.
-     */
-    @POST
-    @Path(ApiCallbacks.STATUS_URI)
-    public Response status(@Auth UserInfo ui, ComputationalStatusDTO dto) {
+	/**
+	 * Updates the status of the computational resource for user.
+	 *
+	 * @param dto DTO info about the status of the computational resource.
+	 * @return 200 OK - if request success otherwise throws exception.
+	 */
+	@POST
+	@Path(ApiCallbacks.STATUS_URI)
+	public Response status(@Auth UserInfo ui, ComputationalStatusDTO dto) {
 		log.debug("Updating status for computational resource {} for user {}: {}",
 				dto.getComputationalName(), dto.getUser(), dto);
-        String uuid = dto.getRequestId();
+		String uuid = dto.getRequestId();
 		requestId.checkAndRemove(uuid);
 
 		UserComputationalResource compResource = computationalService.getComputationalResource(dto.getUser(),
@@ -83,13 +84,13 @@
 		log.debug("Current status for computational resource {} of exploratory environment {} for user {} is {}",
 				dto.getComputationalName(), dto.getExploratoryName(), dto.getUser(),
 				compResource.getStatus());
-        try {
-            computationalDAO.updateComputationalFields(dto);
-        } catch (DlabException e) {
-            log.error("Could not update status for computational resource {} for user {} to {}: {}", dto, e);
-            throw e;
-        }
-        if (UserInstanceStatus.CONFIGURING == UserInstanceStatus.of(dto.getStatus())) {
+		try {
+			computationalDAO.updateComputationalFields(dto.withLastActivity(new Date()));
+		} catch (DlabException e) {
+			log.error("Could not update status for computational resource {} for user {}: {}", dto.getComputationalName(), dto.getUser(), e.getLocalizedMessage(), e);
+			throw e;
+		}
+		if (UserInstanceStatus.CONFIGURING == UserInstanceStatus.of(dto.getStatus())) {
 			log.debug("Waiting for configuration of the computational resource {} for user {}",
 					dto.getComputationalName(), dto.getUser());
 			requestId.put(dto.getUser(), uuid);
@@ -99,6 +100,6 @@
 			UserInfo userInfo = systemUserService.create(dto.getUser());
 			reuploadKeyService.reuploadKeyAction(userInfo, resourceData);
 		}
-        return Response.ok().build();
-    }
+		return Response.ok().build();
+	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ExploratoryCallback.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ExploratoryCallback.java
index 0978998..eeab58c 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ExploratoryCallback.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/resources/callback/ExploratoryCallback.java
@@ -42,6 +42,7 @@
 import javax.ws.rs.Produces;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import java.util.Date;
 
 import static com.epam.dlab.dto.UserInstanceStatus.*;
 
@@ -89,7 +90,7 @@
 				dto.getExploratoryName(), dto.getUser(), currentStatus);
 
 		try {
-			exploratoryDAO.updateExploratoryFields(dto);
+			exploratoryDAO.updateExploratoryFields(dto.withLastActivity(new Date()));
 			if (currentStatus == TERMINATING) {
 				updateComputationalStatuses(dto.getUser(), dto.getExploratoryName(),
 						UserInstanceStatus.of(dto.getStatus()));
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/CheckInactivityScheduledJob.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/CheckInactivityScheduledJob.java
index 05c2b77..08bbaed 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/CheckInactivityScheduledJob.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/schedulers/CheckInactivityScheduledJob.java
@@ -19,9 +19,8 @@
 package com.epam.dlab.backendapi.schedulers;
 
 import com.epam.dlab.auth.SystemUserInfoService;
-import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.schedulers.internal.Scheduled;
-import com.epam.dlab.backendapi.service.SchedulerJobService;
+import com.epam.dlab.backendapi.service.InactivityService;
 import com.google.inject.Inject;
 import lombok.extern.slf4j.Slf4j;
 import org.quartz.Job;
@@ -37,18 +36,15 @@
 @Scheduled("inactivity")
 public class CheckInactivityScheduledJob implements Job {
 
-	private static final String SCHEDULER_USER = "scheduler_user";
-
 	@Inject
-	private SchedulerJobService schedulerJobService;
+	private InactivityService inactivityService;
 
 	@Inject
 	private SystemUserInfoService systemUserInfoService;
 
 	@Override
 	public void execute(JobExecutionContext context) {
-		UserInfo userInfo = systemUserInfoService.create(SCHEDULER_USER);
-		log.info("Starting check inactivity cluster job on behalf of {}...", SCHEDULER_USER);
-		schedulerJobService.updateRunningResourcesLastActivity(userInfo);
+		log.trace("Starting check inactivity job");
+		inactivityService.updateRunningResourcesLastActivity();
 	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InactivityService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InactivityService.java
index 6dd76f1..7b5cd44 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InactivityService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/InactivityService.java
@@ -18,18 +18,20 @@
  */
 package com.epam.dlab.backendapi.service;
 
-import com.epam.dlab.dto.status.EnvResource;
+import com.epam.dlab.auth.UserInfo;
 
-import java.util.List;
+import java.time.LocalDateTime;
 
 public interface InactivityService {
 
-    void stopClustersByInactivity(List<String> computationalIds);
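+	/**
+	 * Requests an inactivity check from the provisioning service for every
+	 * running exploratory and computational resource.
+	 */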
+	void updateRunningResourcesLastActivity();
 
-    void updateLastActivityForClusters(List<EnvResource> clusters);
+	void updateLastActivityForExploratory(UserInfo userInfo, String exploratoryName, LocalDateTime lastActivity);
 
-
-    void stopByInactivity(List<EnvResource> exploratories);
-
-    void updateLastActivity(List<EnvResource> exploratories);
+	void updateLastActivityForComputational(UserInfo userInfo, String exploratoryName,
+											String computationalName, LocalDateTime lastActivity);
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SchedulerJobService.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SchedulerJobService.java
index 5fdbdd2..2e25611 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SchedulerJobService.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/SchedulerJobService.java
@@ -80,11 +80,6 @@
 
 	void terminateComputationalByScheduler();
 
-	/**
-	 * Executes check cluster inactivity job for all running resources.
-	 */
-	void updateRunningResourcesLastActivity(UserInfo userInfo);
-
 	void removeScheduler(String user, String exploratoryName);
 
 	void removeScheduler(String user, String exploratoryName, String computationalName);
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImpl.java
index d81e017..8184683 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImpl.java
@@ -19,6 +19,7 @@
 package com.epam.dlab.backendapi.service.impl;
 
 import com.epam.dlab.auth.SystemUserInfoService;
+import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.dao.ComputationalDAO;
 import com.epam.dlab.backendapi.dao.EnvDAO;
 import com.epam.dlab.backendapi.dao.ExploratoryDAO;
@@ -26,25 +27,20 @@
 import com.epam.dlab.backendapi.service.ComputationalService;
 import com.epam.dlab.backendapi.service.ExploratoryService;
 import com.epam.dlab.backendapi.service.InactivityService;
+import com.epam.dlab.backendapi.util.RequestBuilder;
 import com.epam.dlab.constants.ServiceConsts;
-import com.epam.dlab.dto.SchedulerJobDTO;
 import com.epam.dlab.dto.UserInstanceDTO;
-import com.epam.dlab.dto.base.DataEngineType;
+import com.epam.dlab.dto.UserInstanceStatus;
+import com.epam.dlab.dto.computational.ComputationalCheckInactivityDTO;
 import com.epam.dlab.dto.computational.UserComputationalResource;
-import com.epam.dlab.dto.status.EnvResource;
+import com.epam.dlab.dto.exploratory.ExploratoryCheckInactivityAction;
 import com.epam.dlab.rest.client.RESTService;
+import com.epam.dlab.rest.contracts.InfrasctructureAPI;
 import com.google.inject.Inject;
 import com.google.inject.name.Named;
 import lombok.extern.slf4j.Slf4j;
 
 import java.time.LocalDateTime;
-import java.util.List;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import static com.epam.dlab.dto.UserInstanceStatus.RUNNING;
-import static com.epam.dlab.dto.base.DataEngineType.CLOUD_SERVICE;
-import static com.epam.dlab.dto.base.DataEngineType.SPARK_STANDALONE;
 
 @Slf4j
 public class InactivityServiceImpl implements InactivityService {
@@ -55,6 +51,8 @@
 	@Inject
 	private EnvDAO envDAO;
 	@Inject
+	private RequestBuilder requestBuilder;
+	@Inject
 	@Named(ServiceConsts.PROVISIONING_SERVICE_NAME)
 	private RESTService provisioningService;
 	@Inject
@@ -67,62 +65,50 @@
 	private SystemUserInfoService systemUserInfoService;
 
 	@Override
-	public void stopClustersByInactivity(List<String> computationalIds) {
-		exploratoryDAO.getInstancesByComputationalIdsAndStatus(computationalIds, RUNNING)
-				.forEach(this::stopClusters);
+	public void updateRunningResourcesLastActivity() {
+		envDAO.findRunningResourcesForCheckInactivity()
+				.forEach(this::updateLastActivity);
 	}
 
 	@Override
-	public void updateLastActivityForClusters(List<EnvResource> clusters) {
-		log.debug("Updating last activity date for clusters...");
-		clusters.forEach(r -> computationalDAO.updateLastActivityDateForInstanceId(r.getId(), r.getLastActivity()));
+	public void updateLastActivityForExploratory(UserInfo userInfo, String exploratoryName,
+												 LocalDateTime lastActivity) {
+		exploratoryDAO.updateLastActivity(userInfo.getName(), exploratoryName, lastActivity);
 	}
 
 	@Override
-	public void stopByInactivity(List<EnvResource> exploratories) {
-		final List<String> expIds = exploratories.stream().map(EnvResource::getId).collect(Collectors.toList());
-		exploratoryDAO.getInstancesByIdsAndStatus(expIds, RUNNING)
-				.stream()
-				.filter(this::shouldExploratoryBeInactivated)
-				.forEach(this::stopNotebook);
-
+	public void updateLastActivityForComputational(UserInfo userInfo, String exploratoryName,
+												   String computationalName, LocalDateTime lastActivity) {
+		computationalDAO.updateLastActivity(userInfo.getName(), exploratoryName, computationalName, lastActivity);
 	}
 
-	@Override
-	public void updateLastActivity(List<EnvResource> exploratories) {
-		exploratories.forEach(r -> exploratoryDAO.updateLastActivityDateForInstanceId(r.getId(), r.getLastActivity()));
-	}
-
-	private void stopNotebook(UserInstanceDTO ui) {
-		exploratoryService.stop(systemUserInfoService.create(ui.getUser()), ui.getExploratoryName());
-	}
-
-	private boolean shouldExploratoryBeInactivated(UserInstanceDTO ui) {
-		final SchedulerJobDTO schedulerData = ui.getSchedulerData();
-
-		return Objects.nonNull(schedulerData) && schedulerData.isCheckInactivityRequired() && Objects.nonNull(ui.getLastActivity()) &&
-				ui.getLastActivity().plusMinutes(schedulerData.getMaxInactivity()).isBefore(LocalDateTime.now());
-	}
-
-	private void stopClusters(UserInstanceDTO ui) {
-		ui.getResources().stream()
-				.filter(this::shouldClusterBeInactivated)
-				.forEach(c -> stopCluster(c, ui.getUser(), ui.getExploratoryName()));
-	}
-
-	private boolean shouldClusterBeInactivated(UserComputationalResource c) {
-		final SchedulerJobDTO schedulerData = c.getSchedulerData();
-		return Objects.nonNull(schedulerData) && schedulerData.isCheckInactivityRequired() &&
-				c.getLastActivity().plusMinutes(schedulerData.getMaxInactivity()).isBefore(LocalDateTime.now());
-	}
-
-	private void stopCluster(UserComputationalResource c, String user, String exploratoryName) {
-		final DataEngineType dataEngineType = c.getDataEngineType();
-		final String compName = c.getComputationalName();
-		if (dataEngineType == SPARK_STANDALONE) {
-			computationalService.stopSparkCluster(systemUserInfoService.create(user), exploratoryName, compName);
-		} else if (dataEngineType == CLOUD_SERVICE) {
-			computationalService.terminateComputational(systemUserInfoService.create(user), exploratoryName, compName);
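+	// Requests fresh last-activity data for the notebook itself (when running) and
+	// for each of its running computational resources via the provisioning service.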
+	private void updateLastActivity(UserInstanceDTO ui) {
+		if (UserInstanceStatus.RUNNING.toString().equals(ui.getStatus())) {
+			updateExploratoryLastActivity(systemUserInfoService.create(ui.getUser()), ui);
 		}
+		ui.getResources()
+				.stream()
+				.filter(comp -> UserInstanceStatus.RUNNING.toString().equals(comp.getStatus()))
+				.forEach(cr -> updateComputationalLastActivity(systemUserInfoService.create(ui.getUser()), ui, cr));
+	}
+
+	private void updateComputationalLastActivity(UserInfo userInfo, UserInstanceDTO ui, UserComputationalResource cr) {
+		final ComputationalCheckInactivityDTO dto =
+				requestBuilder.newComputationalCheckInactivity(userInfo, ui, cr);
+		final String uuid =
+				provisioningService.post(InfrasctructureAPI.COMPUTATIONAL_CHECK_INACTIVITY,
+						userInfo.getAccessToken(), dto, String.class);
+		requestId.put(userInfo.getName(), uuid);
+	}
+
+	private void updateExploratoryLastActivity(UserInfo userInfo, UserInstanceDTO ui) {
+		final ExploratoryCheckInactivityAction dto =
+				requestBuilder.newExploratoryCheckInactivityAction(userInfo, ui);
+		final String uuid =
+				provisioningService.post(InfrasctructureAPI.EXPLORATORY_CHECK_INACTIVITY,
+						userInfo.getAccessToken(), dto, String.class);
+		requestId.put(userInfo.getName(), uuid);
 	}
 }
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImpl.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImpl.java
index 6222093..c906712 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImpl.java
@@ -34,12 +34,10 @@
 import com.epam.dlab.dto.UserInstanceStatus;
 import com.epam.dlab.dto.base.DataEngineType;
 import com.epam.dlab.dto.computational.UserComputationalResource;
-import com.epam.dlab.dto.status.EnvResource;
 import com.epam.dlab.exceptions.ResourceInappropriateStateException;
 import com.epam.dlab.exceptions.ResourceNotFoundException;
 import com.epam.dlab.model.scheduler.SchedulerJobData;
 import com.epam.dlab.rest.client.RESTService;
-import com.epam.dlab.rest.contracts.InfrasctructureAPI;
 import com.google.inject.Inject;
 import com.google.inject.Singleton;
 import com.google.inject.name.Named;
@@ -48,8 +46,10 @@
 
 import java.time.*;
 import java.time.temporal.ChronoUnit;
+import java.util.Date;
 import java.util.List;
 import java.util.Objects;
+import java.util.Optional;
 import java.util.function.Predicate;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
@@ -57,7 +57,9 @@
 import static com.epam.dlab.constants.ServiceConsts.PROVISIONING_SERVICE_NAME;
 import static com.epam.dlab.dto.UserInstanceStatus.*;
 import static com.epam.dlab.dto.base.DataEngineType.getDockerImageName;
+import static java.time.ZoneId.systemDefault;
 import static java.util.Collections.singletonList;
+import static java.util.Date.from;
 
 @Slf4j
 @Singleton
@@ -65,6 +67,7 @@
 
 	private static final String SCHEDULER_NOT_FOUND_MSG =
 			"Scheduler job data not found for user %s with exploratory %s";
+	private static final long ALLOWED_INACTIVITY_MINUTES = 1L;
 
 	@Inject
 	private SchedulerJobDAO schedulerJobDAO;
@@ -116,9 +119,11 @@
 		log.debug("Updating exploratory {} for user {} with new scheduler job data: {}...", exploratoryName, user,
 				dto);
 		exploratoryDAO.updateSchedulerDataForUserAndExploratory(user, exploratoryName, dto);
-		if (dto.isSyncStartRequired()) {
+
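+		// Sync-start propagation applies only to time-based schedulers, not inactivity ones.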
+		if (!dto.inactivityScheduler() && dto.isSyncStartRequired()) {
 			shareSchedulerJobDataToSparkClusters(user, exploratoryName, dto);
-		} else {
+		} else if (!dto.inactivityScheduler()) {
 			computationalDAO.updateSchedulerSyncFlag(user, exploratoryName, dto.isSyncStartRequired());
 		}
 	}
@@ -136,13 +140,13 @@
 
 	@Override
 	public void stopComputationalByScheduler() {
-		getComputationalSchedulersForStopping(OffsetDateTime.now())
+		getComputationalSchedulersForStopping(OffsetDateTime.now(), true)
 				.forEach(this::stopComputational);
 	}
 
 	@Override
 	public void stopExploratoryByScheduler() {
-		getExploratorySchedulersForStopping(OffsetDateTime.now())
+		getExploratorySchedulersForStopping(OffsetDateTime.now(), true)
 				.forEach(this::stopExploratory);
 	}
 
@@ -172,16 +176,6 @@
 	}
 
 	@Override
-	public void updateRunningResourcesLastActivity(UserInfo userInfo) {
-		List<EnvResource> resources = envDAO.findRunningResourcesForCheckInactivity();
-		if (!resources.isEmpty()) {
-			String uuid = provisioningService.post(InfrasctructureAPI.INFRASTRUCTURE_CHECK_INACTIVITY,
-					userInfo.getAccessToken(), resources, String.class);
-			requestId.put(userInfo.getName(), uuid);
-		}
-	}
-
-	@Override
 	public void removeScheduler(String user, String exploratoryName) {
 		schedulerJobDAO.removeScheduler(user, exploratoryName);
 	}
@@ -196,11 +190,11 @@
 		final OffsetDateTime desiredDateTime = OffsetDateTime.now().plusMinutes(minutesOffset);
 		final Predicate<SchedulerJobData> userPredicate = s -> user.equals(s.getUser());
 		final Stream<SchedulerJobData> computationalSchedulersStream =
-				getComputationalSchedulersForStopping(desiredDateTime)
+				getComputationalSchedulersForStopping(desiredDateTime, false)
 						.stream()
 						.filter(userPredicate);
 		final Stream<SchedulerJobData> exploratorySchedulersStream =
-				getExploratorySchedulersForStopping(desiredDateTime)
+				getExploratorySchedulersForStopping(desiredDateTime, false)
 						.stream()
 						.filter(userPredicate);
 		return Stream.concat(computationalSchedulersStream, exploratorySchedulersStream)
@@ -228,8 +222,7 @@
 		final String expName = job.getExploratoryName();
 		final String user = job.getUser();
 		log.debug("Stopping exploratory {} for user {} by scheduler", expName, user);
-		exploratoryService.stop(systemUserService.create(job.getUser()),
-				job.getExploratoryName());
+		exploratoryService.stop(systemUserService.create(user), expName);
 	}
 
 	private List<SchedulerJobData> getExploratorySchedulersForTerminating(OffsetDateTime now) {
@@ -303,10 +296,17 @@
 		}
 	}
 
-	private List<SchedulerJobData> getExploratorySchedulersForStopping(OffsetDateTime currentDateTime) {
-		return schedulerJobDAO.getExploratorySchedulerDataWithStatus(RUNNING)
+	private List<SchedulerJobData> getExploratorySchedulersForStopping(OffsetDateTime currentDateTime,
+																	   boolean checkInactivity) {
+
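+		// Candidates are limited to exploratories whose clusters' last activity predates this cutoff.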
+		final Date clusterMaxInactivityAllowedDate =
+				from(LocalDateTime.now().minusMinutes(ALLOWED_INACTIVITY_MINUTES).atZone(systemDefault()).toInstant());
+		return schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(RUNNING,
+				clusterMaxInactivityAllowedDate)
 				.stream()
-				.filter(canSchedulerForStoppingBeApplied(currentDateTime))
+				.filter(canSchedulerForStoppingBeApplied(currentDateTime)
+						.or(schedulerJobData -> checkInactivity && exploratoryInactivityCondition(schedulerJobData)))
 				.collect(Collectors.toList());
 	}
 
@@ -350,20 +349,63 @@
 				convertedCurrentTime.equals(terminateDateTime.atOffset(timeZoneOffset).toLocalDateTime());
 	}
 
-	private List<SchedulerJobData> getComputationalSchedulersForStopping(OffsetDateTime currentDateTime) {
+	private List<SchedulerJobData> getComputationalSchedulersForStopping(OffsetDateTime currentDateTime,
+																		 boolean checkInactivity) {
 		return schedulerJobDAO
 				.getComputationalSchedulerDataWithOneOfStatus(RUNNING, DataEngineType.SPARK_STANDALONE, RUNNING)
 				.stream()
-				.filter(canSchedulerForStoppingBeApplied(currentDateTime))
+				.filter(canSchedulerForStoppingBeApplied(currentDateTime)
+						.or(schedulerJobData -> checkInactivity && computationalInactivityCondition(schedulerJobData)))
 				.collect(Collectors.toList());
 	}
 
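+	// A resource qualifies for an inactivity stop when its scheduler requires the
+	// check and the resource has been idle longer than the configured maximum.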
+	private boolean computationalInactivityCondition(SchedulerJobData jobData) {
+		final SchedulerJobDTO schedulerData = jobData.getJobDTO();
+		return schedulerData.isCheckInactivityRequired() && computationalInactivityExceed(jobData, schedulerData);
+	}
+
+	private boolean computationalInactivityExceed(SchedulerJobData schedulerJobData, SchedulerJobDTO schedulerData) {
+		final String explName = schedulerJobData.getExploratoryName();
+		final String compName = schedulerJobData.getComputationalName();
+		final String user = schedulerJobData.getUser();
+		final UserComputationalResource c = computationalDAO.fetchComputationalFields(user, explName, compName);
+		final Long maxInactivity = schedulerData.getMaxInactivity();
+		return inactivityCondition(maxInactivity, c.getStatus(), c.getLastActivity());
+	}
+
+	private boolean exploratoryInactivityCondition(SchedulerJobData jobData) {
+		final SchedulerJobDTO schedulerData = jobData.getJobDTO();
+		return schedulerData.isCheckInactivityRequired() && exploratoryInactivityExceed(jobData, schedulerData);
+	}
+
+	private boolean exploratoryInactivityExceed(SchedulerJobData schedulerJobData, SchedulerJobDTO schedulerData) {
+		final String expName = schedulerJobData.getExploratoryName();
+		final String user = schedulerJobData.getUser();
+		final UserInstanceDTO userInstanceDTO = exploratoryDAO.fetchExploratoryFields(user, expName, true);
+		final boolean canBeStopped = userInstanceDTO.getResources()
+				.stream()
+				.map(UserComputationalResource::getStatus)
+				.map(UserInstanceStatus::of)
+				.noneMatch(status -> status.in(TERMINATING, CONFIGURING, CREATING));
+		return canBeStopped && inactivityCondition(schedulerData.getMaxInactivity(), userInstanceDTO.getStatus(),
+				userInstanceDTO.getLastActivity());
+	}
+
+	private boolean inactivityCondition(Long maxInactivity, String status, LocalDateTime lastActivity) {
+		return UserInstanceStatus.RUNNING.toString().equals(status) &&
+				Optional.ofNullable(lastActivity)
+						.map(la -> la.plusMinutes(maxInactivity).isBefore(LocalDateTime.now()))
+						.orElse(Boolean.FALSE);
+	}
+
 	private void populateDefaultSchedulerValues(SchedulerJobDTO dto) {
 		if (Objects.isNull(dto.getBeginDate()) || StringUtils.isBlank(dto.getBeginDate().toString())) {
 			dto.setBeginDate(LocalDate.now());
 		}
 		if (Objects.isNull(dto.getTimeZoneOffset()) || StringUtils.isBlank(dto.getTimeZoneOffset().toString())) {
-			dto.setTimeZoneOffset(OffsetDateTime.now(ZoneId.systemDefault()).getOffset());
+			dto.setTimeZoneOffset(OffsetDateTime.now(systemDefault()).getOffset());
 		}
 	}
 
@@ -381,7 +421,7 @@
 
 	private void validateResourceStatus(String resourceStatus) {
 		final UserInstanceStatus status = UserInstanceStatus.of(resourceStatus);
-		if (Objects.isNull(status) || status.in(UserInstanceStatus.TERMINATED, UserInstanceStatus.TERMINATING,
+		if (Objects.isNull(status) || status.in(UserInstanceStatus.TERMINATED, TERMINATING,
 				UserInstanceStatus.FAILED)) {
 			throw new ResourceInappropriateStateException(String.format("Can not create/update scheduler for user " +
 					"instance with status: %s", status));
diff --git a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java
index 3fb600e..b120c53 100644
--- a/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java
+++ b/services/self-service/src/main/java/com/epam/dlab/backendapi/util/RequestBuilder.java
@@ -521,6 +521,22 @@
 				.withImageName(imageName);
 	}
 
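+	/**
+	 * Builds the provisioning-service request for an inactivity check of a computational resource.
+	 */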
+	@SuppressWarnings("unchecked")
+	public <T extends ComputationalBase<T>> T newComputationalCheckInactivity(UserInfo userInfo,
+																			  UserInstanceDTO exploratory,
+																			  UserComputationalResource cr) {
+		return (T) newResourceSysBaseDTO(userInfo, ComputationalCheckInactivityDTO.class)
+				.withExploratoryName(exploratory.getExploratoryName())
+				.withComputationalName(cr.getComputationalName())
+				.withNotebookInstanceName(exploratory.getExploratoryId())
+				.withApplicationName(getApplicationNameFromImage(exploratory.getImageName()))
+				.withNotebookImageName(exploratory.getImageName())
+				.withImage(cr.getImageName())
+				.withComputationalId(cr.getComputationalId());
+	}
 
 	@SuppressWarnings("unchecked")
 	public <T extends EnvBackupDTO> T newBackupCreate(BackupFormDTO backupFormDTO, String id) {
@@ -572,6 +586,20 @@
 		return dto;
 
 
+    }
+
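+	/**
+	 * Builds the provisioning-service request for an inactivity check of an exploratory.
+	 */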
+	public ExploratoryCheckInactivityAction newExploratoryCheckInactivityAction(UserInfo userInfo,
+																				UserInstanceDTO userInstance) {
+		final ExploratoryCheckInactivityAction dto = newResourceSysBaseDTO(userInfo,
+				ExploratoryCheckInactivityAction.class);
+		dto.withNotebookInstanceName(userInstance.getExploratoryId())
+				.withNotebookImage(userInstance.getImageName())
+				.withExploratoryName(userInstance.getExploratoryName())
+				.withReuploadKeyRequired(userInstance.isReuploadKeyRequired());
+		return dto;
 	}
 
 	private CloudProvider cloudProvider() {
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/services/dataengineConfiguration.service.ts b/services/self-service/src/main/resources/webapp/src/app/core/services/dataengineConfiguration.service.ts
index ce11f3b..5dcef3e 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/services/dataengineConfiguration.service.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/services/dataengineConfiguration.service.ts
@@ -19,7 +19,7 @@
 
 import { Injectable } from '@angular/core';
 import { Observable } from 'rxjs';
-import {catchError, map } from 'rxjs/operators';
+import { catchError, map } from 'rxjs/operators';
 
 import { ApplicationServiceFacade } from './applicationServiceFacade.service';
 import { ErrorUtils } from '../util';
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/services/scheduler.service.ts b/services/self-service/src/main/resources/webapp/src/app/core/services/scheduler.service.ts
index cb75f3c..c595486 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/services/scheduler.service.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/services/scheduler.service.ts
@@ -23,6 +23,7 @@
 
 import { ApplicationServiceFacade } from './applicationServiceFacade.service';
 import { ErrorUtils } from '../util/';
+import { ScheduleSchema } from '../../resources/scheduler/scheduler.model';
 
 @Injectable()
 export class SchedulerService {
@@ -37,7 +38,7 @@
         catchError(ErrorUtils.handleServiceError));
   }
 
-  public setExploratorySchedule(notebook, data, resource?): Observable<{}> {
+  public setExploratorySchedule(notebook, data, resource?): Observable<ScheduleSchema> {
     const param = resource ? `/${notebook}/${resource}` : `/${notebook}`;
     return this.applicationServiceFacade
       .buildSetExploratorySchedule(param, data)
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/util/checkUtils.ts b/services/self-service/src/main/resources/webapp/src/app/core/util/checkUtils.ts
index ae3d5b6..36f8a63 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/util/checkUtils.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/util/checkUtils.ts
@@ -26,4 +26,14 @@
     }
     return true;
   }
+
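+  /** Permits digit, control and dot (charCode 46) keypresses; prevents and rejects anything else. */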
+  public static isNumberKey($event): boolean {
+    const charCode = ($event.which) ? $event.which : $event.keyCode;
+    if (charCode !== 46 && charCode > 31 && (charCode < 48 || charCode > 57)) {
+      $event.preventDefault();
+      return false;
+    }
+    return true;
+  }
 }
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html
index a37b619..dcf1c25 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html
@@ -69,7 +69,7 @@
               <label class="label">{{ DICTIONARY[model.selectedImage.image].instance_number }}</label>
               <div class="control">
                 <input type="number" class="form-control" min="{{minInstanceNumber}}" max="{{maxInstanceNumber}}"
-                      formControlName="instance_number" (keypress)="isNumberKey($event)" />
+                      formControlName="instance_number" (keypress)="CheckUtils.isNumberKey($event)" />
                 <span class="danger_color" *ngIf="!resourceForm?.controls.instance_number.valid">
                   <span>Only integer values greater than or equal to {{ minInstanceNumber }} and less than {{ maxInstanceNumber }} are allowed</span>
                 </span>
@@ -102,7 +102,7 @@
                 [ngClass]="{ show: preemptible?.nativeElement['checked'] || false}">
             <input type="text" class="form-control"
               formControlName="preemptible_instance_number"
-              (keypress)="isNumberKey($event)"
+              (keypress)="CheckUtils.isNumberKey($event)"
               (keydown.arrowup)="preemptibleCounter($event, 'increment')"
               (keydown.arrowdown)="preemptibleCounter($event, 'decrement')" />
             <span class="danger_color" *ngIf="!resourceForm?.controls.preemptible_instance_number.valid">
@@ -122,7 +122,7 @@
             <span *ngIf="spotInstancesSelect?.nativeElement['checked'] || false"> bit, %</span>
           </label>
           <div class="control spot-details" [ngClass]="{ show: spotInstancesSelect?.nativeElement['checked'] || false }" *ngIf="spotInstancesSelect?.nativeElement['checked'] || false">
-            <input type="number" class="form-control" step="5" min="{{minSpotPrice}}" max="{{maxSpotPrice}}" formControlName="instance_price" (keypress)="isNumberKey($event)">
+            <input type="number" class="form-control" step="5" min="{{minSpotPrice}}" max="{{maxSpotPrice}}" formControlName="instance_price" (keypress)="CheckUtils.isNumberKey($event)">
             <span class="danger_color" *ngIf="!resourceForm?.controls.instance_price.valid">
               Only integer values greater than or equal to {{minSpotPrice}} and less than {{maxSpotPrice}} are allowed
             </span>
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.ts
index 2df4b3a..6acc684 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.ts
@@ -38,6 +38,7 @@
   readonly PROVIDER = DICTIONARY.cloud_provider;
   readonly DICTIONARY = DICTIONARY;
   readonly CLUSTER_CONFIGURATION = CLUSTER_CONFIGURATION;
+  readonly CheckUtils = CheckUtils;
 
   model: ComputationalResourceCreateModel;
   notebook_instance: any;
@@ -84,14 +85,6 @@
     this.bindDialog.onClosing = () => this.resetDialog();
   }
 
-  public isNumberKey($event): boolean {
-    const charCode = ($event.which) ? $event.which : $event.keyCode;
-    if (charCode !== 46 && charCode > 31 && (charCode < 48 || charCode > 57)) {
-      $event.preventDefault();
-      return false;
-    }
-    return true;
-  }
 
   public onUpdate($event): void {
     if ($event.model.type === 'template') {
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html
index 8135889..1b2ac38 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/detail-dialog/detail-dialog.component.html
@@ -48,16 +48,16 @@
                                 <a class="ellipsis" href="{{item.url}}" target="_blank">&#32;{{item.url}}</a>
                             </p>
                         </div>
-                        <p *ngIf="notebook.username">Node User: &nbsp;<strong>{{ notebook.username }}</strong></p>
-                        <p *ngIf="notebook.password">Password: &nbsp;<strong>{{ notebook.password }}</strong></p>
+                        <p *ngIf="notebook.username">Node User: &#32;<strong>{{ notebook.username }}</strong></p>
+                        <p *ngIf="notebook.password">Password: &#32;<strong>{{ notebook.password }}</strong></p>
 
-                        <p class="m-top-30">{{ DICTIONARY.personal_storage }}: &nbsp;</p>
+                        <p class="m-top-30">{{ DICTIONARY.personal_storage }}: &#32;</p>
                         <div class="links_block">
                             <p *ngIf="DICTIONARY.cloud_provider === 'azure' && notebook.account_name">{{ DICTIONARY.account }}
                                 <strong>{{ notebook.account_name}}</strong></p>
                             <p *ngIf="notebook.bucket_name">{{ DICTIONARY.container }} <strong>{{ notebook.bucket_name }}</strong></p>
                         </div>
-                        <p>{{ DICTIONARY.collaboration_storage }}: &nbsp;</p>
+                        <p>{{ DICTIONARY.collaboration_storage }}: &#32;</p>
                         <div class="links_block">
                             <p *ngIf="DICTIONARY.cloud_provider === 'azure' && notebook.shared_account_name">{{ DICTIONARY.account }}
                                 <strong>{{ notebook.shared_account_name}}</strong></p>
@@ -67,11 +67,11 @@
                         <br />
 
                         <div *ngIf="DICTIONARY.cloud_provider === 'azure' && notebook.datalake_name">
-                            <p>Data Lake Store: &nbsp;</p>
+                            <p>Data Lake Store: &#32;</p>
                             <div class="links_block">
-                                <p>Data Lake Store Account: &nbsp;<strong>{{ notebook.datalake_name }}</strong></p>
-                                <p>Personal folder: &nbsp;<strong>{{ notebook.datalake_directory }}</strong></p>
-                                <p>Collaboration folder: &nbsp;<strong>{{ notebook.datalake_shared_directory }}</strong></p>
+                                <p>Data Lake Store Account: &#32;<strong>{{ notebook.datalake_name }}</strong></p>
+                                <p>Personal folder: &#32;<strong>{{ notebook.datalake_directory }}</strong></p>
+                                <p>Collaboration folder: &#32;<strong>{{ notebook.datalake_shared_directory }}</strong></p>
                             </div>
                         </div>
 
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.calculations.ts b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.calculations.ts
new file mode 100644
index 0000000..1174e74
--- /dev/null
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.calculations.ts
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+export class SchedulerCalculations {
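+  /**
+   * Converts between a 24h "HH:mm" string and an {hour, minute, meridiem}
+   * object, in whichever direction matches the input type.
+   */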
+  public static convertTimeFormat(time24: any) {
+    let result;
+    if (typeof time24 === 'string') {
+      const spl = time24.split(':');
+
+      result = {
+        hour: +spl[0] % 12 || 12,
+        minute: +spl[1],
+        meridiem: +spl[0] < 12 || +spl[0] === 24 ? 'AM' : 'PM'
+      };
+    } else {
+      let hours = time24.hour;
+      const minutes = (time24.minute < 10) ? '0' + time24.minute : time24.minute;
+
+      if (time24.meridiem === 'PM' && time24.hour < 12) hours = time24.hour + 12;
+      if (time24.meridiem === 'AM' && time24.hour === 12) hours = time24.hour - 12;
+      hours = hours < 10 ? '0' + hours : hours;
+
+      result = `${hours}:${minutes}`;
+    }
+    return result;
+  }
+}
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.html
index 7b889b1..90203da 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.html
@@ -31,101 +31,147 @@
       <form [formGroup]="schedulerForm">
         <div class="enable-schedule">
           <mat-slide-toggle labelPosition="after" [checked]="enableSchedule" (change)="toggleSchedule($event)">
-            <span *ngIf="toggleSchedule" class="hold-label">Enable scheduler</span>
+            <span *ngIf="toggleSchedule" class="hold-label">Scheduler by time</span>
           </mat-slide-toggle>
-          <div class="idle" *ngIf="idleImplemented">
+          <div class="idle">
             <mat-slide-toggle labelPosition="before" [checked]="enableIdleTime" (change)="toggleIdleTimes($event)">
-              <span *ngIf="toggleSchedule" class="hold-label">Inactivity time</span>
+              <span *ngIf="toggleSchedule" class="hold-label">Scheduler by inactivity</span>
             </mat-slide-toggle>
           </div>
         </div>
 
-        <div *ngIf="idleImplemented" lass="note m-bott-10">NOTE: In case of turning on inactivity time-check, your schedule configuration will be decommissioned.</div>
+        <div class="note m-bott-10">NOTE: In case of turning on inactivity time-check, your schedule
+          configuration will be decommissioned.</div>
 
-        <div class="control-group idle-control" [ngClass]="{ show: enableIdleTime }">
-          <label class="label">Inactivity time</label>
+        <div class="control-group idle-control" [ngClass]="{ show: enableIdleTimeView }">
+          <label class="label">Scheduler by inactivity, min</label>
           <div class="control">
-            <input type="number" class="form-control" min="10" max="10080" formControlName="inactivityTime" class="form-control" placeholder="Enter time in min" />
-            <span class="danger_color" *ngIf="!schedulerForm.controls.inactivityTime.valid">
-              <span>The value should be an integer greater than or equal to {{ inactivityLimits.min }} and cannot exceed 1 week ({{ inactivityLimits.max }}) in min</span>
+            <input type="text" class="form-control" placeholder="Enter time in min" formControlName="inactivityTime"
+              (keypress)="CheckUtils.isNumberKey($event)" (keydown.arrowup)="inactivityCounter($event, 'increment')"
+              (keydown.arrowdown)="inactivityCounter($event, 'decrement')" />
+            <span class="error" *ngIf="!schedulerForm.controls.inactivityTime.valid">
+              <span>The value should be an integer greater than or equal to {{ inactivityLimits.min }}
+                and cannot exceed one week ({{ inactivityLimits.max }} minutes)</span>
             </span>
           </div>
         </div>
-
-        <div class="row-wrap">
-          <div class="col-3">
-            <mat-form-field>
-              <input matInput [matDatepicker]="startDate" placeholder="Choose start date" formControlName="startDate">
-              <mat-datepicker-toggle [ngClass]="{'not-allowed' : destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule }" matSuffix [for]="startDate"></mat-datepicker-toggle>
-              <mat-datepicker #startDate></mat-datepicker>
-            </mat-form-field>
-          </div>
-          <div class="col-3">
-            <mat-form-field>
-              <input matInput [matDatepicker]="finishDate" placeholder="Choose finish date" formControlName="finishDate">
-              <mat-datepicker-toggle matSuffix [for]="finishDate"></mat-datepicker-toggle>
-              <mat-datepicker #finishDate></mat-datepicker>
-            </mat-form-field>
-          </div>
-          <div class="col-3">
-            <mat-form-field class="timezone-offset">
-              <mat-select
-                placeholder="Select offset"
-                [(value)]="tzOffset"
-                [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule">
-                <mat-option *ngFor="let zone of zones" [value]="zone">{{ zone }}</mat-option>
-              </mat-select>
-            </mat-form-field>
-          </div>
-        </div>
-
-        <div class="row-wrap">
-          <div class="control-group col-3 time-range">
-            <dlab-time-picker [(pickTime)]="startTime" [label]="'Choose start time'"
-              [disable]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule"></dlab-time-picker>
-          </div>
-          <div class="control-group col-3 time-range">
-            <dlab-time-picker [(pickTime)]="endTime" [label]="'Choose finish time'"
-              [disable]="!enableSchedule"></dlab-time-picker>
-          </div>
-          <div *ngIf="timeReqiered" class="danger_color m-bott-10"><span>At least one of time range fields should be selected</span></div>
-          
-        </div>
-        <div class="control-group">
-          <label class="label repeat" for="options">Repeat on :</label>
-          <div class="days-block">
-            <label>Start date:</label>
-            <mat-button-toggle *ngFor="let day of weekdays; let i = index" value="{{ day }}"
-              (change)="onDaySelect($event, day, 'start')"
-              [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule"
-              [checked]="selectedStartWeekDays[day.toLowerCase()]">{{ day[0] }}
-            </mat-button-toggle>
-          </div>
-          <div class="days-block">
-            <label>Stop date:</label>
-            <mat-button-toggle *ngFor="let day of weekdays; let i = index" value="{{ day }}"
-              (change)="onDaySelect($event, day, 'stop')"
-              [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule"
-              [checked]="selectedStopWeekDays[day.toLowerCase()]">{{ day[0] }}
-            </mat-button-toggle>
-          </div>
-        </div>
-        <span class="note m-bott-10">NOTE: to enable the scheduler at least one weekday should be specified.</span>
-        <div class="inherit" *ngIf="destination">
-          <mat-slide-toggle labelPosition="after" [checked]="inherit" (change)="toggleInherit($event)" [disabled]="!enableSchedule || (!parentInherit && destination.type === 'СOMPUTATIONAL')">
-            <span *ngIf="destination.type === 'EXPLORATORY'; else resourcePropagation" class="hold-label">
-              <span>Start all spark clusters associated with current notebook <br /> based on notebook start schedule</span>
-            </span>
-            <ng-template #resourcePropagation>
-              <span class="hold-label">Inherit notebook schedule settings</span>
+        <div class="schedule-by-time" *ngIf="!enableIdleTimeView"
+          [ngClass]="{ hide: enableIdleTimeView, resource: destination.type === 'СOMPUTATIONAL' }">
+          <div class="row-wrap">
+            <div class="col-3">
+              <mat-form-field>
+                <input matInput [matDatepicker]="startDate" placeholder="Choose start date" formControlName="startDate">
+                <mat-datepicker-toggle
+                  [ngClass]="{'not-allowed' : destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule }"
+                  matSuffix [for]="startDate"></mat-datepicker-toggle>
+                <mat-datepicker #startDate></mat-datepicker>
+              </mat-form-field>
+            </div>
+            <div class="col-3">
+              <mat-form-field>
+                <input matInput [matDatepicker]="finishDate" placeholder="Choose finish date"
+                  formControlName="finishDate">
+                <mat-datepicker-toggle matSuffix [for]="finishDate"></mat-datepicker-toggle>
+                <mat-datepicker #finishDate></mat-datepicker>
+              </mat-form-field>
+            </div>
+            <div class="col-3" *ngIf="destination.type === 'СOMPUTATIONAL'; else timezone">
+              <mat-form-field>
+                <input matInput [matDatepicker]="terminateDate" placeholder="Choose terminate date"
+                  formControlName="terminateDate">
+                <mat-datepicker-toggle
+                  matSuffix [for]="terminateDate"></mat-datepicker-toggle>
+                <mat-datepicker #terminateDate></mat-datepicker>
+              </mat-form-field>
+            </div>
+            <ng-template #timezone>
+              <div class="col-3">
+                <mat-form-field class="timezone-offset">
+                  <mat-select placeholder="Select offset" [(value)]="tzOffset"
+                    [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule">
+                    <mat-option *ngFor="let zone of zones" [value]="zone">{{ zone }}</mat-option>
+                  </mat-select>
+                </mat-form-field>
+              </div>
             </ng-template>
+          </div>
+
+          <div class="row-wrap">
+            <div class="control-group col-3 time-range">
+              <dlab-time-picker [(pickTime)]="startTime" [label]="'Choose start time'"
+                [disable]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule">
+              </dlab-time-picker>
+            </div>
+            <div class="control-group col-3 time-range">
+              <dlab-time-picker [(pickTime)]="endTime" [label]="'Choose finish time'" [disable]="!enableSchedule">
+              </dlab-time-picker>
+            </div>
+            <div class="control-group col-3 time-range" *ngIf="destination.type === 'СOMPUTATIONAL'">
+              <dlab-time-picker [(pickTime)]="terminateTime" [label]="'Choose terminate time'"
+                [disable]="!enableSchedule"></dlab-time-picker>
+            </div>
+            <div *ngIf="timeReqiered" class="error m-bott-10 mt-5"><span>At least one of time range fields
+                should be selected</span></div>
+          </div>
+
+          <div class="row-wrap" *ngIf="destination.type === 'СOMPUTATIONAL'">
+            <div class="col-3">
+              <mat-form-field class="timezone-offset">
+                <mat-select placeholder="Select offset" [(value)]="tzOffset"
+                  [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule">
+                  <mat-option *ngFor="let zone of zones" [value]="zone">{{ zone }}</mat-option>
+                </mat-select>
+              </mat-form-field>
+            </div>
+          </div>
+          <div class="control-group" *ngIf="destination">
+            <label class="label repeat" for="options">Repeat on :</label>
+            <div class="days-block">
+              <label>Start date:</label>
+              <mat-button-toggle *ngFor="let day of weekdays; let i = index" value="{{ day }}"
+                (change)="onDaySelect($event, day, 'start')"
+                [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule"
+                [checked]="selectedStartWeekDays[day.toLowerCase()]">{{ day[0] }}
+              </mat-button-toggle>
+            </div>
+            <div class="days-block">
+              <label>Stop date:</label>
+              <mat-button-toggle *ngFor="let day of weekdays; let i = index" value="{{ day }}"
+                (change)="onDaySelect($event, day, 'stop')"
+                [disabled]="destination.type === 'СOMPUTATIONAL' && inherit || !enableSchedule"
+                [checked]="selectedStopWeekDays[day.toLowerCase()]">{{ day[0] }}
+              </mat-button-toggle>
+            </div>
+          </div>
+
+          <div class="options" *ngIf="destination && allowInheritView">
+            <mat-slide-toggle labelPosition="after" [checked]="inherit" (change)="toggleInherit($event)"
+              [disabled]="!enableSchedule || (!parentInherit && destination.type === 'СOMPUTATIONAL')">
+              <span *ngIf="destination.type === 'EXPLORATORY'; else resourcePropagation" class="hold-label">
+                <span>Start all Spark clusters associated with the current notebook based on the notebook
+                  start schedule</span>
+              </span>
+              <ng-template #resourcePropagation>
+                <span class="hold-label">Inherit notebook schedule settings</span>
+              </ng-template>
+            </mat-slide-toggle>
+          </div>
+        </div>
+        <div class="options" *ngIf="destination.type === 'EXPLORATORY'">
+          <mat-slide-toggle labelPosition="after" [checked]="considerInactivity"
+            [disabled]="!enableSchedule && !enableIdleTime" (change)="considerInactivity = !considerInactivity">
+            <span class="hold-label">
+              <span>If jobs are running on standalone Spark, the notebook stop scheduler will not be
+                triggered</span>
+            </span>
           </mat-slide-toggle>
         </div>
       </form>
       <div class="text-center m-top-30">
         <button mat-raised-button type="button" class="butt action" (click)="close()">Cancel</button>
-        <button mat-raised-button type="button" class="butt butt-success action" [disabled]="!enableSchedule && !enableIdleTime" (click)="scheduleInstance_btnClick()">Save</button>
+        <button mat-raised-button type="button" class="butt butt-success action"
+          (click)="scheduleInstance_btnClick()">Save</button>
+        <!-- [disabled]="!enableSchedule && !enableIdleTime" -->
       </div>
     </div>
   </modal-content>
-</modal-dialog>
+</modal-dialog>
\ No newline at end of file
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.scss b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.scss
index fad88c9..83d9bac 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.scss
@@ -18,15 +18,29 @@
  */
 
 .scheduler-dialog {
+  .mat-form-field-appearance-legacy {
+    .mat-form-field-label {
+      font-family: 'Open Sans', sans-serif !important;
+      font-size: 13px;
+    }
+  }
   .mat-select-value {
     color: #758da9;
   }
+  .mat-select-disabled {
+    .mat-select-value {
+      color: #9e9e9e;
+    }
+  }
   .mat-form-field-flex {
     width: 160px;
   }
   .mat-select-trigger {
     width: 180px;
   }
+  .content-box {
+    overflow: hidden;
+  }
   .enable-schedule {
     display: flex;
     margin-bottom: 10px;
@@ -42,12 +56,13 @@
       }
     }
   }
-  .info-message, .idle-control {
+  .info-message,
+  .idle-control {
     height: 0;
     opacity: 0;
     visibility: hidden;
     text-align: left;
-    transition: all .35s linear .2s;
+    transition: height 0.35s linear 0.2s;
     padding-bottom: 0;
     &.show {
       height: 30px;
@@ -56,11 +71,48 @@
       margin-bottom: 50px;
     }
   }
+  .idle-control {
+    .control {
+      position: relative;
+      .error {
+        position: absolute;
+        left: 5px;
+        top: 40px;
+        font-size: 12px;
+      }
+    }
+  }
   .info-message {
     &.show {
       margin-bottom: 15px;
     }
   }
+  .schedule-by-time {
+    transition: opacity 0.5s linear 0.2s;
+    visibility: visible;
+    height: 240px;
+    opacity: 1;
+    &.hide {
+      height: 0;
+      opacity: 0;
+      visibility: hidden;
+      text-align: left;
+    }
+    &.resource {
+      height: 280px;
+    }
+    .row-wrap {
+      position: relative;
+      .error {
+        position: absolute;
+        bottom: -10px;
+        font-size: 12px;
+      }
+      mat-datepicker-toggle.not-allowed {
+        opacity: 1;
+      }
+    }
+  }
   .col-3 {
     width: 33%;
     box-sizing: border-box;
@@ -139,16 +191,18 @@
     font-size: 12px;
     color: #36afd5;
   }
-  .inherit {
+  .options {
     color: #718ba6;
     font-size: 15px;
     font-weight: 600;
+    margin-bottom: 10px;
     .mat-slide-toggle-content {
       padding: 5px;
-    }
-    .hold-label {
-      span {
-        font-size: 14px;
+      .hold-label {
+        span {
+          font-size: 13px;
+          vertical-align: text-bottom;
+        }
       }
     }
   }
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts
index 9be1203..fb0a4fc 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.component.ts
@@ -18,7 +18,7 @@
  */
 
 import { Component, OnInit, ViewChild, Output, EventEmitter, ViewEncapsulation, ChangeDetectorRef } from '@angular/core';
-import { FormGroup, FormBuilder } from '@angular/forms';
+import { FormGroup, FormBuilder, Validators } from '@angular/forms';
 import { ToastrService } from 'ngx-toastr';
 
 import * as _moment from 'moment';
@@ -26,8 +26,9 @@
 
 import { SchedulerService } from '../../core/services';
 import { SchedulerModel, WeekdaysModel } from './scheduler.model';
-import { HTTP_STATUS_CODES } from '../../core/util';
-import { log } from 'util';
+import { SchedulerCalculations } from './scheduler.calculations';
+import { HTTP_STATUS_CODES, CheckUtils } from '../../core/util';
+import { ScheduleSchema } from './scheduler.model';
 
 @Component({
   selector: 'dlab-scheduler',
@@ -36,16 +37,21 @@
   encapsulation: ViewEncapsulation.None
 })
 export class SchedulerComponent implements OnInit {
+  readonly CheckUtils = CheckUtils;
+
   public model: SchedulerModel;
-  public selectedStartWeekDays: WeekdaysModel = new WeekdaysModel(false, false, false, false, false, false, false);
-  public selectedStopWeekDays: WeekdaysModel = new WeekdaysModel(false, false, false, false, false, false, false);
+  public selectedStartWeekDays: WeekdaysModel = WeekdaysModel.setDefault();
+  public selectedStopWeekDays: WeekdaysModel = WeekdaysModel.setDefault();
   public notebook: any;
   public infoMessage: boolean = false;
   public timeReqiered: boolean = false;
   public inherit: boolean = false;
+  public allowInheritView: boolean = false;
   public parentInherit: boolean = false;
   public enableSchedule: boolean = false;
   public enableIdleTime: boolean = false;
+  public enableIdleTimeView: boolean = false;
+  public considerInactivity: boolean = false;
   public date_format: string = 'YYYY-MM-DD';
   public timeFormat: string = 'HH:mm';
   public weekdays: string[] = ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'];
@@ -55,11 +61,10 @@
   public tzOffset: string =  _moment().format('Z');
   public startTime = { hour: 9, minute: 0, meridiem: 'AM' };
   public endTime = { hour: 8, minute: 0, meridiem: 'PM' };
+  public terminateTime = null;
 
-  public inactivityLimits = { min: 10, max: 10080 };
-
-
-  public idleImplemented: boolean = false;
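+  // Allowed idle-timeout range in minutes (120 min = 2 h; 10080 min = 7 days).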
+  public inactivityLimits = { min: 120, max: 10080 };
+  public integerRegex: string = '^[0-9]*$';
 
   @ViewChild('bindDialog') bindDialog;
   @ViewChild('resourceSelect') resource_select;
@@ -100,14 +105,18 @@
           this.changeDetector.detectChanges();
           this.destination = (type === 'EXPLORATORY') ? this.notebook : resource;
           this.destination.type = type;
-          this.selectedStartWeekDays.setDegault();
-          this.selectedStopWeekDays.setDegault();
+          this.selectedStartWeekDays.reset();
+          this.selectedStopWeekDays.reset();
+          this.allowInheritView = false;
 
-          (this.destination.type === 'СOMPUTATIONAL')
-            ? this.getExploratorySchedule(this.notebook.name, this.destination.computational_name)
-            : this.getExploratorySchedule(this.notebook.name);
-
-          if (this.destination.type === 'СOMPUTATIONAL') this.checkParentInherit();
+          if (this.destination.type === 'СOMPUTATIONAL') {
+            this.allowInheritView = true;
+            this.getExploratorySchedule(this.notebook.name, this.destination.computational_name);
+            this.checkParentInherit();
+          } else if (this.destination.type === 'EXPLORATORY') {
+            this.allowInheritView = this.checkIsActiveSpark();
+            this.getExploratorySchedule(this.notebook.name);
+          }
           this.bindDialog.open(param);
         },
         this.schedulerService
@@ -136,6 +145,7 @@
   public toggleSchedule($event) {
     this.enableSchedule = $event.checked;
     this.timeReqiered = false;
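+    // Show the inherit option for clusters, or for notebooks that still have an active standalone Spark cluster.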
+    this.allowInheritView = this.destination.type === 'СOMPUTATIONAL' || this.checkIsActiveSpark();
 
     this.enableSchedule && this.enableIdleTime && this.toggleIdleTimes({checked: false});
     (this.enableSchedule && !(this.destination.type === 'СOMPUTATIONAL' && this.inherit))
@@ -143,67 +153,105 @@
       : this.schedulerForm.get('startDate').disable();
 
     this.enableSchedule ? this.schedulerForm.get('finishDate').enable() : this.schedulerForm.get('finishDate').disable();
+    this.enableSchedule ? this.schedulerForm.get('terminateDate').enable() : this.schedulerForm.get('terminateDate').disable();
 
-    if (!this.enableSchedule)
-      this.model
-        .resetSchedule(this.notebook.name, this.destination.type === 'СOMPUTATIONAL' ? this.destination.computational_name : null)
-        .subscribe(res => {
-          this.resetDialog();
-        });
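+    // $event.source is present only for user-driven toggles; programmatic calls pass a bare {checked} object.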
+    if (this.enableSchedule && $event.source) this.enableIdleTimeView = false;
   }
 
   public toggleIdleTimes($event) {
+    const control = this.schedulerForm.controls.inactivityTime;
+
     this.enableIdleTime = $event.checked;
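+    // Idle-based and time-based scheduling are mutually exclusive; enabling one turns the other off.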
     this.enableIdleTime && this.enableSchedule && this.toggleSchedule({checked: false});
+    this.allowInheritView = false;
 
     if (!this.enableIdleTime) {
-      this.schedulerForm.controls.inactivityTime.setValue(this.inactivityLimits.min);
+      this.allowInheritView = this.destination.type === 'СOMPUTATIONAL' || this.checkIsActiveSpark();
+      control.setValue('');
+    } else {
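+      // Seed the field with the minimum allowed timeout when it is empty.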
+      !control.value && control.setValue(this.inactivityLimits.min);
+      this.enableIdleTimeView = true;
     }
   }
 
   public setInactivity(...params) {
-    this.model.setInactivityTime(params).subscribe(
-      () => this.toastr.success('Inactivity settings were successfully saved', 'Success!'),
+    this.model.setInactivityTime(params).subscribe((response: any) => {
+        if (response.status === HTTP_STATUS_CODES.OK) {
+          this.toastr.success('Schedule data was successfully saved', 'Success!');
+          this.close();
+        }
+      },
       error => this.toastr.error(error.message || 'Scheduler configuration failed!', 'Oops!'));
   }
 
+  public inactivityCounter($event, action: string): void {
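+    // Step the inactivity timeout by ±10 minutes from the spinner buttons.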
+    $event.preventDefault();
+    const value = this.schedulerForm.controls.inactivityTime.value;
+    const newValue = (action === 'increment' ? Number(value) + 10 : Number(value) - 10);
+    this.schedulerForm.controls.inactivityTime.setValue(newValue);
+  }
+
   public scheduleInstance_btnClick() {
-
-    if (!this.enableIdleTime) {
-      const data = {
-        startDate: this.schedulerForm.controls.startDate.value,
-        finishDate: this.schedulerForm.controls.finishDate.value
-      };
-
-      if (!this.startTime && !this.endTime && this.enableSchedule) {
-        this.timeReqiered = true;
-        return false;
-      }
-      const selectedDays = Object.keys(this.selectedStartWeekDays);
-      const parameters: any = {
-        begin_date: data.startDate ? _moment(data.startDate).format(this.date_format) : null,
-        finish_date: data.finishDate ? _moment(data.finishDate).format(this.date_format) : null,
-        start_time: this.startTime ? this.convertTimeFormat(this.startTime) : null,
-        end_time: this.endTime ? this.convertTimeFormat(this.endTime) : null,
-        start_days_repeat: selectedDays.filter(el => Boolean(this.selectedStartWeekDays[el])).map(day => day.toUpperCase()),
-        stop_days_repeat: selectedDays.filter(el => Boolean(this.selectedStopWeekDays[el])).map(day => day.toUpperCase()),
-        timezone_offset: this.tzOffset,
-        sync_start_required: this.inherit,
-        check_inactivity_required: this.enableIdleTime
-      };
-
-      (this.destination.type === 'СOMPUTATIONAL')
-        ? this.model.confirmAction(this.notebook.name, parameters, this.destination.computational_name)
-        : this.model.confirmAction(this.notebook.name, parameters);
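+    // Save routes by mode: idle-timeout schedule, time-based schedule, or a full reset when both are off.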
+    if (this.enableIdleTimeView) {
+      this.enableIdleTime ? this.setScheduleByInactivity() : this.resetScheduler();
     } else {
-      const data = { check_inactivity_required: true, max_inactivity: this.schedulerForm.controls.inactivityTime.value };
-
-      (this.destination.type === 'СOMPUTATIONAL')
-        ? this.setInactivity(this.notebook.name, data, this.destination.computational_name)
-        : this.setInactivity(this.notebook.name, data);
+      this.enableSchedule ? this.setScheduleByTime() : this.resetScheduler();
     }
   }
 
+  private resetScheduler() {
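+    // Clear the stored schedule for the notebook or cluster and restore the dialog defaults.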
+    const resource = this.destination.type === 'СOMPUTATIONAL' ? this.destination.computational_name : null;
+    this.model.resetSchedule(this.notebook.name, resource)
+      .subscribe(() => {
+        this.resetDialog();
+        this.toastr.success('Schedule data was successfully deleted', 'Success!');
+        this.close();
+      });
+  }
+
+  private setScheduleByTime() {
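+    // Assemble the schedule payload from the form values and submit it.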
+    const data = {
+      startDate: this.schedulerForm.controls.startDate.value,
+      finishDate: this.schedulerForm.controls.finishDate.value,
+      terminateDate: this.schedulerForm.controls.terminateDate.value
+    };
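+    // terminate_datetime travels as one "<date> <time>" string; it is split the same way on load.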
+    const terminateDateTime = (data.terminateDate && this.terminateTime)
+      ? `${_moment(data.terminateDate).format(this.date_format)} ${SchedulerCalculations.convertTimeFormat(this.terminateTime)}`
+      : null;
+
+    if (!this.startTime && !this.endTime && !this.terminateTime && this.enableSchedule) {
+      this.timeReqiered = true;
+      return false;
+    }
+    const selectedDays = Object.keys(this.selectedStartWeekDays);
+    const parameters: ScheduleSchema = {
+      begin_date: data.startDate ? _moment(data.startDate).format(this.date_format) : null,
+      finish_date: data.finishDate ? _moment(data.finishDate).format(this.date_format) : null,
+      start_time: this.startTime ? SchedulerCalculations.convertTimeFormat(this.startTime) : null,
+      end_time: this.endTime ? SchedulerCalculations.convertTimeFormat(this.endTime) : null,
+      start_days_repeat: selectedDays.filter(el => Boolean(this.selectedStartWeekDays[el])).map(day => day.toUpperCase()),
+      stop_days_repeat: selectedDays.filter(el => Boolean(this.selectedStopWeekDays[el])).map(day => day.toUpperCase()),
+      timezone_offset: this.tzOffset,
+      sync_start_required: this.inherit,
+      check_inactivity_required: this.enableIdleTime,
+      terminate_datetime: terminateDateTime
+    };
+
+    if (this.destination.type === 'СOMPUTATIONAL') {
+      this.model.confirmAction(this.notebook.name, parameters, this.destination.computational_name);
+    } else {
+      parameters['consider_inactivity'] = this.considerInactivity;
+      this.model.confirmAction(this.notebook.name, parameters);
+    }
+  }
+
+  private setScheduleByInactivity() {
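+    // Clusters send inactivity settings alone; only notebooks carry the consider_inactivity flag.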
+    const data = { check_inactivity_required: this.enableIdleTime, max_inactivity: this.schedulerForm.controls.inactivityTime.value };
+    (this.destination.type === 'СOMPUTATIONAL')
+      ? this.setInactivity(this.notebook.name, data, this.destination.computational_name)
+      : this.setInactivity(this.notebook.name, {...data, consider_inactivity: this.considerInactivity});
+  }
+
   public close(): void {
     if (this.bindDialog.isOpened) {
       this.bindDialog.close();
@@ -212,36 +260,42 @@
     this.resetDialog();
   }
 
-  private formInit(start?, end?) {
+  private formInit(start?: string, end?: string, terminate?: string) {
     this.schedulerForm = this.formBuilder.group({
       startDate: { disabled: this.inherit, value: start ? _moment(start).format() : null },
       finishDate: { disabled: false, value: end ? _moment(end).format() : null },
-      inactivityTime: [this.inactivityLimits.min , this.validInactivityRange.bind(this)]
+      terminateDate: { disabled: false, value: terminate ? _moment(terminate).format() : null },
+      inactivityTime: [this.inactivityLimits.min,
+        Validators.compose([Validators.pattern(this.integerRegex), this.validInactivityRange.bind(this)])]
     });
   }
 
   private getExploratorySchedule(resource, resource2?) {
     this.schedulerService.getExploratorySchedule(resource, resource2).subscribe(
-      (params: any) => {
+      (params: ScheduleSchema) => {
         if (params) {
           params.start_days_repeat.filter(key => (this.selectedStartWeekDays[key.toLowerCase()] = true));
           params.stop_days_repeat.filter(key => (this.selectedStopWeekDays[key.toLowerCase()] = true));
           this.inherit = params.sync_start_required;
           this.tzOffset = params.timezone_offset;
-          this.startTime = params.start_time ? this.convertTimeFormat(params.start_time) : null;
-          this.endTime = params.end_time ? this.convertTimeFormat(params.end_time) : null;
-          this.formInit(params.begin_date, params.finish_date);
-          this.schedulerForm.controls.inactivityTime.setValue(params.max_inactivity);
-
+          this.startTime = params.start_time ? SchedulerCalculations.convertTimeFormat(params.start_time) : null;
+          this.endTime = params.end_time ? SchedulerCalculations.convertTimeFormat(params.end_time) : null;
+          this.formInit(params.begin_date, params.finish_date, params.terminate_datetime);
+          this.schedulerForm.controls.inactivityTime.setValue(params.max_inactivity || this.inactivityLimits.min);
           this.enableIdleTime = params.check_inactivity_required;
-          this.toggleSchedule({checked: true});
+
+          if (params.terminate_datetime) {
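+            // Stored as "<date> <time>"; split back into the date control and the time picker.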
+            const terminate_datetime = params.terminate_datetime.split(' ');
+            this.schedulerForm.controls.terminateDate.setValue(terminate_datetime[0]);
+            this.terminateTime = SchedulerCalculations.convertTimeFormat(terminate_datetime[1]);
+          }
+
+          (this.enableIdleTime && params.max_inactivity)
+            ? this.toggleIdleTimes({checked: true})
+            : this.toggleSchedule({checked: true});
         }
       },
-      error => {
-        this.toastr.info(error.message || 'Scheduler job data not found!', null);
-        this.resetDialog();
-      }
-    );
+      error => this.resetDialog());
   }
 
   private checkParentInherit() {
@@ -252,31 +306,15 @@
   private validInactivityRange(control) {
     if (control)
       return this.enableIdleTime
-          ? (control.value && control.value >= this.inactivityLimits.min && control.value <= this.inactivityLimits.max ? null : { valid: false })
+          ? (control.value
+            && control.value >= this.inactivityLimits.min
+            && control.value <= this.inactivityLimits.max ? null : { valid: false })
           : control.value;
   }
 
-  private convertTimeFormat(time24: any) {
-    let result;
-    if (typeof time24 === 'string') {
-      let spl = time24.split(':');
-
-      result = {
-        hour: +spl[0] % 12 || 12,
-        minute: +spl[1],
-        meridiem: +spl[0] < 12 || +spl[0] === 24 ? 'AM' : 'PM'
-      };
-    } else {
-      let hours = time24.hour;
-      let minutes = (time24.minute < 10) ? '0' + time24.minute : time24.minute;
-
-      if (time24.meridiem == 'PM' && time24.hour < 12) hours = time24.hour + 12;
-      if (time24.meridiem == 'AM' &&  time24.hour == 12) hours = time24.hour - 12;
-      hours = hours < 10 ? '0' + hours : hours;
-
-      result = `${hours}:${minutes}`;
-    }
-    return result;
+  private checkIsActiveSpark() {
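+    // A standalone Spark cluster (docker.dlab-dataengine) counts as active unless terminated, terminating, or failed.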
+    return this.notebook.resources.length > 0 && this.notebook.resources.some(el => el.image === 'docker.dlab-dataengine'
+      && (el.status !== 'terminated' && el.status !== 'terminating' && el.status !== 'failed'));
   }
 
   private resetDialog() {
@@ -284,8 +322,15 @@
     this.timeReqiered = false;
     this.inherit = false;
     this.enableSchedule = false;
+    this.considerInactivity = false;
+    this.enableIdleTime = false;
     this.tzOffset = _moment().format('Z');
-    this.startTime = this.convertTimeFormat('09:00');
-    this.endTime = this.convertTimeFormat('20:00');
+    this.startTime = SchedulerCalculations.convertTimeFormat('09:00');
+    this.endTime = SchedulerCalculations.convertTimeFormat('20:00');
+    this.terminateTime = null;
+
+    this.schedulerForm.get('startDate').disable();
+    this.schedulerForm.get('finishDate').disable();
+    this.schedulerForm.get('terminateDate').disable();
   }
 }
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.model.ts b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.model.ts
index 2f69bd0..f83be29 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.model.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/scheduler/scheduler.model.ts
@@ -19,14 +19,19 @@
 
 import { SchedulerService } from '../../core/services';
 
-export interface SchedulerParameters {
-  begin_date: string;
-  finish_date: string;
-  start_time: string;
-  end_time: string;
-  days_repeat: Array<string>;
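+// Schedule payload shape exchanged with the scheduler endpoints.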
+export interface ScheduleSchema {
+  begin_date: string | null;
+  finish_date: string | null;
+  start_time: string | null;
+  end_time: string | null;
+  start_days_repeat: Array<string>;
+  stop_days_repeat: Array<string>;
   timezone_offset: string;
   sync_start_required: boolean;
+  max_inactivity?: number;
+  terminate_datetime?: string | null;
+  check_inactivity_required?: boolean;
+  consider_inactivity?: boolean;
 }
 
 export class SchedulerModel {
@@ -61,7 +66,7 @@
   }
 
   public setInactivityTime(params) {
-    let [notebook, data, resource] = params;
+    const [notebook, data, resource] = params;
     return this.scheduleInstance(notebook, data, resource);
   }
 
@@ -89,7 +94,11 @@
     public saturday: boolean
   ) {}
 
-  setDegault(): void {
+  public static setDefault(): WeekdaysModel {
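+    // Factory: a WeekdaysModel with every day unselected.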
+    return new WeekdaysModel(false, false, false, false, false, false, false);
+  }
+
+  reset(): void {
     this.sunday = false;
     this.monday = false;
     this.tuesday = false;
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/dropdowns.component.scss b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/dropdowns.component.scss
index 74ea6e8..7fda83a 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/dropdowns.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/form-controls/dropdowns.component.scss
@@ -117,7 +117,7 @@
       }
     }
   }
-  &.shapes {
+  &.shapes, &.shape {
     .list-menu {
       .list-item {
         text-transform: initial;
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html
index d13c23e..914edbc 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.html
@@ -42,13 +42,28 @@
     <a *ngIf="healthStatus.status" [routerLink]="['/environment_management']" class="statusbar">
       <span class="material-icons" ngClass="{{healthStatus.status || ''}}">radio_button_checked</span>
     </a>
-    <a *ngIf="metadata" class="statusbar" #info (click)="actions.toggle($event, info)">
-      <span class="material-icons meta">share</span>
+    <!-- *ngIf="metadata" -->
+    <a class="statusbar" #info (click)="actions.toggle($event, info)">
+      <span class="material-icons meta">info</span>
     </a>
     <bubble-up #actions class="list-menu" position="bottom-right">
       <div class="app-info">
-          <p><strong>Version: </strong> {{ metadata?.version }}</p>
-          <p><strong>Branch: </strong> {{ metadata?.branch }}</p>
+          <p>
+            <strong>Version: </strong>
+            <span class="ellipsis">{{ metadata?.version }}</span>
+          </p>
+          <p>
+            <strong>Branch: </strong>
+            <span class="ellipsis">{{ metadata?.branch }}</span>
+          </p>
+          <p>
+            <strong>Revision: </strong>
+            <span class="ellipsis">{{ metadata?.commmit }}</span>
+          </p>
+          <p>
+            <strong>Release notes: </strong>
+            <span class="ellipsis">{{ metadata?.release_notes }}</span>
+          </p>
       </div>
     </bubble-up>
     <button class="btn btn-logout" (click)="logout_btnClick()">Log out <span class="user-name">{{currentUserName}}</span></button>
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.scss b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.scss
index e79d196..cc52dc6 100644
--- a/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/navbar/navbar.component.scss
@@ -45,7 +45,7 @@
 
     .statusbar {
       display: flex;
-      width: 60px;
+      width: 50px;
       height: 46px;
       text-decoration: none;
       justify-content: center;
@@ -70,9 +70,9 @@
         }
       }
       .meta {
-        color: #fff;
+        color: #c7d4d6;
         &:hover {
-          color: #c7d4d6;
+          color: #fff;
         }
       }
     }
@@ -100,6 +100,13 @@
       color: #455c74;
       font-size: 16px;
       font-weight: 600;
+      width: 120px;
+      display: inline-block;
+    }
+    span {
+      width: 180px;
+      display: inline-block;
+      vertical-align: bottom;
     }
   }
 }
diff --git a/services/self-service/src/main/resources/webapp/src/app/shared/time-picker/time-picker.component.scss b/services/self-service/src/main/resources/webapp/src/app/shared/time-picker/time-picker.component.scss
index 5d6448e..91ea5e3 100755
--- a/services/self-service/src/main/resources/webapp/src/app/shared/time-picker/time-picker.component.scss
+++ b/services/self-service/src/main/resources/webapp/src/app/shared/time-picker/time-picker.component.scss
@@ -68,6 +68,9 @@
   }
   mat-icon {
     cursor: pointer;
+    color: #757575;
+    font-size: 16px;
+    margin-right: 3px;
   }
 }
 
diff --git a/services/self-service/src/main/resources/webapp/src/styles.scss b/services/self-service/src/main/resources/webapp/src/styles.scss
index 0ac2a99..c02e0ef 100644
--- a/services/self-service/src/main/resources/webapp/src/styles.scss
+++ b/services/self-service/src/main/resources/webapp/src/styles.scss
@@ -335,10 +335,11 @@
       padding: 12px 20px;
       overflow: inherit;
       margin: 0 0 15px;
+      font-size: 14px;
       &::before {
         position: absolute;
         left: -15px;
-        top: -12px;
+        top: -15px;
         font-family: "Material Icons";
         background-color: #fff;
         width: 30px;
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImplTest.java
deleted file mode 100644
index 5ed31cc..0000000
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/InactivityServiceImplTest.java
+++ /dev/null
@@ -1,187 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package com.epam.dlab.backendapi.service.impl;
-
-import com.epam.dlab.auth.SystemUserInfoService;
-import com.epam.dlab.auth.UserInfo;
-import com.epam.dlab.backendapi.SelfServiceApplicationConfiguration;
-import com.epam.dlab.backendapi.dao.ComputationalDAO;
-import com.epam.dlab.backendapi.dao.ExploratoryDAO;
-import com.epam.dlab.backendapi.domain.RequestId;
-import com.epam.dlab.backendapi.service.ComputationalService;
-import com.epam.dlab.backendapi.service.ExploratoryService;
-import com.epam.dlab.backendapi.util.RequestBuilder;
-import com.epam.dlab.dto.SchedulerJobDTO;
-import com.epam.dlab.dto.UserInstanceDTO;
-import com.epam.dlab.dto.UserInstanceStatus;
-import com.epam.dlab.dto.computational.CheckInactivityStatus;
-import com.epam.dlab.dto.computational.CheckInactivityStatusDTO;
-import com.epam.dlab.dto.computational.ComputationalStatusDTO;
-import com.epam.dlab.dto.computational.UserComputationalResource;
-import com.epam.dlab.dto.status.EnvResource;
-import com.epam.dlab.rest.client.RESTService;
-import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.mockito.InjectMocks;
-import org.mockito.Mock;
-import org.mockito.runners.MockitoJUnitRunner;
-
-import java.time.LocalDateTime;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import static com.epam.dlab.dto.UserInstanceStatus.RUNNING;
-import static java.util.Collections.singletonList;
-import static org.mockito.Matchers.any;
-import static org.mockito.Matchers.anyList;
-import static org.mockito.Matchers.anyString;
-import static org.mockito.Matchers.eq;
-import static org.mockito.Matchers.refEq;
-import static org.mockito.Mockito.*;
-
-@RunWith(MockitoJUnitRunner.class)
-public class InactivityServiceImplTest {
-
-	private static final long MAX_INACTIVITY = 10L;
-	private static final String DOCKER_DLAB_DATAENGINE = "docker.dlab-dataengine";
-	private static final String DOCKER_DLAB_DATAENGINE_SERVICE = "docker.dlab-dataengine-service";
-	private final String USER = "test";
-	private final String TOKEN = "token";
-	private final String EXPLORATORY_NAME = "expName";
-	private final String COMP_NAME = "compName";
-	private final LocalDateTime LAST_ACTIVITY = LocalDateTime.now().minusMinutes(MAX_INACTIVITY);
-
-	@Mock
-	private ExploratoryDAO exploratoryDAO;
-	@Mock
-	private ComputationalDAO computationalDAO;
-	@Mock
-	private RESTService provisioningService;
-	@Mock
-	private SelfServiceApplicationConfiguration configuration;
-	@Mock
-	private RequestBuilder requestBuilder;
-	@Mock
-	private RequestId requestId;
-	@Mock
-	private ExploratoryService exploratoryService;
-	@Mock
-	private ComputationalService computationalService;
-	@Mock
-	private SystemUserInfoService systemUserInfoService;
-	@InjectMocks
-	private InactivityServiceImpl inactivityService;
-
-	@Test
-	@SuppressWarnings("unchecked")
-	public void stopClustersByConditionForDataengines() {
-		CheckInactivityStatusDTO dto = new CheckInactivityStatusDTO()
-				.withStatus(CheckInactivityStatus.COMPLETED)
-				.withResources(singletonList(new EnvResource().withName(COMP_NAME).withId("someId")));
-		final List<UserComputationalResource> computationalResources =
-				singletonList(getUserComputationalResource(RUNNING, DOCKER_DLAB_DATAENGINE));
-		final UserInstanceDTO exploratory = getUserInstanceDTO(computationalResources);
-
-		when(exploratoryDAO.getInstancesByComputationalIdsAndStatus(anyList(), any(UserInstanceStatus.class)))
-				.thenReturn(singletonList(exploratory));
-		when(systemUserInfoService.create(anyString())).thenReturn(getUserInfo());
-
-		inactivityService.stopClustersByInactivity(dto.getResources().stream().map(EnvResource::getId).collect(Collectors.toList()));
-
-		verify(exploratoryDAO).getInstancesByComputationalIdsAndStatus(singletonList("someId"), RUNNING);
-		verify(systemUserInfoService).create(USER);
-		verify(computationalService).stopSparkCluster(refEq(getUserInfo()), eq(EXPLORATORY_NAME), eq(COMP_NAME));
-		verify(exploratoryDAO).getInstancesByComputationalIdsAndStatus(singletonList("someId"), RUNNING);
-		verifyNoMoreInteractions(exploratoryService, configuration, systemUserInfoService, computationalDAO,
-				exploratoryDAO, computationalService, requestBuilder, provisioningService, requestId);
-	}
-
-	@Test
-	@SuppressWarnings("unchecked")
-	public void stopClustersByConditionForDataengineServices() {
-		CheckInactivityStatusDTO dto = new CheckInactivityStatusDTO()
-				.withStatus(CheckInactivityStatus.COMPLETED)
-				.withResources(singletonList(new EnvResource().withName(COMP_NAME).withId("someId")));
-		final UserComputationalResource ucResource = getUserComputationalResource(RUNNING,
-				DOCKER_DLAB_DATAENGINE_SERVICE);
-		final UserInstanceDTO exploratory = getUserInstanceDTO(singletonList(ucResource));
-		;
-		when(exploratoryDAO.getInstancesByComputationalIdsAndStatus(anyList(), any(UserInstanceStatus.class))).thenReturn(singletonList(
-				exploratory.withResources(singletonList(ucResource))));
-		when(systemUserInfoService.create(anyString())).thenReturn(getUserInfo());
-
-		inactivityService.stopClustersByInactivity(dto.getResources().stream().map(EnvResource::getId).collect(Collectors.toList()));
-
-		verify(exploratoryDAO).getInstancesByComputationalIdsAndStatus(singletonList("someId"), RUNNING);
-		verify(systemUserInfoService).create(USER);
-		verify(computationalService).terminateComputational(refEq(getUserInfo()), eq(EXPLORATORY_NAME), eq(COMP_NAME));
-		verifyNoMoreInteractions(exploratoryService, configuration, systemUserInfoService, computationalDAO,
-				exploratoryDAO, requestBuilder, provisioningService, requestId, computationalService);
-	}
-
-	@Test
-	@SuppressWarnings("unchecked")
-	public void updateLastActivityForClusters() {
-		CheckInactivityStatusDTO dto = new CheckInactivityStatusDTO()
-				.withStatus(CheckInactivityStatus.COMPLETED)
-				.withResources(singletonList(new EnvResource().withName(COMP_NAME).withId("someId").withLastActivity(LAST_ACTIVITY)));
-		doNothing().when(computationalDAO).updateLastActivityDateForInstanceId(anyString(), any(LocalDateTime.class));
-
-		inactivityService.updateLastActivityForClusters(dto.getResources());
-
-		verify(computationalDAO).updateLastActivityDateForInstanceId("someId", LAST_ACTIVITY);
-		verifyNoMoreInteractions(exploratoryService, computationalDAO);
-	}
-
-
-	private UserComputationalResource getUserComputationalResource(UserInstanceStatus status, String imageName) {
-		UserComputationalResource ucResource = new UserComputationalResource();
-		ucResource.setComputationalName(COMP_NAME);
-		ucResource.setImageName("dataengine");
-		ucResource.setImageName(imageName);
-		ucResource.setStatus(status.toString());
-		ucResource.setLastActivity(LAST_ACTIVITY);
-		final SchedulerJobDTO schedulerData = new SchedulerJobDTO();
-		schedulerData.setCheckInactivityRequired(true);
-		schedulerData.setMaxInactivity(MAX_INACTIVITY);
-		ucResource.setSchedulerData(schedulerData);
-		return ucResource;
-	}
-
-	private ComputationalStatusDTO getComputationalStatusDTOWithStatus(String status) {
-		return new ComputationalStatusDTO()
-				.withUser(USER)
-				.withExploratoryName(EXPLORATORY_NAME)
-				.withComputationalName(COMP_NAME)
-				.withStatus(UserInstanceStatus.of(status));
-	}
-
-	private UserInfo getUserInfo() {
-		return new UserInfo(USER, TOKEN);
-	}
-
-	private UserInstanceDTO getUserInstanceDTO(List<UserComputationalResource> computationalResources) {
-		final UserInstanceDTO exploratory = new UserInstanceDTO();
-		exploratory.setExploratoryId("someId");
-		exploratory.setExploratoryName(EXPLORATORY_NAME);
-		exploratory.setUser(USER);
-		exploratory.setResources(computationalResources);
-		return exploratory;
-	}
-}
\ No newline at end of file
diff --git a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImplTest.java b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImplTest.java
index 161b72d..4ca2ca0 100644
--- a/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/dlab/backendapi/service/impl/SchedulerJobServiceImplTest.java
@@ -22,10 +22,8 @@
 import com.epam.dlab.auth.SystemUserInfoService;
 import com.epam.dlab.auth.UserInfo;
 import com.epam.dlab.backendapi.dao.ComputationalDAO;
-import com.epam.dlab.backendapi.dao.EnvDAO;
 import com.epam.dlab.backendapi.dao.ExploratoryDAO;
 import com.epam.dlab.backendapi.dao.SchedulerJobDAO;
-import com.epam.dlab.backendapi.domain.RequestId;
 import com.epam.dlab.backendapi.service.ComputationalService;
 import com.epam.dlab.backendapi.service.ExploratoryService;
 import com.epam.dlab.dto.SchedulerJobDTO;
@@ -34,11 +32,9 @@
 import com.epam.dlab.dto.aws.computational.AwsComputationalResource;
 import com.epam.dlab.dto.base.DataEngineType;
 import com.epam.dlab.dto.computational.UserComputationalResource;
-import com.epam.dlab.dto.status.EnvResource;
 import com.epam.dlab.exceptions.ResourceInappropriateStateException;
 import com.epam.dlab.exceptions.ResourceNotFoundException;
 import com.epam.dlab.model.scheduler.SchedulerJobData;
-import com.epam.dlab.rest.client.RESTService;
 import com.mongodb.client.result.UpdateResult;
 import org.junit.Before;
 import org.junit.Test;
@@ -49,10 +45,7 @@
 
 import java.time.*;
 import java.time.temporal.ChronoUnit;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.List;
-import java.util.Optional;
+import java.util.*;
 import java.util.stream.Collectors;
 
 import static com.epam.dlab.dto.UserInstanceStatus.*;
@@ -67,8 +60,6 @@
 	private final String USER = "test";
 	private final String EXPLORATORY_NAME = "explName";
 	private final String COMPUTATIONAL_NAME = "compName";
-
-	private UserInfo userInfo;
 	private SchedulerJobDTO schedulerJobDTO;
 	private UserInstanceDTO userInstance;
 
@@ -84,12 +75,6 @@
 	private ExploratoryService exploratoryService;
 	@Mock
 	private ComputationalService computationalService;
-	@Mock
-	private EnvDAO envDAO;
-	@Mock
-	private RESTService provisioningService;
-	@Mock
-	private RequestId requestId;
 
 	@InjectMocks
 	private SchedulerJobServiceImpl schedulerJobService;
@@ -97,7 +82,6 @@
 
 	@Before
 	public void setUp() {
-		userInfo = getUserInfo();
 		schedulerJobDTO = getSchedulerJobDTO(LocalDate.now(), LocalDate.now().plusDays(1),
 				Arrays.asList(DayOfWeek.values()), Arrays.asList(DayOfWeek.values()), false,
 				LocalDateTime.of(LocalDate.now(), LocalTime.now().truncatedTo(ChronoUnit.MINUTES)),
@@ -400,34 +384,6 @@
 	}
 
 	@Test
-	public void executeCheckClusterInactivityJob() {
-		EnvResource resource = new EnvResource();
-		when(envDAO.findRunningResourcesForCheckInactivity()).thenReturn(singletonList(resource));
-		when(provisioningService.post(anyString(), anyString(), anyListOf(EnvResource.class), any()))
-				.thenReturn("someUuid");
-		when(requestId.put(anyString(), anyString())).thenReturn("someUuid");
-
-		schedulerJobService.updateRunningResourcesLastActivity(userInfo);
-
-		verify(envDAO).findRunningResourcesForCheckInactivity();
-		verify(provisioningService).post("/infrastructure/check_inactivity", "token",
-				singletonList(resource), String.class);
-		verify(requestId).put(USER, "someUuid");
-		verifyNoMoreInteractions(envDAO, provisioningService, requestId);
-	}
-
-	@Test
-	public void executeCheckClusterInactivityJobWithoutRunningClusters() {
-		when(envDAO.findRunningResourcesForCheckInactivity()).thenReturn(Collections.emptyList());
-
-		schedulerJobService.updateRunningResourcesLastActivity(userInfo);
-
-		verify(envDAO).findRunningResourcesForCheckInactivity();
-		verifyNoMoreInteractions(envDAO);
-		verifyZeroInteractions(provisioningService, requestId);
-	}
-
-	@Test
 	public void testStartComputationalByScheduler() {
 		when(schedulerJobDAO.getComputationalSchedulerDataWithOneOfStatus(any(UserInstanceStatus.class),
 				any(DataEngineType.class), anyVararg())).thenReturn(singletonList(getSchedulerJobData(LocalDate.now(),
@@ -627,26 +583,32 @@
 
 	@Test
 	public void testStopExploratoryByScheduler() {
-		when(schedulerJobDAO.getExploratorySchedulerDataWithStatus(any(UserInstanceStatus.class))).thenReturn(singletonList(getSchedulerJobData(LocalDate.now(), LocalDate.now().plusDays(1), Arrays.asList(DayOfWeek.values()), Arrays.asList(DayOfWeek.values()), LocalDateTime.of(LocalDate.now(),
-				LocalTime.now().truncatedTo(ChronoUnit.MINUTES)), false, USER,
-				LocalTime.now().truncatedTo(ChronoUnit.MINUTES))));
+		when(schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(any(UserInstanceStatus.class), any(Date.class))).
+				thenReturn(singletonList(getSchedulerJobData(LocalDate.now(), LocalDate.now().plusDays(1),
+						Arrays.asList(DayOfWeek.values()), Arrays.asList(DayOfWeek.values()),
+						LocalDateTime.of(LocalDate.now(),
+								LocalTime.now().truncatedTo(ChronoUnit.MINUTES)), false, USER,
+						LocalTime.now().truncatedTo(ChronoUnit.MINUTES))));
 		when(systemUserService.create(anyString())).thenReturn(getUserInfo());
 
 		schedulerJobService.stopExploratoryByScheduler();
 
 		verify(systemUserService).create(USER);
-		verify(schedulerJobDAO).getExploratorySchedulerDataWithStatus(RUNNING);
+		verify(schedulerJobDAO).getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(eq(RUNNING),
+				any(Date.class));
 		verify(exploratoryService).stop(refEq(getUserInfo()), eq(EXPLORATORY_NAME));
 		verifyNoMoreInteractions(systemUserService, schedulerJobDAO, exploratoryService);
 	}
 
 	@Test
 	public void testStopExploratoryBySchedulerWhenSchedulerIsNotConfigured() {
-		when(schedulerJobDAO.getExploratorySchedulerDataWithStatus(any(UserInstanceStatus.class))).thenReturn(Collections.emptyList());
+		when(schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(any(UserInstanceStatus.class), any(Date.class)))
+				.thenReturn(Collections.emptyList());
 
 		schedulerJobService.stopExploratoryByScheduler();
 
-		verify(schedulerJobDAO).getExploratorySchedulerDataWithStatus(RUNNING);
+		verify(schedulerJobDAO).getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(eq(RUNNING),
+				any(Date.class));
 		verifyNoMoreInteractions(schedulerJobDAO);
 		verifyZeroInteractions(systemUserService, exploratoryService);
 	}
@@ -660,12 +622,13 @@
 						LocalTime.now().truncatedTo(ChronoUnit.MINUTES)), false, USER,
 				LocalTime.now().truncatedTo(ChronoUnit.MINUTES)
 		);
-		when(schedulerJobDAO.getExploratorySchedulerDataWithStatus(any(UserInstanceStatus.class))).thenReturn(singletonList(schedulerJobData));
+		when(schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(any(UserInstanceStatus.class), any(Date.class))).thenReturn(singletonList(schedulerJobData));
 		when(systemUserService.create(anyString())).thenReturn(getUserInfo());
 
 		schedulerJobService.stopExploratoryByScheduler();
 
-		verify(schedulerJobDAO).getExploratorySchedulerDataWithStatus(RUNNING);
+		verify(schedulerJobDAO).getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(eq(RUNNING),
+				any(Date.class));
 		verifyNoMoreInteractions(systemUserService, schedulerJobDAO, exploratoryService);
 	}
 
@@ -680,12 +643,14 @@
 		final SchedulerJobData schedulerJobData = getSchedulerJobData(beginDate, finishDate, startDays, stopDays,
 				terminateDateTime, false, USER, LocalTime.now().truncatedTo(ChronoUnit.MINUTES)
 		);
-		when(schedulerJobDAO.getExploratorySchedulerDataWithStatus(any(UserInstanceStatus.class))).thenReturn(singletonList(schedulerJobData));
+		when(schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(any(UserInstanceStatus.class), any(Date.class)))
+				.thenReturn(singletonList(schedulerJobData));
 		when(systemUserService.create(anyString())).thenReturn(getUserInfo());
 
 		schedulerJobService.stopExploratoryByScheduler();
 
-		verify(schedulerJobDAO).getExploratorySchedulerDataWithStatus(RUNNING);
+		verify(schedulerJobDAO).getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(eq(RUNNING),
+				any(Date.class));
 		verifyNoMoreInteractions(systemUserService, schedulerJobDAO, exploratoryService);
 	}
 
@@ -699,12 +664,13 @@
 						LocalTime.now().truncatedTo(ChronoUnit.MINUTES)), false, USER,
 				LocalTime.now().truncatedTo(ChronoUnit.MINUTES)
 		);
-		when(schedulerJobDAO.getExploratorySchedulerDataWithStatus(any(UserInstanceStatus.class))).thenReturn(singletonList(schedulerJobData));
+		when(schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(any(UserInstanceStatus.class), any(Date.class))).thenReturn(singletonList(schedulerJobData));
 		when(systemUserService.create(anyString())).thenReturn(getUserInfo());
 
 		schedulerJobService.stopExploratoryByScheduler();
 
-		verify(schedulerJobDAO).getExploratorySchedulerDataWithStatus(RUNNING);
+		verify(schedulerJobDAO).getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(eq(RUNNING),
+				any(Date.class));
 		verifyNoMoreInteractions(systemUserService, schedulerJobDAO, exploratoryService);
 	}
 
@@ -1075,7 +1041,7 @@
 				LocalDateTime.of(now, currentTime.plusMinutes(minutesOffset).truncatedTo(ChronoUnit.MINUTES)),
 				false, "user123", offsetTime.truncatedTo(ChronoUnit.MINUTES));
 
-		when(schedulerJobDAO.getExploratorySchedulerDataWithStatus(any(UserInstanceStatus.class))).thenReturn(Arrays.asList(schedulerJobData, secondScheduler));
+		when(schedulerJobDAO.getExploratorySchedulerWithStatusAndClusterLastActivityLessThan(any(UserInstanceStatus.class), any(Date.class))).thenReturn(Arrays.asList(schedulerJobData, secondScheduler));
 		when(systemUserService.create(anyString())).thenReturn(getUserInfo());
 		when(schedulerJobDAO.getComputationalSchedulerDataWithOneOfStatus(any(UserInstanceStatus.class),
 				any(DataEngineType.class), anyVararg())).thenReturn(singletonList(schedulerJobData));