Merge pull request #1238 from apache/DATALAB-2515
[DATALAB-2515]: fixed "It is impossible to install library" issue (removed deprecated pip2 group, updated TensorFlow templates and CUDA driver installation)
diff --git a/README.md b/README.md
index df52b48..7c24a2e 100644
--- a/README.md
+++ b/README.md
@@ -1171,7 +1171,6 @@
```
{
"os_pkg": {"htop": "2.0.1-1ubuntu1", "python-mysqldb": "1.3.7-1build2"},
- "pip2": {"requests": "N/A", "configparser": "N/A"},
"pip3": {"configparser": "N/A"},
"r_pkg": {"rmarkdown": "1.5"},
"others": {"Keras": "N/A"}
@@ -1190,7 +1189,7 @@
| notebook\_instance\_name | Name of the Notebook instance to terminate |
| aws\_region | AWS region where infrastructure was deployed |
| application | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning) |
-| libs | List of additional libraries in JSON format with type (os_pkg/pip2/pip3/r_pkg/others)|
+| libs | List of additional libraries in JSON format with type (os_pkg/pip3/r_pkg/others)|
| action | lib_install |
**Example** of additional_libs parameter:
@@ -1201,7 +1200,6 @@
"libs": [
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
{"group": "others", "name": "Keras"}
@@ -1237,7 +1235,7 @@
| notebook\_instance\_name | Name of the Notebook instance to terminate |
| azure\_resource\_group\_name | Name of the resource group where all DataLab resources are being provisioned |
| application | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning) |
-| libs | List of additional libraries in JSON format with type (os_pkg/pip2/pip3/r_pkg/others)|
+| libs | List of additional libraries in JSON format with type (os_pkg/pip3/r_pkg/others)|
| action | lib_install |
</details>
@@ -1269,7 +1267,7 @@
| gcp\_project\_id | ID of GCP project |
| gcp\_zone | GCP zone name |
| application | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning) |
-| libs | List of additional libraries in JSON format with type (os_pkg/pip2/pip3/r_pkg/others)|
+| libs | List of additional libraries in JSON format with type (os_pkg/pip3/r_pkg/others)|
| action | lib_install |
</details>
@@ -1462,7 +1460,6 @@
```
{
"os_pkg": {"htop": "2.0.1-1ubuntu1", "python-mysqldb": "1.3.7-1build2"},
- "pip2": {"requests": "N/A", "configparser": "N/A"},
"pip3": {"configparser": "N/A"},
"r_pkg": {"rmarkdown": "1.5"},
"others": {"Keras": "N/A"}
@@ -1480,7 +1477,7 @@
| computational\_id | Name of Dataengine-service |
| aws\_region | AWS region where infrastructure was deployed |
| application | Type of the notebook template (jupyter/rstudio/zeppelin/tensor/deeplearning) |
-| libs | List of additional libraries in JSON format with type (os_pkg/pip2/pip3/r_pkg/others)|
+| libs | List of additional libraries in JSON format with type (os_pkg/pip3/r_pkg/others)|
| action | lib_install |
**Example** of additional_libs parameter:
@@ -1491,7 +1488,6 @@
"libs": [
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
{"group": "others", "name": "Keras"}
@@ -1678,7 +1674,6 @@
```
{
"os_pkg": {"htop": "2.0.1-1ubuntu1", "python-mysqldb": "1.3.7-1build2"},
- "pip2": {"requests": "N/A", "configparser": "N/A"},
"pip3": {"configparser": "N/A"},
"r_pkg": {"rmarkdown": "1.5"},
"others": {"Keras": "N/A"}
@@ -1706,7 +1701,6 @@
"libs": [
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
{"group": "others", "name": "Keras"}
diff --git a/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json b/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
index 891cc5e..06f671a 100644
--- a/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/deeplearning_description.json
@@ -8,7 +8,7 @@
"exploratory_environment_versions" :
[
{
- "template_name": "Deep Learning AMI (Ubuntu 18.04) Version 42.1",
+ "template_name": "Deep Learning AMI Version 42.1",
"description": "MXNet-1.8.0 & 1.7.0, TensorFlow-2.4.1, 2.1.3 & 1.15.5, PyTorch-1.4.0 & 1.8.0, Neuron, & others. NVIDIA CUDA, cuDNN, NCCL, Intel MKL-DNN, Docker, NVIDIA-Docker & EFA support. Uses Anaconda virtual environments, configured to keep the different framework installations separate and easy to switch between frameworks as Jupyter kernels.",
"environment_type": "exploratory",
"version": "Deep Learning AMI (Ubuntu 18.04) Version 42.1",
diff --git a/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_description.json b/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_description.json
index 31b350d..51fc3a4 100644
--- a/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/tensor-rstudio_description.json
@@ -8,10 +8,10 @@
"exploratory_environment_versions" :
[
{
- "template_name": "RStudio with TensorFlow 2.1.0",
+ "template_name": "RStudio with TensorFlow 2.5.0",
"description": "Base image with TensorFlow and RStudio node creation routines",
"environment_type": "exploratory",
- "version": "tensorflow_gpu-2.1.0",
+ "version": "tensorflow_gpu-2.5.0",
"vendor": "AWS"
}
]
diff --git a/infrastructure-provisioning/src/general/files/aws/tensor_description.json b/infrastructure-provisioning/src/general/files/aws/tensor_description.json
index 82a7f4f..20402e6 100644
--- a/infrastructure-provisioning/src/general/files/aws/tensor_description.json
+++ b/infrastructure-provisioning/src/general/files/aws/tensor_description.json
@@ -8,10 +8,10 @@
"exploratory_environment_versions" :
[
{
- "template_name": "Jupyter with TensorFlow 2.3.2",
+ "template_name": "Jupyter with TensorFlow 2.5.0",
"description": "Base image with TensorFlow and Jupyter node creation routines",
"environment_type": "exploratory",
- "version": "tensorflow_gpu-2.3.2",
+ "version": "tensorflow_gpu-2.5.0",
"vendor": "AWS"
}
]
diff --git a/infrastructure-provisioning/src/general/files/azure/tensor_description.json b/infrastructure-provisioning/src/general/files/azure/tensor_description.json
index 4c05654..3a89e60 100644
--- a/infrastructure-provisioning/src/general/files/azure/tensor_description.json
+++ b/infrastructure-provisioning/src/general/files/azure/tensor_description.json
@@ -8,10 +8,10 @@
"exploratory_environment_versions" :
[
{
- "template_name": "Jupyter with TensorFlow 2.3.2",
+ "template_name": "Jupyter with TensorFlow 2.5.0",
"description": "Base image with TensorFlow and Jupyter node creation routines",
"environment_type": "exploratory",
- "version": "tensorflow_gpu-2.3.2",
+ "version": "tensorflow_gpu-2.5.0",
"vendor": "Azure"
}
]
diff --git a/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_description.json b/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_description.json
index 39335d3..14f33eb 100644
--- a/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_description.json
+++ b/infrastructure-provisioning/src/general/files/gcp/tensor-rstudio_description.json
@@ -27,10 +27,10 @@
},
"exploratory_environment_versions": [
{
- "template_name": "RStudio with TensorFlow 2.1.0",
+ "template_name": "RStudio with TensorFlow 2.5.0",
"description": "Base image with TensorFlow and RStudio node creation routines",
"environment_type": "exploratory",
- "version": "tensorflow_gpu-2.1.0",
+ "version": "tensorflow_gpu-2.5.0",
"vendor": "GCP"
}
]
diff --git a/infrastructure-provisioning/src/general/files/gcp/tensor_description.json b/infrastructure-provisioning/src/general/files/gcp/tensor_description.json
index eebc7fd..8561110 100644
--- a/infrastructure-provisioning/src/general/files/gcp/tensor_description.json
+++ b/infrastructure-provisioning/src/general/files/gcp/tensor_description.json
@@ -10,10 +10,10 @@
"exploratory_environment_versions" :
[
{
- "template_name": "Jupyter with TensorFlow 2.1.0",
+ "template_name": "Jupyter with TensorFlow 2.5.0",
"description": "Base image with TensorFlow and Jupyter node creation routines",
"environment_type": "exploratory",
- "version": "tensorflow_gpu-2.1.0",
+ "version": "tensorflow_gpu-2.5.0",
"vendor": "GCP"
}
]
diff --git a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
index 94c86a1..66bebfb 100644
--- a/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
+++ b/infrastructure-provisioning/src/general/lib/os/debian/notebook_lib.py
@@ -279,10 +279,13 @@
if not exists(datalab.fab.conn,'/home/{}/.ensure_dir/nvidia_ensured'.format(os_user)):
try:
# install nvidia drivers
- datalab.fab.conn.sudo('wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-ubuntu2004.pin')
+ datalab.fab.conn.sudo(
+ 'wget https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/cuda-ubuntu2004.pin')
datalab.fab.conn.sudo('mv cuda-ubuntu2004.pin /etc/apt/preferences.d/cuda-repository-pin-600')
- datalab.fab.conn.sudo('apt-key adv --fetch-keys https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/7fa2af80.pub')
- datalab.fab.conn.sudo('add-apt-repository "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64/ /"')
+ datalab.fab.conn.sudo(
+ 'wget https://developer.download.nvidia.com/compute/cuda/11.4.0/local_installers/cuda-repo-ubuntu2004-11-4-local_11.4.0-470.42.01-1_amd64.deb')
+ datalab.fab.conn.sudo('dpkg -i cuda-repo-ubuntu2004-11-4-local_11.4.0-470.42.01-1_amd64.deb')
+ datalab.fab.conn.sudo('apt-key add /var/cuda-repo-ubuntu2004-11-4-local/7fa2af80.pub')
manage_pkg('update', 'remote', '')
manage_pkg('-y install', 'remote', 'cuda')
#clean space on disk
diff --git a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
index 270ae56..ad7f59d 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/common_prepare_notebook.py
@@ -80,7 +80,7 @@
notebook_config['project_name'],
notebook_config['endpoint_name'],
notebook_config['exploratory_name'], args.uuid)
- notebook_config['primary_disk_size'] = (lambda x: '30' if x == 'deeplearning' else '16')(
+ notebook_config['primary_disk_size'] = (lambda x: '100' if x == 'deeplearning' else '16')(
os.environ['application'])
notebook_config['role_profile_name'] = '{}-{}-{}-nb-de-profile'.format(
notebook_config['service_base_name'], notebook_config['project_name'], notebook_config['endpoint_name'])
@@ -105,7 +105,7 @@
os.environ['conf_os_family'])])
image_id = datalab.meta_lib.get_ami_id_by_name(notebook_config['notebook_image_name'], 'available')
if os.environ['conf_deeplearning_cloud_ami'] == 'true' and os.environ['application'] == 'deeplearning' and image_id == '':
- image_id = datalab.meta_lib.get_ami_id(notebook_config['notebook_image_name'])
+ image_id = datalab.meta_lib.get_ami_id(os.environ['notebook_image_name'])
if image_id != '':
notebook_config['ami_id'] = image_id
print('Pre-configured image found. Using: {}'.format(notebook_config['ami_id']))
diff --git a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
index 3b578e5..4c18945 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/deeplearning_configure.py
@@ -345,8 +345,8 @@
"exploratory_url": [
{"description": "Jupyter",
"url": jupyter_notebook_access_url},
- {"description": "TensorBoard",
- "url": tensorboard_access_url},
+ #{"description": "TensorBoard",
+ # "url": tensorboard_access_url},
{"description": "Ungit",
"url": jupyter_ungit_access_url}#,
#{"description": "Jupyter (via tunnel)",
diff --git a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
index 88a2864..be91e28 100644
--- a/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/azure/deeplearning_configure.py
@@ -360,8 +360,8 @@
"exploratory_url": [
{"description": "Jupyter",
"url": jupyter_notebook_access_url},
- {"description": "TensorBoard",
- "url": tensorboard_access_url},
+ #{"description": "TensorBoard",
+ # "url": tensorboard_access_url},
{"description": "Ungit",
"url": jupyter_ungit_access_url}#,
#{"description": "Jupyter (via tunnel)",
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
index 83d8740..3229525 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/dataengine-service_prepare.py
@@ -140,12 +140,12 @@
subprocess.run("echo Waiting for changes to propagate; sleep 10", shell=True, check=True)
- if 'masterGPUCount' in os.environ:
+ if 'master_gpu_count' in os.environ:
dataproc_cluster = json.loads(open('/root/templates/dataengine-service_cluster_with_gpu.json').read())
- dataproc_cluster['config']['masterConfig']['accelerators'][0]['acceleratorCount'] = int(os.environ['masterGPUCount'])
- dataproc_cluster['config']['masterConfig']['accelerators'][0]['acceleratorTypeUri'] = os.environ['masterGPUType']
- dataproc_cluster['config']['workerConfig']['accelerators'][0]['acceleratorCount'] = int(os.environ['slaveGPUCount'])
- dataproc_cluster['config']['workerConfig']['accelerators'][0]['acceleratorTypeUri'] = os.environ['slaveGPUType']
+ dataproc_cluster['config']['masterConfig']['accelerators'][0]['acceleratorCount'] = int(os.environ['master_gpu_count'])
+ dataproc_cluster['config']['masterConfig']['accelerators'][0]['acceleratorTypeUri'] = os.environ['master_gpu_type']
+ dataproc_cluster['config']['workerConfig']['accelerators'][0]['acceleratorCount'] = int(os.environ['slave_gpu_count'])
+ dataproc_cluster['config']['workerConfig']['accelerators'][0]['acceleratorTypeUri'] = os.environ['slave_gpu_type']
gpu_driver = 'gs://goog-dataproc-initialization-actions-{}/gpu/install_gpu_driver.sh'.format(dataproc_conf['region'])
dataproc_cluster['config']['initializationActions'][0]['executableFile'] = gpu_driver
diff --git a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
index 6830d00..3be0192 100644
--- a/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
+++ b/infrastructure-provisioning/src/general/scripts/gcp/deeplearning_configure.py
@@ -286,7 +286,7 @@
print("User key name: {}".format(notebook_config['project_name']))
#print("TensorBoard URL: {}".format(tensorboard_url))
#print("TensorBoard log dir: /var/log/tensorboard")
- print("Jupyter URL: {}".format(jupyter_ip_url))
+ print("JupyterLab URL: {}".format(jupyter_ip_url))
print("Ungit URL: {}".format(ungit_ip_url))
print('SSH access (from Edge node, via IP address): ssh -i {0}.pem {1}@{2}'.format(
notebook_config['key_name'], notebook_config['datalab_ssh_user'], ip_address))
@@ -300,7 +300,7 @@
"notebook_name": notebook_config['instance_name'],
"Action": "Create new notebook server",
"exploratory_url": [
- {"description": "Jupyter",
+ {"description": "JupyterLab",
"url": jupyter_notebook_access_url},
#{"description": "TensorBoard",
# "url": tensorboard_access_url},
diff --git a/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine-service_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine-service_install_additional_libs/config.xml
index fdf7930..5c9c4b3 100644
--- a/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine-service_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine-service_install_additional_libs/config.xml
@@ -61,11 +61,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine_install_additional_libs/config.xml
index 9b3600e..abc6599 100644
--- a/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/dataengine_install_additional_libs/config.xml
@@ -61,11 +61,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/notebook_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/notebook_install_additional_libs/config.xml
index 52f5aae..719035d 100644
--- a/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/notebook_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/aws/jenkins_jobs/notebook_install_additional_libs/config.xml
@@ -65,11 +65,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/dataengine_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/dataengine_install_additional_libs/config.xml
index 9b3600e..abc6599 100644
--- a/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/dataengine_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/dataengine_install_additional_libs/config.xml
@@ -61,11 +61,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/notebook_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/notebook_install_additional_libs/config.xml
index 52f5aae..719035d 100644
--- a/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/notebook_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/azure/jenkins_jobs/notebook_install_additional_libs/config.xml
@@ -65,11 +65,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine-service_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine-service_install_additional_libs/config.xml
index 703803e..5ddce0c 100644
--- a/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine-service_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine-service_install_additional_libs/config.xml
@@ -65,11 +65,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine_install_additional_libs/config.xml
index 9b3600e..abc6599 100644
--- a/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/dataengine_install_additional_libs/config.xml
@@ -61,11 +61,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/notebook_install_additional_libs/config.xml b/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/notebook_install_additional_libs/config.xml
index 52f5aae..719035d 100644
--- a/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/notebook_install_additional_libs/config.xml
+++ b/infrastructure-provisioning/src/general/templates/gcp/jenkins_jobs/notebook_install_additional_libs/config.xml
@@ -65,11 +65,10 @@
[
{"group": "os_pkg", "name": "nmap"},
{"group": "os_pkg", "name": "htop"},
- {"group": "pip2", "name": "requests"},
{"group": "pip3", "name": "configparser"},
{"group": "r_pkg", "name": "rmarkdown"},
]
- Types: for OS - os_pkg, for Python - pip2/pip3, for R - r_pkg
+ Types: for OS - os_pkg, for Python - pip3, for R - r_pkg
NOTE: You need to escape each double quote.
For Example: {\"group\": \"os_pkg\", \"name\": \"htop\"}
</description>
diff --git a/integration-tests/examples/test_libs/deeplearning/lib_groups.json b/integration-tests/examples/test_libs/deeplearning/lib_groups.json
index 0e8d040..b77b8c2 100644
--- a/integration-tests/examples/test_libs/deeplearning/lib_groups.json
+++ b/integration-tests/examples/test_libs/deeplearning/lib_groups.json
@@ -1,5 +1,4 @@
[
- "pip2",
"pip3",
"others",
"os_pkg"
diff --git a/integration-tests/examples/test_libs/deeplearning/lib_list.json b/integration-tests/examples/test_libs/deeplearning/lib_list.json
index 8e6d23d..d360bd8 100644
--- a/integration-tests/examples/test_libs/deeplearning/lib_list.json
+++ b/integration-tests/examples/test_libs/deeplearning/lib_list.json
@@ -8,10 +8,6 @@
"start_with": "gh"
},
{
- "group": "pip2",
- "start_with": "sp"
- },
- {
"group": "pip3",
"start_with": "sp"
}
diff --git a/integration-tests/examples/test_libs/jupyter/lib_groups.json b/integration-tests/examples/test_libs/jupyter/lib_groups.json
index 821acb6..87e47a5 100644
--- a/integration-tests/examples/test_libs/jupyter/lib_groups.json
+++ b/integration-tests/examples/test_libs/jupyter/lib_groups.json
@@ -1,5 +1,4 @@
[
- "pip2",
"pip3",
"others",
"os_pkg",
diff --git a/integration-tests/examples/test_libs/jupyter/lib_list.json b/integration-tests/examples/test_libs/jupyter/lib_list.json
index 7986430..5590429 100644
--- a/integration-tests/examples/test_libs/jupyter/lib_list.json
+++ b/integration-tests/examples/test_libs/jupyter/lib_list.json
@@ -12,10 +12,6 @@
"start_with": "sp"
},
{
- "group": "pip2",
- "start_with": "sp"
- },
- {
"group": "pip3",
"start_with": "sp"
}
diff --git a/integration-tests/examples/test_libs/lib_groups.json b/integration-tests/examples/test_libs/lib_groups.json
index 9c4e5f0..5cd7bfe 100644
--- a/integration-tests/examples/test_libs/lib_groups.json
+++ b/integration-tests/examples/test_libs/lib_groups.json
@@ -1,5 +1,4 @@
[
- "pip2",
"pip3",
"os_pkg",
"r_pkg"
diff --git a/integration-tests/examples/test_libs/rstudio/lib_groups.json b/integration-tests/examples/test_libs/rstudio/lib_groups.json
index 821acb6..87e47a5 100644
--- a/integration-tests/examples/test_libs/rstudio/lib_groups.json
+++ b/integration-tests/examples/test_libs/rstudio/lib_groups.json
@@ -1,5 +1,4 @@
[
- "pip2",
"pip3",
"others",
"os_pkg",
diff --git a/integration-tests/examples/test_libs/rstudio/lib_list.json b/integration-tests/examples/test_libs/rstudio/lib_list.json
index 7986430..5590429 100644
--- a/integration-tests/examples/test_libs/rstudio/lib_list.json
+++ b/integration-tests/examples/test_libs/rstudio/lib_list.json
@@ -12,10 +12,6 @@
"start_with": "sp"
},
{
- "group": "pip2",
- "start_with": "sp"
- },
- {
"group": "pip3",
"start_with": "sp"
}
diff --git a/integration-tests/examples/test_libs/tensor/lib_groups.json b/integration-tests/examples/test_libs/tensor/lib_groups.json
index 0e8d040..b77b8c2 100644
--- a/integration-tests/examples/test_libs/tensor/lib_groups.json
+++ b/integration-tests/examples/test_libs/tensor/lib_groups.json
@@ -1,5 +1,4 @@
[
- "pip2",
"pip3",
"others",
"os_pkg"
diff --git a/integration-tests/examples/test_libs/tensor/lib_list.json b/integration-tests/examples/test_libs/tensor/lib_list.json
index 8e6d23d..d360bd8 100644
--- a/integration-tests/examples/test_libs/tensor/lib_list.json
+++ b/integration-tests/examples/test_libs/tensor/lib_list.json
@@ -8,10 +8,6 @@
"start_with": "gh"
},
{
- "group": "pip2",
- "start_with": "sp"
- },
- {
"group": "pip3",
"start_with": "sp"
}
diff --git a/integration-tests/examples/test_libs/zeppelin/lib_groups.json b/integration-tests/examples/test_libs/zeppelin/lib_groups.json
index 821acb6..87e47a5 100644
--- a/integration-tests/examples/test_libs/zeppelin/lib_groups.json
+++ b/integration-tests/examples/test_libs/zeppelin/lib_groups.json
@@ -1,5 +1,4 @@
[
- "pip2",
"pip3",
"others",
"os_pkg",
diff --git a/integration-tests/examples/test_libs/zeppelin/lib_list.json b/integration-tests/examples/test_libs/zeppelin/lib_list.json
index 7986430..b443274 100644
--- a/integration-tests/examples/test_libs/zeppelin/lib_list.json
+++ b/integration-tests/examples/test_libs/zeppelin/lib_list.json
@@ -11,10 +11,7 @@
"group": "r_pkg",
"start_with": "sp"
},
- {
- "group": "pip2",
- "start_with": "sp"
- },
+
{
"group": "pip3",
"start_with": "sp"
diff --git a/services/datalab-model/src/main/java/com/epam/datalab/dto/LibraryGroups.java b/services/datalab-model/src/main/java/com/epam/datalab/dto/LibraryGroups.java
index 39ff924..3bc772c 100644
--- a/services/datalab-model/src/main/java/com/epam/datalab/dto/LibraryGroups.java
+++ b/services/datalab-model/src/main/java/com/epam/datalab/dto/LibraryGroups.java
@@ -21,7 +21,6 @@
public enum LibraryGroups {
GROUP_JAVA("java"),
- GROUP_PIP2("pip2"),
GROUP_PIP3("pip3"),
GROUP_R_PKG("r_pkg"),
GROUP_OS_PKG("os_pkg"),
diff --git a/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesConst.java b/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesConst.java
index 2cfc997..d4a2c51 100644
--- a/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesConst.java
+++ b/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesConst.java
@@ -23,17 +23,17 @@
String GKE_SELF_SERVICE_PATH = "/root/self-service.yaml";
String GKE_SELF_SERVICE = "self-service.yaml";
String SELF_SERVICE = "self-service.yml";
- // String SELF_SERVICE_PROP_PATH = "services/self-service/self-service.yml";
+ // String SELF_SERVICE_PROP_PATH = "services/self-service/self-service.yml";
String SELF_SERVICE_PROP_PATH = "/opt/datalab/conf/self-service.yml";
String PROVISIONING_SERVICE = "provisioning.yml";
- // String PROVISIONING_SERVICE_PROP_PATH = "services/provisioning-service/provisioning.yml";
+ // String PROVISIONING_SERVICE_PROP_PATH = "services/provisioning-service/provisioning.yml";
String PROVISIONING_SERVICE_PROP_PATH = "/opt/datalab/conf/provisioning.yml";
String BILLING_SERVICE = "billing.yml";
+ String BILLING_SERVICE_PROP_PATH = "/opt/datalab/conf/billing.yml";
// String BILLING_SERVICE_PROP_PATH = "services/billing-gcp/billing.yml";
// String BILLING_SERVICE_PROP_PATH = "services/billing-azure/billing.yml";
// String BILLING_SERVICE_PROP_PATH = "services/billing-aws/billing.yml";
- String BILLING_SERVICE_PROP_PATH = "/opt/datalab/conf/billing.yml";
String GKE_BILLING_PATH = "/root/billing.yaml";
String GKE_BILLING_SERVICE = "billing.yml";
String RESTART_URL = "config/restart";
diff --git a/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesService.java b/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesService.java
index e4944a5..67f75b4 100644
--- a/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesService.java
+++ b/services/datalab-webapp-common/src/main/java/com/epam/datalab/properties/ChangePropertiesService.java
@@ -49,23 +49,40 @@
}
}
+
public void writeFileFromString(String newPropFile, String serviceName, String servicePath) {
- try {
- String oldFile = FileUtils.readFileToString(new File(servicePath), Charset.defaultCharset());
- changeCHMODE(serviceName, servicePath, ChangePropertiesConst.DEFAULT_CHMOD, ChangePropertiesConst.WRITE_CHMOD);
- BufferedWriter writer = new BufferedWriter(new FileWriter(servicePath));
- log.info("Trying to overwrite {}, file for path {} :", serviceName, servicePath);
- writer.write(addLicence());
- writer.write(checkAndReplaceSecretIfEmpty(newPropFile, oldFile));
- log.info("{} overwritten successfully", serviceName);
- writer.close();
- changeCHMODE(serviceName, servicePath, ChangePropertiesConst.WRITE_CHMOD, ChangePropertiesConst.DEFAULT_CHMOD);
+ String oldFile = readFile(serviceName, servicePath);
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(servicePath))) {
+ try {
+ changeCHMODE(serviceName, servicePath, ChangePropertiesConst.DEFAULT_CHMOD, ChangePropertiesConst.WRITE_CHMOD);
+ log.info("Trying to overwrite {}, file for path {} :", serviceName, servicePath);
+ writer.write(addLicence());
+ writer.write(checkAndReplaceSecretIfEmpty(newPropFile, oldFile));
+ log.info("{} overwritten successfully", serviceName);
+ writer.close();
+ changeCHMODE(serviceName, servicePath, ChangePropertiesConst.WRITE_CHMOD, ChangePropertiesConst.DEFAULT_CHMOD);
+ } catch (Exception e) {
+ log.error("Failed during overwriting {}", serviceName);
+ writer.write(oldFile);
+ throw new DynamicChangePropertiesException(String.format("Failed during overwriting %s", serviceName));
+ }
} catch (IOException e) {
- log.error("Failed during overwriting {}", serviceName);
+ log.error("Failed to create writer with path {}", servicePath);
throw new DynamicChangePropertiesException(String.format("Failed during overwriting %s", serviceName));
}
}
+ private String readFile(String serviceName, String servicePath) {
+ String oldFile;
+ try {
+ oldFile = FileUtils.readFileToString(new File(servicePath), Charset.defaultCharset());
+ } catch (IOException e) {
+ log.error("Failed to read with path {}", servicePath);
+ throw new DynamicChangePropertiesException(String.format("Failed during overwriting %s", serviceName));
+ }
+ return oldFile;
+ }
+
public RestartAnswer restart(RestartForm restartForm) {
try {
boolean billing = restartForm.isBilling();
@@ -94,24 +111,30 @@
private String hideSecretsAndRemoveLicence(String currentConf) {
Matcher passMatcher = Pattern.compile(ChangePropertiesConst.SECRET_REGEX).matcher(currentConf);
Matcher userMatcher = Pattern.compile(ChangePropertiesConst.USER_REGEX).matcher(currentConf);
- List<String> secrets = new ArrayList<>();
- List<String> users = new ArrayList<>();
+ List<String> secretsAndUsers = new ArrayList<>();
String confWithReplacedSecretConf = removeLicence(currentConf);
while (passMatcher.find()) {
String secret = passMatcher.group().split(":")[ChangePropertiesConst.DEFAULT_VALUE_PLACE];
if (!(secret.isEmpty() || secret.trim().isEmpty()))
- secrets.add(secret);
+ secretsAndUsers.add(secret);
}
while (userMatcher.find()) {
String user = userMatcher.group().split(":")[ChangePropertiesConst.DEFAULT_VALUE_PLACE];
if (!(user.isEmpty() || user.trim().isEmpty()))
- users.add(user);
+ secretsAndUsers.add(user);
}
- for (String secret : secrets) {
- confWithReplacedSecretConf = confWithReplacedSecretConf.replace(secret, ChangePropertiesConst.SECRET_REPLACEMENT_FORMAT);
- }
- for (String user : users) {
- confWithReplacedSecretConf = confWithReplacedSecretConf.replace(user, ChangePropertiesConst.SECRET_REPLACEMENT_FORMAT);
+ for (String secretOrUser : secretsAndUsers) {
+ int start = confWithReplacedSecretConf.indexOf(secretOrUser);
+ int end = confWithReplacedSecretConf.indexOf("\n", start) - 1;
+            boolean isTrue;
+            try {
+                String s = confWithReplacedSecretConf.substring(start, end);
+                isTrue = s.equals(secretOrUser);
+            } catch (StringIndexOutOfBoundsException e) {
+                isTrue = true;
+            }
+            if (isTrue)
+ confWithReplacedSecretConf = confWithReplacedSecretConf.replace(secretOrUser, ChangePropertiesConst.SECRET_REPLACEMENT_FORMAT);
}
return confWithReplacedSecretConf;
}
@@ -137,29 +160,45 @@
}
private String checkAndReplaceSecretIfEmpty(String newPropFile, String oldProf) {
- Map<String, String> emptySecrets = findEmptySecret(newPropFile);
- return emptySecrets.isEmpty() ? newPropFile : replaceEmptySecret(newPropFile, oldProf, emptySecrets);
+ Map<String, String> emptySecretsAndUserNames = findEmptySecretAndNames(newPropFile);
+ return emptySecretsAndUserNames.isEmpty() ? newPropFile : replaceEmptySecret(newPropFile, oldProf, emptySecretsAndUserNames);
}
private String replaceEmptySecret(String newPropFile, String oldProf, Map<String, String> emptySecrets) {
String fileWithReplacedEmptySecrets = newPropFile;
- Matcher oldProfMatcher = Pattern.compile(ChangePropertiesConst.SECRET_REGEX).matcher(oldProf);
- while (oldProfMatcher.find()) {
- String[] s = oldProfMatcher.group().split(":");
+ Matcher oldPassMatcher = Pattern.compile(ChangePropertiesConst.SECRET_REGEX).matcher(oldProf);
+ Matcher oldUserMatcher = Pattern.compile(ChangePropertiesConst.USER_REGEX).matcher(oldProf);
+
+ while (oldPassMatcher.find()) {
+ String[] s = oldPassMatcher.group().split(":");
if (emptySecrets.containsKey(s[ChangePropertiesConst.DEFAULT_NAME_PLACE])) {
- fileWithReplacedEmptySecrets = fileWithReplacedEmptySecrets.replace(emptySecrets.get(s[ChangePropertiesConst.DEFAULT_NAME_PLACE]), oldProfMatcher.group());
+ fileWithReplacedEmptySecrets = fileWithReplacedEmptySecrets.replace(emptySecrets.get(s[ChangePropertiesConst.DEFAULT_NAME_PLACE]), oldPassMatcher.group());
+ }
+ }
+ while (oldUserMatcher.find()) {
+ String[] s = oldUserMatcher.group().split(":");
+ if (emptySecrets.containsKey(s[ChangePropertiesConst.DEFAULT_NAME_PLACE])) {
+ fileWithReplacedEmptySecrets = fileWithReplacedEmptySecrets.replace(emptySecrets.get(s[ChangePropertiesConst.DEFAULT_NAME_PLACE]), oldUserMatcher.group());
}
}
return fileWithReplacedEmptySecrets;
}
- private Map<String, String> findEmptySecret(String newPropFile) {
- Matcher newPropFileMatcher = Pattern.compile(ChangePropertiesConst.SECRET_REGEX).matcher(newPropFile);
+ private Map<String, String> findEmptySecretAndNames(String newPropFile) {
+ Matcher passMatcher = Pattern.compile(ChangePropertiesConst.SECRET_REGEX).matcher(newPropFile);
+ Matcher userNameMatcher = Pattern.compile(ChangePropertiesConst.USER_REGEX).matcher(newPropFile);
Map<String, String> emptySecrets = new HashMap<>();
- while (newPropFileMatcher.find()) {
- String[] s = newPropFileMatcher.group().split(":");
+ while (passMatcher.find()) {
+ String[] s = passMatcher.group().split(":");
if (s[ChangePropertiesConst.DEFAULT_VALUE_PLACE].equals(ChangePropertiesConst.SECRET_REPLACEMENT_FORMAT)) {
- emptySecrets.put(s[ChangePropertiesConst.DEFAULT_NAME_PLACE], newPropFileMatcher.group());
+ emptySecrets.put(s[ChangePropertiesConst.DEFAULT_NAME_PLACE], passMatcher.group());
+ }
+ }
+
+ while (userNameMatcher.find()) {
+ String[] s = userNameMatcher.group().split(":");
+ if (s[ChangePropertiesConst.DEFAULT_VALUE_PLACE].equals(ChangePropertiesConst.SECRET_REPLACEMENT_FORMAT)) {
+ emptySecrets.put(s[ChangePropertiesConst.DEFAULT_NAME_PLACE], userNameMatcher.group());
}
}
return emptySecrets;
diff --git a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/response/handlers/ComputationalCallbackHandler.java b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/response/handlers/ComputationalCallbackHandler.java
index 7e0a549..99b5f0e 100644
--- a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/response/handlers/ComputationalCallbackHandler.java
+++ b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/core/response/handlers/ComputationalCallbackHandler.java
@@ -75,11 +75,15 @@
@Override
protected ComputationalStatusDTO parseOutResponse(JsonNode resultNode, ComputationalStatusDTO baseStatus) {
+ log.info("TEST LOG!!!: parseOutResponse :\n resultNode: {}", resultNode);
+
if (resultNode == null) {
return baseStatus;
}
baseStatus.withComputationalUrl(extractUrl(resultNode));
baseStatus.withLastActivity(Date.from(Instant.now()));
+ log.info("TEST LOG!!!: base status: {}", baseStatus);
+ log.info("TEST LOG!!!: getAction: {}", getAction());
if (DockerAction.CREATE == getAction()) {
baseStatus
diff --git a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/aws/ComputationalResourceAws.java b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/aws/ComputationalResourceAws.java
index 1818b29..9af68b3 100644
--- a/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/aws/ComputationalResourceAws.java
+++ b/services/provisioning-service/src/main/java/com/epam/datalab/backendapi/resources/aws/ComputationalResourceAws.java
@@ -68,7 +68,8 @@
@POST
@Path(ComputationalAPI.COMPUTATIONAL_CREATE_CLOUD_SPECIFIC)
public String create(@Auth UserInfo ui, ComputationalCreateAws dto) {
- log.debug("Create computational resources {} for user {}: {}", dto.getComputationalName(), ui.getName(), dto);
+
+ log.info("Create computational resources {} for user {}: {}", dto.getComputationalName(), ui.getName(), dto);
String uuid = DockerCommands.generateUUID();
folderListenerExecutor.start(configuration.getImagesDirectory(),
configuration.getResourceStatusPollTimeout(),
diff --git a/services/provisioning-service/src/main/resources/mock_response/aws/notebook_lib_list_pkgs.json b/services/provisioning-service/src/main/resources/mock_response/aws/notebook_lib_list_pkgs.json
index b0816fc..acc4568 100644
--- a/services/provisioning-service/src/main/resources/mock_response/aws/notebook_lib_list_pkgs.json
+++ b/services/provisioning-service/src/main/resources/mock_response/aws/notebook_lib_list_pkgs.json
@@ -5,11 +5,6 @@
"pyvcf/xenial": "0.6.7-2build1",
"pyxplot/xenial": "0.9.2-6build1"
},
- "pip2": {
- "requests": "N/A",
- "configparser": "N/A",
- "SparseAce": "N/A"
- },
"pip3": {
"configparser": "N/A",
"sparkL": "N/A"
diff --git a/services/provisioning-service/src/main/resources/mock_response/azure/notebook_lib_list_pkgs.json b/services/provisioning-service/src/main/resources/mock_response/azure/notebook_lib_list_pkgs.json
index b0816fc..acc4568 100644
--- a/services/provisioning-service/src/main/resources/mock_response/azure/notebook_lib_list_pkgs.json
+++ b/services/provisioning-service/src/main/resources/mock_response/azure/notebook_lib_list_pkgs.json
@@ -5,11 +5,6 @@
"pyvcf/xenial": "0.6.7-2build1",
"pyxplot/xenial": "0.9.2-6build1"
},
- "pip2": {
- "requests": "N/A",
- "configparser": "N/A",
- "SparseAce": "N/A"
- },
"pip3": {
"configparser": "N/A",
"sparkL": "N/A"
diff --git a/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_lib_list_pkgs.json b/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_lib_list_pkgs.json
index b0816fc..acc4568 100644
--- a/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_lib_list_pkgs.json
+++ b/services/provisioning-service/src/main/resources/mock_response/gcp/notebook_lib_list_pkgs.json
@@ -5,11 +5,6 @@
"pyvcf/xenial": "0.6.7-2build1",
"pyxplot/xenial": "0.9.2-6build1"
},
- "pip2": {
- "requests": "N/A",
- "configparser": "N/A",
- "SparseAce": "N/A"
- },
"pip3": {
"configparser": "N/A",
"sparkL": "N/A"
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java
index ffc4c60..294621e 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/domain/NotebookTemplate.java
@@ -19,6 +19,11 @@
package com.epam.datalab.backendapi.domain;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+@Getter
+@AllArgsConstructor
public enum NotebookTemplate {
JUPYTER("Jupyter notebook 6.1.6"),
JUPYTER_LAB("JupyterLab 0.35.6"),
@@ -26,15 +31,9 @@
DEEP_LEARNING("Deep Learning 2.4"),
TENSOR("Jupyter with TensorFlow 2.3.2"),
TENSOR_RSTUDIO("RStudio with TensorFlow 2.3.2"),
- RSTUDIO("RStudio 1.4.1103");
+ RSTUDIO("RStudio 1.4.1103"),
+ TENSOR_GCP("Jupyter with TensorFlow 2.1.0"),
+ DEEP_LEARNING_GCP("Deeplearning notebook");
- private String name;
-
- NotebookTemplate(String name) {
- this.name = name;
- }
-
- public String getName() {
- return name;
- }
+ private final String name;
}
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/callback/ComputationalCallback.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/callback/ComputationalCallback.java
index 5e074ae..cb7e1a7 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/callback/ComputationalCallback.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/resources/callback/ComputationalCallback.java
@@ -66,6 +66,8 @@
@POST
@Path(ApiCallbacks.STATUS_URI)
public Response status(ComputationalStatusDTO dto) {
+ log.info("TEST LOG!!!: status: {}", dto);
+
log.debug("Updating status for computational resource {} for user {}: {}",
dto.getComputationalName(), dto.getUser(), dto);
String uuid = dto.getRequestId();
@@ -77,11 +79,16 @@
new DatalabException(String.format("Computational resource %s of exploratory environment %s of " +
"project %s for user %s doesn't exist", dto.getComputationalName(),
dto.getExploratoryName(), dto.getProject(), dto.getUser())));
- log.debug("Current status for computational resource {} of exploratory environment {} for user {} is {}",
+
+ log.info("TEST LOG!!!: compResource: {}", compResource);
+
+ log.info("Current status for computational resource {} of exploratory environment {} for user {} is {}",
dto.getComputationalName(), dto.getExploratoryName(), dto.getUser(),
compResource.getStatus());
try {
- computationalDAO.updateComputationalFields(dto.withLastActivity(new Date()));
+ computationalDAO.updateComputationalFields(dto
+ .withLastActivity(new Date()));
+// .withStatus(UserInstanceStatus.RUNNING));
} catch (DatalabException e) {
log.error("Could not update status for computational resource {} for user {} to {}: {}", dto, e);
throw e;
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ComputationalServiceImpl.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ComputationalServiceImpl.java
index d93298b..2da87a3 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ComputationalServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/ComputationalServiceImpl.java
@@ -205,9 +205,13 @@
boolean isAdded = computationalDAO.addComputational(userInfo.getName(), formDTO.getNotebookName(), project,
computationalResource);
+ log.info("TEST LOG!!!: isAdded: {}", isAdded);
+
if (isAdded) {
try {
EndpointDTO endpointDTO = endpointService.get(instance.getEndpoint());
+ log.info("TEST LOG!!!: send to prov");
+
String uuid =
provisioningService.post(endpointDTO.getUrl() + COMPUTATIONAL_CREATE_CLOUD_SPECIFIC,
userInfo.getAccessToken(),
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureInfoServiceImpl.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureInfoServiceImpl.java
index fe2f340..09d05b9 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureInfoServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/InfrastructureInfoServiceImpl.java
@@ -130,6 +130,11 @@
@Override
public InfrastructureMetaInfoDTO getInfrastructureMetaInfo() {
final String branch = Manifests.read("GIT-Branch");
+        log.info("TEST LOG!!!: Manifests: {}, branch: {}", Manifests.DEFAULT, branch);
+ log.info("TEST LOG!!!: Git-Commit: {} ", Manifests.read("GIT-Commit"));
+ log.info("TEST LOG!!!: Datalab-Version: {}", Manifests.read("DataLab-Version"));
+ log.info("TEST LOG!!!: RELEASE_NOTES_FORMAT: {}", RELEASE_NOTES_FORMAT);
+
return InfrastructureMetaInfoDTO.builder()
.branch(branch)
.commit(Manifests.read("GIT-Commit"))
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java
index b66123b..9ee95f2 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImpl.java
@@ -22,6 +22,7 @@
import com.epam.datalab.auth.UserInfo;
import com.epam.datalab.backendapi.annotation.*;
import com.epam.datalab.backendapi.dao.BaseDAO;
+import com.epam.datalab.backendapi.dao.ComputationalDAO;
import com.epam.datalab.backendapi.dao.ExploratoryDAO;
import com.epam.datalab.backendapi.dao.ExploratoryLibDAO;
import com.epam.datalab.backendapi.domain.EndpointDTO;
@@ -70,24 +71,24 @@
private static final String COMPUTATIONAL_NOT_FOUND_MSG = "Computational with name %s was not found";
private static final String LIB_ALREADY_INSTALLED = "Library %s is already installing";
- @Inject
- private ExploratoryDAO exploratoryDAO;
+ private final ExploratoryDAO exploratoryDAO;
+ private final ExploratoryLibDAO libraryDAO;
+ private final RequestBuilder requestBuilder;
+ private final RESTService provisioningService;
+ private final RequestId requestId;
+ private final EndpointService endpointService;
@Inject
- private ExploratoryLibDAO libraryDAO;
-
- @Inject
- private RequestBuilder requestBuilder;
-
- @Named(ServiceConsts.PROVISIONING_SERVICE_NAME)
- @Inject
- private RESTService provisioningService;
-
- @Inject
- private RequestId requestId;
-
- @Inject
- private EndpointService endpointService;
+ public LibraryServiceImpl(ExploratoryDAO exploratoryDAO, ExploratoryLibDAO libraryDAO, RequestBuilder requestBuilder,
+ @Named(ServiceConsts.PROVISIONING_SERVICE_NAME) RESTService provisioningService,
+ RequestId requestId, EndpointService endpointService, ComputationalDAO computationalDAO) {
+ this.exploratoryDAO = exploratoryDAO;
+ this.libraryDAO = libraryDAO;
+ this.requestBuilder = requestBuilder;
+ this.provisioningService = provisioningService;
+ this.requestId = requestId;
+ this.endpointService = endpointService;
+ }
@Override
@@ -100,7 +101,6 @@
Document document = (Document) libraryDAO.findComputationalLibraries(user, project,
exploratoryName, computationalName)
.getOrDefault(ExploratoryLibDAO.COMPUTATIONAL_LIBS, new Document());
-
return (List<Document>) document.getOrDefault(computationalName, new ArrayList<>());
}
}
@@ -158,7 +158,7 @@
public List<String> getExploratoryLibGroups(UserInfo userInfo, String projectName, String exploratoryName) {
UserInstanceDTO userInstanceDTO = exploratoryDAO.fetchExploratoryFields(userInfo.getName(), projectName, exploratoryName);
final String templateName = userInstanceDTO.getTemplateName();
- List<LibraryGroups> groups = new ArrayList<>(Arrays.asList(GROUP_PIP2, GROUP_PIP3, GROUP_OTHERS, GROUP_OS_PKG));
+ List<LibraryGroups> groups = new ArrayList<>(Arrays.asList(GROUP_PIP3, GROUP_OTHERS, GROUP_OS_PKG));
if (isTemplateGroup(templateName, Stream.of(JUPYTER, ZEPPELIN))) {
groups.addAll(Arrays.asList(GROUP_R_PKG, GROUP_JAVA));
@@ -169,6 +169,9 @@
if (isTemplateGroup(templateName, Stream.of(RSTUDIO, TENSOR_RSTUDIO))) {
groups.add(GROUP_R_PKG);
}
+ if (isTemplateGroup(templateName, Stream.of(DEEP_LEARNING_GCP, TENSOR_GCP))) {
+ groups.add(GROUP_JAVA);
+ }
return groups
.stream()
diff --git a/services/self-service/src/main/java/com/epam/datalab/backendapi/util/RequestBuilder.java b/services/self-service/src/main/java/com/epam/datalab/backendapi/util/RequestBuilder.java
index 7362160..0dc1a2a 100644
--- a/services/self-service/src/main/java/com/epam/datalab/backendapi/util/RequestBuilder.java
+++ b/services/self-service/src/main/java/com/epam/datalab/backendapi/util/RequestBuilder.java
@@ -65,6 +65,7 @@
import com.epam.datalab.util.UsernameUtils;
import com.google.inject.Inject;
import com.google.inject.Singleton;
+import lombok.extern.slf4j.Slf4j;
import java.util.List;
import java.util.Map;
@@ -73,6 +74,7 @@
import static com.epam.datalab.cloud.CloudProvider.*;
@Singleton
+@Slf4j
public class RequestBuilder {
private static final String UNSUPPORTED_CLOUD_PROVIDER_MESSAGE = "Unsupported cloud provider ";
private static final String AZURE_REFRESH_TOKEN_KEY = "refresh_token";
@@ -335,6 +337,8 @@
UserInstanceDTO userInstance,
ComputationalCreateFormDTO form,
EndpointDTO endpointDTO) {
+ log.info("TEST LOG!!!: newComputationalCreate: \n form: {}", form);
+
T computationalCreate;
CloudProvider cloudProvider = endpointDTO.getCloudProvider();
switch (cloudProvider) {
@@ -351,6 +355,8 @@
.withVersion(awsForm.getVersion())
.withConfig((awsForm.getConfig()))
.withSharedImageEnabled(String.valueOf(projectDTO.isSharedImageEnabled()));
+ log.info("TEST LOG!!!: computationalCreate: {}", computationalCreate);
+
break;
case GCP:
GcpComputationalCreateForm gcpForm = (GcpComputationalCreateForm) form;
diff --git a/services/self-service/src/main/resources/webapp/src/app/core/util/helpUtils.ts b/services/self-service/src/main/resources/webapp/src/app/core/util/helpUtils.ts
index e078fde..a0eae85 100644
--- a/services/self-service/src/main/resources/webapp/src/app/core/util/helpUtils.ts
+++ b/services/self-service/src/main/resources/webapp/src/app/core/util/helpUtils.ts
@@ -36,40 +36,23 @@
}
}
- public static addSizeToGpuType(gpuType: string = ''): string {
- switch (gpuType) {
- case 'nvidia-tesla-t4':
- return 'S';
+ public static addSizeToGpuType(index): string {
- case 'nvidia-tesla-p100':
- return 'M';
+ const sizes = ['S', 'M', 'L', 'XL', 'XXL'];
- case 'nvidia-tesla-v100':
- return 'L';
- }
+ return sizes[index];
}
public static sortGpuTypes(gpuType: Array<string> = []): Array<string> {
- let sortedTypes = [];
+
+ const sortedTypes = [
+ 'nvidia-tesla-t4',
+ 'nvidia-tesla-k80',
+ 'nvidia-tesla-p4',
+ 'nvidia-tesla-p100',
+ 'nvidia-tesla-v100'
+ ];
- gpuType?.forEach(type => checkType(type));
-
- function checkType(type) {
- switch (type) {
- case 'nvidia-tesla-t4':
- sortedTypes[0] = type;
- return;
-
- case 'nvidia-tesla-p100':
- sortedTypes[1] = type;
- return;
-
- case 'nvidia-tesla-v100':
- sortedTypes[2] = type;
- return;
- }
- }
-
- return sortedTypes;
+    return sortedTypes.filter(el => gpuType.includes(el));
}
}
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html
index 461c6ad..7b339f7 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/computational/computational-resource-create-dialog/computational-resource-create-dialog.component.html
@@ -170,9 +170,9 @@
<mat-select formControlName="master_GPU_type" disableOptionCentering
placeholder="Select master GPU type"
[disabled]="!resourceForm.controls['shape_master'].value">
- <mat-option *ngFor="let type of sortGpuTypes(selectedImage.computationGPU)"
+ <mat-option *ngFor="let type of sortGpuTypes(selectedImage.computationGPU); index as i"
[value]="type" >
- <strong class="highlight icon-label">{{ addSizeToGpuType(type) }}</strong> {{ type }}
+ <strong class="highlight icon-label">{{ addSizeToGpuType(i) }}</strong> {{ type }}
</mat-option>
<mat-option *ngIf="!selectedImage.computationGPU?.length" class="multiple-select ml-10" disabled>
Master GPU types list is empty
@@ -223,8 +223,8 @@
<mat-form-field>
<mat-label>Select slave GPU type</mat-label>
<mat-select formControlName="slave_GPU_type" disableOptionCentering [disabled]="!resourceForm.controls['shape_slave'].value">
- <mat-option *ngFor="let type of sortGpuTypes(selectedImage.computationGPU)" [value]="type">
- <strong class="highlight icon-label">{{ addSizeToGpuType(type) }}</strong> {{ type }}
+ <mat-option *ngFor="let type of sortGpuTypes(selectedImage.computationGPU); index as i" [value]="type">
+ <strong class="highlight icon-label">{{ addSizeToGpuType(i) }}</strong> {{ type }}
</mat-option>
<mat-option *ngIf="!selectedImage.computationGPU?.length" class="multiple-select ml-10" disabled>
Slave GPU types list is empty
@@ -309,7 +309,7 @@
rises above your bid price, the Spot instance is reclaimed by AWS so that it can be given to another
customer. Make sure to backup your data on periodic basis.</span>
</div>
- <div class="checkbox-group control-group"
+ <div class="checkbox-group control-group m-top-10"
[hidden]="PROVIDER === 'gcp' && selectedImage?.image === 'docker.datalab-dataengine-service'"
*ngIf="notebook_instance?.image !== 'docker.datalab-zeppelin'">
<div class="d-flex cursor-pointer label" (click)="addAdditionalParams('configuration')">
diff --git a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.html b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.html
index aaa633c..28d54f3 100644
--- a/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.html
+++ b/services/self-service/src/main/resources/webapp/src/app/resources/exploratory/create-environment/create-environment.component.html
@@ -231,8 +231,8 @@
<mat-label>Select GPU type</mat-label>
<mat-select formControlName="gpu_type" disableOptionCentering [disabled]="!createExploratoryForm.controls['shape'].value"
panelClass="create-resources-dialog" placeholder="GPU type">
- <mat-option *ngFor="let list_item of gpuTypes" [value]="list_item" (click)="setCount('', list_item)">
- <strong class="highlight icon-label">{{ addSizeToGpuType(list_item) }}</strong> {{ list_item }}
+ <mat-option *ngFor="let list_item of gpuTypes; index as i" [value]="list_item" (click)="setCount('', list_item)">
+ <strong class="highlight icon-label">{{ addSizeToGpuType(i) }}</strong> {{ list_item }}
</mat-option>
<mat-option *ngIf="!gpuTypes.length" class="multiple-select ml-10" disabled>
GPU list is empty
diff --git a/services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImplTest.java b/services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImplTest.java
index 80c0327..af5dc5a 100644
--- a/services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImplTest.java
+++ b/services/self-service/src/test/java/com/epam/datalab/backendapi/service/impl/LibraryServiceImplTest.java
@@ -79,7 +79,6 @@
private final String COMPUTATIONAL_NAME = "compName";
private static final String GROUP_JAVA = "java";
- private static final String GROUP_PIP2 = "pip2";
private static final String GROUP_PIP3 = "pip3";
private static final String GROUP_R_PKG = "r_pkg";
private static final String GROUP_OS_PKG = "os_pkg";
@@ -366,7 +365,7 @@
@Test
public void getExploratoryJupyterLibGroups() {
- List<Object> exploratoryGroups = Arrays.asList(GROUP_PIP2, GROUP_PIP3, GROUP_OTHERS, GROUP_OS_PKG, GROUP_R_PKG, GROUP_JAVA);
+ List<Object> exploratoryGroups = Arrays.asList(GROUP_PIP3, GROUP_OTHERS, GROUP_OS_PKG, GROUP_R_PKG, GROUP_JAVA);
when(exploratoryDAO.fetchExploratoryFields(anyString(), anyString(), anyString())).thenReturn(getJupyterUserInstanceDtoForLibGroups());
List<String> exploratoryGroupsResult = libraryService.getExploratoryLibGroups(getUser(), PROJECT, EXPLORATORY_NAME);
@@ -377,7 +376,7 @@
@Test
public void getExploratoryRstudioLibGroups() {
- List<Object> exploratoryGroups = Arrays.asList(GROUP_PIP2, GROUP_PIP3, GROUP_OTHERS, GROUP_OS_PKG, GROUP_R_PKG);
+ List<Object> exploratoryGroups = Arrays.asList(GROUP_PIP3, GROUP_OTHERS, GROUP_OS_PKG, GROUP_R_PKG);
when(exploratoryDAO.fetchExploratoryFields(anyString(), anyString(), anyString())).thenReturn(getRstudioUserInstanceDtoForLibGroups());
List<String> exploratoryGroupsResult = libraryService.getExploratoryLibGroups(getUser(), PROJECT, EXPLORATORY_NAME);