[DATALAB-2091]: reworked jars_parser script to run under python3
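
Under Python 3, subprocess.check_output() returns bytes rather than str, so
comparisons like python_ver != '' and text-mode outfile.write(...) calls would
misbehave without an explicit decode; the EMR step is also switched from
/usr/bin/python to /usr/bin/python3. A minimal sketch of the decode pattern
applied throughout (paths taken from this patch, otherwise illustrative):

    import subprocess

    # check_output returns bytes under Python 3; decode before text operations
    md5sum = subprocess.check_output('md5sum /tmp/jars.tar.gz',
                                     shell=True).decode('UTF-8')
    with open('/tmp/jars-checksum.chk', 'w') as outfile:
        outfile.write(md5sum)  # text-mode write expects str, not bytes
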
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
index 6e9c8c6..093cbfd 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_create.py
@@ -121,7 +121,7 @@
           "Name=CUSTOM_JAR, Args=sudo " \
           "/usr/bin/python3 /tmp/key_importer.py --user_name {4}, " \
           "ActionOnFailure=TERMINATE_CLUSTER,Jar=command-runner.jar; " \
-          "Name=CUSTOM_JAR, Args=/usr/bin/python /tmp/jars_parser.py " \
+          "Name=CUSTOM_JAR, Args=/usr/bin/python3 /tmp/jars_parser.py " \
           "--bucket {0} --emr_version {3} --region {2} --user_name {4} " \
           "--cluster_name {5}, " \
           "ActionOnFailure=TERMINATE_CLUSTER,Jar=command-runner.jar".\
diff --git a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_jars_parser.py b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_jars_parser.py
index 660dc48..2f5e33e 100644
--- a/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_jars_parser.py
+++ b/infrastructure-provisioning/src/general/scripts/aws/dataengine-service_jars_parser.py
@@ -39,12 +39,10 @@
     spark_def_path = "/usr/lib/spark/conf/spark-defaults.conf"
     spark_def_path_line1 = subprocess.check_output("cat " + spark_def_path +
                                                    " | grep spark.driver.extraClassPath | awk '{print $2}' | "
-                                                   "sed 's/^:// ; s~jar:~jar ~g; s~/\*:~/\* ~g; s~:~/\* ~g'",
-                                                   shell=True)
+                                                   "sed 's/^:// ; s~jar:~jar ~g; s~/\*:~/\* ~g; s~:~/\* ~g'", shell=True).decode('UTF-8')
     spark_def_path_line2 = subprocess.check_output("cat " + spark_def_path +
                                                    " | grep spark.driver.extraLibraryPath | awk '{print $2}' | "
-                                                   "sed 's/^:// ; s~jar:~jar ~g; s~/\*:~/\* ~g; s~:\|$~/\* ~g'",
-                                                   shell=True)
+                                                   "sed 's/^:// ; s~jar:~jar ~g; s~/\*:~/\* ~g; s~:\|$~/\* ~g'", shell=True).decode('UTF-8')
     spark_def_path_line1 = spark_def_path_line1.strip('\n')
     spark_def_path_line2 = spark_def_path_line2.strip('\n')
     if args.region == 'us-east-1':
@@ -65,7 +63,7 @@
     os.system('touch /tmp/python_version')
     for v in range(4, 8):
         python_ver_checker = "python3.{} -V 2>/dev/null".format(v) + " | awk '{print $2}'"
-        python_ver = subprocess.check_output(python_ver_checker, shell=True)
+        python_ver = subprocess.check_output(python_ver_checker, shell=True).decode('UTF-8')
         if python_ver != '':
             with open('/tmp/python_version', 'w') as outfile:
                 outfile.write(python_ver)
@@ -76,10 +74,10 @@
               format(spark_def_path_line1,
                      spark_def_path_line2))
     os.system('/bin/tar -zhcvf /tmp/spark.tar.gz -C /usr/lib/ spark')
-    md5sum = subprocess.check_output('md5sum /tmp/jars.tar.gz', shell=True)
+    md5sum = subprocess.check_output('md5sum /tmp/jars.tar.gz', shell=True).decode('UTF-8')
     with open('/tmp/jars-checksum.chk', 'w') as outfile:
         outfile.write(md5sum)
-    md5sum = subprocess.check_output('md5sum /tmp/spark.tar.gz', shell=True)
+    md5sum = subprocess.check_output('md5sum /tmp/spark.tar.gz', shell=True).decode('UTF-8')
     with open('/tmp/spark-checksum.chk', 'w') as outfile:
         outfile.write(md5sum)
     os.system('aws s3 cp /tmp/jars.tar.gz '