Trying to get the worker to use the correct Python executable
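
The previous approach wrote PYSPARK_PYTHON into $SPARK_HOME/conf/spark_env.sh
at import time (note that Spark itself sources conf/spark-env.sh, with a
hyphen, so a file named spark_env.sh would be ignored). This change disables
that and instead passes the interpreter path straight to Spark through the
spark.pyspark.python configuration key.

For context, a minimal sketch of pinning the worker interpreter this way,
assuming the driver's own interpreter (sys.executable) is the one the
executors should use; the app name below is illustrative and not part of
this change:

    import sys
    from pyspark import SparkConf, SparkContext

    # spark.pyspark.python (Spark 2.1+) tells executors which Python
    # binary to launch for worker processes.
    conf = SparkConf() \
        .setAppName('pin-worker-python') \
        .set('spark.pyspark.python', sys.executable)

    sc = SparkContext(conf=conf)
    # Workers now run under the same interpreter as the driver.
    print(sc.parallelize([1, 2, 3]).map(lambda x: x + 1).collect())  # [2, 3, 4]
    sc.stop()
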
diff --git a/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py b/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
index 966c05f..d1c7755 100644
--- a/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
+++ b/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
@@ -23,15 +23,15 @@
 from .datasets import DatasetManager
 import os
 
-spark_env_content = '''
-#!/usr/bin/env bash
-
-export PYSPARK_PYTHON={}
-'''.format(os.getenv("PYSPARK_PYTHON"))
-pyspark_env_path = '{}/conf/spark_env.sh'.format(os.getenv('SPARK_HOME'))
-
-with open(pyspark_env_path, 'w') as f:
-    f.write(spark_env_content)
+# spark_env_content = '''
+# #!/usr/bin/env bash
+#
+# export PYSPARK_PYTHON={}
+# '''.format(os.getenv("PYSPARK_PYTHON"))
+# pyspark_env_path = '{}/conf/spark_env.sh'.format(os.getenv('_'))
+#
+# with open(pyspark_env_path, 'w') as f:
+#     f.write(spark_env_content)
 
 
 class SparkAmaContextBuilder(AmaContextBuilder):
@@ -42,7 +42,7 @@
             self.spark_conf = SparkConf()\
                 .setAppName('amaterasu-{}-{}'.format(self.ama_conf.runtime.jobId, self.ama_conf.runtime.actionName))\
                 .setMaster(self.ama_conf.env.master)\
-                .set("spark.pyspark.python", os.getenv("PYSPARK_PYTHON"))
+                .set("spark.pyspark.python", os.getenv("_"))
         else:
             self.spark_conf = SparkConf()