Trying to get the worker to use the correct Python executable
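
PySpark picks the worker interpreter from the spark.pyspark.python conf
or, if that is unset, from the PYSPARK_PYTHON environment variable (the
conf takes precedence in Spark 2.1+). The dropped conf pointed at
os.getenv("_"), which the shell sets to the path of the last invoked
command, so its value depends on how the process was launched. This
commit re-enables writing PYSPARK_PYTHON into a spark_env.sh file at
import time instead.

For comparison, a minimal sketch of the conf-based route, assuming the
driver's own interpreter is also the one the workers should run; the use
of sys.executable here is an illustration, not what runtime.py did:

    import sys
    from pyspark import SparkConf, SparkContext

    conf = SparkConf() \
        .setAppName('pyspark-python-check') \
        .set('spark.pyspark.python', sys.executable)  # executors launch this interpreter

    sc = SparkContext(conf=conf)
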
diff --git a/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py b/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
index d1c7755..d7b5e8d 100644
--- a/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
+++ b/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
@@ -23,15 +23,15 @@
 from .datasets import DatasetManager
 import os
 
-# spark_env_content = '''
-# #!/usr/bin/env bash
-#
-# export PYSPARK_PYTHON={}
-# '''.format(os.getenv("PYSPARK_PYTHON"))
-# pyspark_env_path = '{}/conf/spark_env.sh'.format(os.getenv('_'))
-#
-# with open(pyspark_env_path, 'w') as f:
-#     f.write(spark_env_content)
+spark_env_content = '''
+#!/usr/bin/env bash
+
+export PYSPARK_PYTHON={}
+'''.format(os.getenv("PYSPARK_PYTHON"))
+pyspark_env_path = '{}/conf/spark_env.sh'.format(os.getenv('_'))
+
+with open(pyspark_env_path, 'w') as f:
+    f.write(spark_env_content)
 
 
 class SparkAmaContextBuilder(AmaContextBuilder):
@@ -41,8 +41,7 @@
         if self.ama_conf:
             self.spark_conf = SparkConf()\
                 .setAppName('amaterasu-{}-{}'.format(self.ama_conf.runtime.jobId, self.ama_conf.runtime.actionName))\
-                .setMaster(self.ama_conf.env.master)\
-                .set("spark.pyspark.python", os.getenv("_"))
+                .setMaster(self.ama_conf.env.master)
         else:
             self.spark_conf = SparkConf()
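
A more defensive variant of the re-enabled block, as a sketch rather than
a drop-in fix: Spark's conventional file name is spark-env.sh (with a
hyphen; it is sourced by the launch scripts), and both os.getenv('_') and
PYSPARK_PYTHON can be unset, in which case the block above formats the
literal 'None' into the path or the export line. Using SPARK_HOME as the
base directory is an assumption here; the executor environment must
provide it:

    import os

    spark_home = os.getenv('SPARK_HOME')          # assumed set by the launcher
    pyspark_python = os.getenv('PYSPARK_PYTHON')  # interpreter the workers should use

    # Only touch the conf file when both values are actually known.
    if spark_home and pyspark_python:
        env_path = os.path.join(spark_home, 'conf', 'spark-env.sh')
        with open(env_path, 'w') as f:
            f.write('#!/usr/bin/env bash\n\n'
                    'export PYSPARK_PYTHON={}\n'.format(pyspark_python))

Note that spark-env.sh is read per node, so a file written on the driver
only reaches the workers in deployments where the conf directory is shared.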