Fix env not loaded correctly: pass ama_conf into AmaContext so DatasetManager receives the dataset configuration
diff --git a/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py b/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
index 07078e6..4a3119a 100644
--- a/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
+++ b/frameworks/spark/pyspark_runtime/amaterasu_pyspark/runtime.py
@@ -47,7 +47,7 @@
     def build(self) -> "AmaContext":
         spark = SparkSession.builder.config(conf=self.spark_conf).getOrCreate()
         sc = spark.sparkContext
-        return AmaContext(self.env, sc, spark)
+        return AmaContext(self.ama_conf, sc, spark)
 
 
 class AmaContext(BaseAmaContext):
@@ -68,10 +68,10 @@
     def spark(self) -> SparkSession:
         return self._spark
 
-    def __init__(self, env: Environment, sc: SparkContext = None, spark: SparkSession = None):
-        super(AmaContext, self).__init__(env)
+    def __init__(self, ama_conf, sc: SparkContext = None, spark: SparkSession = None):
+        super(AmaContext, self).__init__(ama_conf)
         self._sc, self._spark = sc, spark
-        self._dataset_manager = DatasetManager(env.datasets, self.spark)
+        self._dataset_manager = DatasetManager(ama_conf.datasets, self.spark)
 
     def get_dataset(self, dataset_name: str) -> DataFrame:
         return self._dataset_manager.load_dataset(dataset_name)
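For illustration, a minimal usage sketch of the fixed wiring (the builder entry point name AmaContextBuilder and the dataset name are assumptions for the example, not taken from this diff):

    # Sketch only; AmaContextBuilder and the dataset name "mydataset" are assumed.
    builder = AmaContextBuilder()                 # assumed builder carrying spark_conf and ama_conf
    ama_context = builder.build()                 # build() now passes ama_conf (not env) to AmaContext
    df = ama_context.get_dataset("mydataset")     # resolved via DatasetManager(ama_conf.datasets, spark)
    df.show()

With the previous wiring, AmaContext was constructed from self.env, so DatasetManager was initialised without the dataset definitions carried by ama_conf; the change above routes the full configuration through the builder.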