Merge branch 'dev' into STREAMPIPES-527
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index af9fb37..f6eea9e 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -114,6 +114,14 @@
           push: true
           tags: ${{ env.DOCKERHUB_APACHE_REPO }}/pipeline-elements-all-flink:${{ env.MVN_VERSION }}
 
+      - name: Build and Push Docker Image extensions-all-iiot
+        uses: docker/build-push-action@v2
+        with:
+          context: ./streampipes-extensions/streampipes-extensions-all-iiot
+          platforms: linux/amd64,linux/arm64/v8,linux/arm/v7
+          push: true
+          tags: ${{ env.DOCKERHUB_APACHE_REPO }}/extensions-all-iiot:${{ env.MVN_VERSION }}
+
       - name: Build and Push Docker Image pipeline-elements-all-jvm
         uses: docker/build-push-action@v2
         with:
@@ -122,10 +130,11 @@
           push: true
           tags: ${{ env.DOCKERHUB_APACHE_REPO }}/pipeline-elements-all-jvm:${{ env.MVN_VERSION }}
 
-      - name: Build and Push Docker Image sinks-internal-jvm
+      - name: Build and Push Docker Image sources-watertank-simulator
         uses: docker/build-push-action@v2
         with:
-          context: ./streampipes-extensions/streampipes-sinks-internal-jvm
+          context: ./streampipes-extensions/streampipes-sources-watertank-simulator
           platforms: linux/amd64,linux/arm64/v8,linux/arm/v7
           push: true
-          tags: ${{ env.DOCKERHUB_APACHE_REPO }}/sinks-internal-jvm:${{ env.MVN_VERSION }}
\ No newline at end of file
+          tags: ${{ env.DOCKERHUB_APACHE_REPO }}/sources-watertank-simulator:${{ env.MVN_VERSION }}
+
diff --git a/.gitignore b/.gitignore
index 4b3258f..324db87 100644
--- a/.gitignore
+++ b/.gitignore
@@ -83,7 +83,7 @@
 /test_data/
 
 ui/src/assets/lib/apps/*
-
+ui/.angular
 
 # compiled output
 ui/dist
diff --git a/.idea/runConfigurations/all_extensions_jvm.xml b/.idea/runConfigurations/all_extensions_jvm.xml
index 20063d0..103f85d 100644
--- a/.idea/runConfigurations/all_extensions_jvm.xml
+++ b/.idea/runConfigurations/all_extensions_jvm.xml
@@ -1,14 +1,14 @@
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="all-extensions-jvm" type="Application" factoryName="Application">
     <envs>
-      <env name="SP_PORT" value="7023" />
-      <env name="SP_DEBUG" value="true" />
       <env name="SP_COUCHDB_HOST" value="localhost" />
-      <env name="SP_JMS_HOST" value="localhost" />
-      <env name="SP_JMS_PORT" value="61616" />
       <env name="SP_DATA_LAKE_HOST" value="localhost" />
       <env name="SP_DATA_LAKE_PORT" value="8086" />
+      <env name="SP_DEBUG" value="true" />
       <env name="SP_IMAGE_STORAGE_LOCATION" value=".streampipes/spImages/" />
+      <env name="SP_JMS_HOST" value="localhost" />
+      <env name="SP_JMS_PORT" value="61616" />
+      <env name="SP_PORT" value="7023" />
     </envs>
     <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.extensions.all.jvm.AllExtensionsInit" />
     <module name="streampipes-extensions-all-jvm" />
@@ -27,4 +27,4 @@
       <option name="Make" enabled="true" />
     </method>
   </configuration>
-</component>
\ No newline at end of file
+</component>
diff --git a/.idea/runConfigurations/all_pipeline_elements_jvm.xml b/.idea/runConfigurations/all_pipeline_elements_jvm.xml
index 08521f8..60c64ea 100644
--- a/.idea/runConfigurations/all_pipeline_elements_jvm.xml
+++ b/.idea/runConfigurations/all_pipeline_elements_jvm.xml
@@ -1,7 +1,5 @@
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="all-pipeline-elements-jvm" type="Application" factoryName="Application">
-    <option name="ALTERNATIVE_JRE_PATH" value="/Library/Java/JavaVirtualMachines/jdk-17.0.1.jdk/Contents/Home" />
-    <option name="ALTERNATIVE_JRE_PATH_ENABLED" value="true" />
     <envs>
       <env name="SP_PORT" value="7023" />
       <env name="SP_DEBUG" value="true" />
diff --git a/.idea/runConfigurations/backend.xml b/.idea/runConfigurations/backend.xml
index fb5908b..344ff6e 100644
--- a/.idea/runConfigurations/backend.xml
+++ b/.idea/runConfigurations/backend.xml
@@ -1,20 +1,19 @@
 <component name="ProjectRunConfigurationManager">
   <configuration default="false" name="backend" type="SpringBootApplicationConfigurationType" factoryName="Spring Boot">
-    <module name="streampipes-backend" />
-    <option name="SPRING_BOOT_MAIN_CLASS" value="org.apache.streampipes.backend.StreamPipesBackendApplication" />
-    <option name="ALTERNATIVE_JRE_PATH" value="11" />
-    <option name="SHORTEN_COMMAND_LINE" value="NONE" />
+    <option name="ACTIVE_PROFILES" />
     <envs>
-      <env name="SP_COUCHDB_HOST" value="localhost" />
       <env name="SP_BACKEND_HOST" value="localhost" />
+      <env name="SP_COUCHDB_HOST" value="localhost" />
+      <env name="SP_DEBUG" value="true " />
       <env name="SP_INFLUX_HOST" value="localhost" />
-      <env name="SP_KAFKA_HOST" value="localhost" />
       <env name="SP_INFLUX_PORT" value="8086" />
-      <env name="SP_KAFKA_PORT" value="9094" />
       <env name="SP_JMS_HOST" value="localhost" />
-      <env name="SP_DEBUG" value="true" />
+      <env name="SP_KAFKA_HOST" value="localhost" />
+      <env name="SP_KAFKA_PORT" value="9094" />
       <env name="SP_PRIORITIZED_PROTOCOL" value="mqtt" />
     </envs>
+    <module name="streampipes-backend" />
+    <option name="SPRING_BOOT_MAIN_CLASS" value="org.apache.streampipes.backend.StreamPipesBackendApplication" />
     <method v="2">
       <option name="Make" enabled="true" />
     </method>
diff --git a/.idea/runConfigurations/processors_aggregation_flink.xml b/.idea/runConfigurations/processors_aggregation_flink.xml
deleted file mode 100644
index 183a887..0000000
--- a/.idea/runConfigurations/processors_aggregation_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-aggregation-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6005" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.aggregation.flink.AggregationFlinkInit" />
-    <module name="streampipes-processors-aggregation-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_enricher_flink.xml b/.idea/runConfigurations/processors_enricher_flink.xml
deleted file mode 100644
index 274dfd1..0000000
--- a/.idea/runConfigurations/processors_enricher_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-enricher-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6010" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.enricher.flink.EnricherFlinkInit" />
-    <module name="streampipes-processors-enricher-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_enricher_jvm.xml b/.idea/runConfigurations/processors_enricher_jvm.xml
deleted file mode 100644
index ae43ed6..0000000
--- a/.idea/runConfigurations/processors_enricher_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-enricher-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6015" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.enricher.jvm.EnricherJvmInit" />
-    <module name="streampipes-processors-enricher-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_filters_jvm.xml b/.idea/runConfigurations/processors_filters_jvm.xml
deleted file mode 100644
index b504325..0000000
--- a/.idea/runConfigurations/processors_filters_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-filters-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6025" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.filters.jvm.FiltersJvmInit" />
-    <module name="streampipes-processors-filters-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_filters_siddhi.xml b/.idea/runConfigurations/processors_filters_siddhi.xml
deleted file mode 100644
index 6b060b6..0000000
--- a/.idea/runConfigurations/processors_filters_siddhi.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-filters-siddhi" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6025" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.siddhi.FiltersSiddhiInit" />
-    <module name="streampipes-processors-filters-siddhi" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_geo_flink.xml b/.idea/runConfigurations/processors_geo_flink.xml
deleted file mode 100644
index 5e89134..0000000
--- a/.idea/runConfigurations/processors_geo_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-geo-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6025" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processor.geo.flink.GeoFlinkInit" />
-    <module name="streampipes-processors-geo-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_geo_jvm.xml b/.idea/runConfigurations/processors_geo_jvm.xml
deleted file mode 100644
index 78165cd..0000000
--- a/.idea/runConfigurations/processors_geo_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-geo-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="8005" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.geo.jvm.GeoJvmInit" />
-    <module name="streampipes-processors-geo-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_imageprocessing_jvm.xml b/.idea/runConfigurations/processors_imageprocessing_jvm.xml
deleted file mode 100644
index bb2e7b5..0000000
--- a/.idea/runConfigurations/processors_imageprocessing_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-imageprocessing-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6035" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.imageprocessing.jvm.ImageProcessingJvmInit" />
-    <module name="streampipes-processors-image-processing-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_pattern_detection_flink.xml b/.idea/runConfigurations/processors_pattern_detection_flink.xml
deleted file mode 100644
index 8ed0376..0000000
--- a/.idea/runConfigurations/processors_pattern_detection_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-pattern-detection-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6040" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.pattern.detection.flink.PatternDetectionFlinkInit" />
-    <module name="streampipes-processors-pattern-detection-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_statistics_flink.xml b/.idea/runConfigurations/processors_statistics_flink.xml
deleted file mode 100644
index e9b74c8..0000000
--- a/.idea/runConfigurations/processors_statistics_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-statistics-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6045" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.statistics.flink.StatisticsFlinkInit" />
-    <module name="streampipes-processors-statistics-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_textmining_flink.xml b/.idea/runConfigurations/processors_textmining_flink.xml
deleted file mode 100644
index 18a4204..0000000
--- a/.idea/runConfigurations/processors_textmining_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-textmining-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6050" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.textmining.flink.TextMiningFlinkInit" />
-    <module name="streampipes-processors-text-mining-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_textmining_jvm.xml b/.idea/runConfigurations/processors_textmining_jvm.xml
deleted file mode 100644
index 4fb63c4..0000000
--- a/.idea/runConfigurations/processors_textmining_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-textmining-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6065" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.textmining.jvm.TextMiningJvmInit" />
-    <module name="streampipes-processors-text-mining-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_transformation_flink.xml b/.idea/runConfigurations/processors_transformation_flink.xml
deleted file mode 100644
index 456b92c..0000000
--- a/.idea/runConfigurations/processors_transformation_flink.xml
+++ /dev/null
@@ -1,14 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-transformation-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6055" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.transformation.flink.TransformationFlinkInit" />
-    <module name="streampipes-processors-transformation-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/processors_transformation_jvm.xml b/.idea/runConfigurations/processors_transformation_jvm.xml
deleted file mode 100644
index bfa0fff..0000000
--- a/.idea/runConfigurations/processors_transformation_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="processors-transformation-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="6060" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.processors.transformation.jvm.TransformationJvmInit" />
-    <module name="streampipes-processors-transformation-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/sinks_brokers_jvm.xml b/.idea/runConfigurations/sinks_brokers_jvm.xml
deleted file mode 100644
index bb2962d..0000000
--- a/.idea/runConfigurations/sinks_brokers_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="sinks-brokers-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="7005" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.sinks.brokers.jvm.BrokersJvmInit" />
-    <module name="streampipes-sinks-brokers-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/sinks_databases_flink.xml b/.idea/runConfigurations/sinks_databases_flink.xml
deleted file mode 100644
index e0ec58c..0000000
--- a/.idea/runConfigurations/sinks_databases_flink.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="sinks-databases-flink" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="7010" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SP_FLINK_DEBUG" value="true" />
-      <env name="SP_ELASTICSEARCH_HOST" value="localhost" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.sinks.databases.flink.DatabasesFlinkInit" />
-    <module name="streampipes-sinks-databases-flink" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/sinks_databases_jvm.xml b/.idea/runConfigurations/sinks_databases_jvm.xml
deleted file mode 100644
index 358b372..0000000
--- a/.idea/runConfigurations/sinks_databases_jvm.xml
+++ /dev/null
@@ -1,13 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="sinks-databases-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="7015" />
-      <env name="SP_DEBUG" value="true" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.sinks.databases.jvm.DatabasesJvmInit" />
-    <module name="streampipes-sinks-databases-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/.idea/runConfigurations/sinks_notifications_jvm.xml b/.idea/runConfigurations/sinks_notifications_jvm.xml
deleted file mode 100644
index 77748b2..0000000
--- a/.idea/runConfigurations/sinks_notifications_jvm.xml
+++ /dev/null
@@ -1,22 +0,0 @@
-<component name="ProjectRunConfigurationManager">
-  <configuration default="false" name="sinks-notifications-jvm" type="Application" factoryName="Application">
-    <envs>
-      <env name="SP_PORT" value="7025" />
-      <env name="SP_DEBUG" value="true" />
-      <env name="SLACK_TOKEN" value="" />
-      <env name="EMAIL_FROMS" value="" />
-      <env name="EMAIL_USERNAME" value="" />
-      <env name="EMAIL_PASSWORD" value="" />
-      <env name="EMAIL_SMTP_HOST" value="" />
-      <env name="EMAIL_SMTP_PORT" value="" />
-      <env name="EMAIL_STARTTLS" value="" />
-      <env name="EMAIL_SILL" value="" />
-      <env name="WEBSOCKET_PROTOCOL" value="ws" />
-    </envs>
-    <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.sinks.notifications.jvm.SinksNotificationsJvmInit" />
-    <module name="streampipes-sinks-notifications-jvm" />
-    <method v="2">
-      <option name="Make" enabled="true" />
-    </method>
-  </configuration>
-</component>
diff --git a/Jenkinsfile b/Jenkinsfile
index 8f7050b..c202e02 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -30,7 +30,7 @@
 
   tools {
         maven 'maven_3_latest'
-        jdk 'jdk_1.8_latest'
+        jdk 'jdk_11_latest'
     }
 
 
diff --git a/README.md b/README.md
index f456bf5..b054d0a 100644
--- a/README.md
+++ b/README.md
@@ -103,8 +103,8 @@
 To properly build the StreamPipes core, the following tools should be installed:
 
 ### Prerequisites
-* Java 8 JDK (minimum)
-* Maven (tested with 3.6)
+* Java 11 JDK (minimum)
+* Maven (tested with 3.8)
 * NodeJS + NPM (tested with v12+/ v6+)
 * Docker + Docker-Compose
 
@@ -126,7 +126,7 @@
 
 ### Starting
 
-To start StreamPipes, run ``docker-compose up -d`` from the root directory.
+To start StreamPipes, run ``docker-compose up --build -d`` from the root directory.
 
 You can also use the installer or CLI as described in the ``Installation`` section.
 
@@ -135,7 +135,7 @@
 * **Connect adapters** for a variety of IoT data sources as well as 
 * **Data Processors** and **Data Sinks** as ready-to-use pipeline elements. 
 
-A description of the standard elements can be found in the Github repository [streampipes-extensions](https://www.github.com/apache/incubator-streampipes-extensions).
+A description of the standard elements can be found in [streampipes-extensions](https://github.com/apache/incubator-streampipes/tree/dev/streampipes-extensions).
 
 ## Extending StreamPipes
 
diff --git a/RELEASE_NOTES.md b/RELEASE_NOTES.md
index ec67a78..dbb6925 100644
--- a/RELEASE_NOTES.md
+++ b/RELEASE_NOTES.md
@@ -21,12 +21,72 @@
 
 The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
 
-# [Unreleased]
-### New Features
+# [0.70.0]
 
-### Improvements
+## Sub-task
 
-### Bug Fixes
+*   [[STREAMPIPES-535](https://issues.apache.org/jira/browse/STREAMPIPES-535)] - Support JWT signing with private/public key
+
+## Bug
+
+*   [[STREAMPIPES-243](https://issues.apache.org/jira/browse/STREAMPIPES-243)] - Configuration of And Processor is broken
+*   [[STREAMPIPES-255](https://issues.apache.org/jira/browse/STREAMPIPES-255)] - Error when importing AdapterDescriptions with file upload references
+*   [[STREAMPIPES-515](https://issues.apache.org/jira/browse/STREAMPIPES-515)] - Missing mapping in dev compose files for new docker-compose versions
+*   [[STREAMPIPES-521](https://issues.apache.org/jira/browse/STREAMPIPES-521)] - Filter can not be deleted in data explorer
+*   [[STREAMPIPES-524](https://issues.apache.org/jira/browse/STREAMPIPES-524)] - No data is shown in data explorer
+*   [[STREAMPIPES-529](https://issues.apache.org/jira/browse/STREAMPIPES-529)] - Newly created pipelines break dashboard
+*   [[STREAMPIPES-540](https://issues.apache.org/jira/browse/STREAMPIPES-540)] - Data download returns error
+*   [[STREAMPIPES-542](https://issues.apache.org/jira/browse/STREAMPIPES-542)] - Web UI pipelines won't import multiple pipelines
+*   [[STREAMPIPES-543](https://issues.apache.org/jira/browse/STREAMPIPES-543)] - Using UI can't choose a source for the new dashboard
+*   [[STREAMPIPES-547](https://issues.apache.org/jira/browse/STREAMPIPES-547)] - Fix repeating colors for time-series chart
+*   [[STREAMPIPES-548](https://issues.apache.org/jira/browse/STREAMPIPES-548)] - Aggregation settings for data-explorer partially not persisted
+*   [[STREAMPIPES-550](https://issues.apache.org/jira/browse/STREAMPIPES-550)] - Empty property configuration in data-explorer visualization config
+*   [[STREAMPIPES-551](https://issues.apache.org/jira/browse/STREAMPIPES-551)] - Missing naming for (multiple) data sources in visualization config of data-explorer
+*   [[STREAMPIPES-553](https://issues.apache.org/jira/browse/STREAMPIPES-553)] - Lite configuration for k8s does not include message broker
+*   [[STREAMPIPES-554](https://issues.apache.org/jira/browse/STREAMPIPES-554)] - Data-explorer widgets reload when token is renewed
+*   [[STREAMPIPES-564](https://issues.apache.org/jira/browse/STREAMPIPES-564)] - Group by fields don't change in data explorer
+*   [[STREAMPIPES-572](https://issues.apache.org/jira/browse/STREAMPIPES-572)] - Fix automatic lower casing when persisting data in connect
+*   [[STREAMPIPES-578](https://issues.apache.org/jira/browse/STREAMPIPES-578)] - Data Explorer download does not update measurement
+*   [[STREAMPIPES-579](https://issues.apache.org/jira/browse/STREAMPIPES-579)] - Larger live dashboards become unresponsive
+
+## New Feature
+
+*   [[STREAMPIPES-209](https://issues.apache.org/jira/browse/STREAMPIPES-209)] - FileStaticProperty should support filtering for extensions
+*   [[STREAMPIPES-534](https://issues.apache.org/jira/browse/STREAMPIPES-534)] - Support authentication for extensions services
+*   [[STREAMPIPES-539](https://issues.apache.org/jira/browse/STREAMPIPES-539)] - Support full screen data view in data explorer
+*   [[STREAMPIPES-546](https://issues.apache.org/jira/browse/STREAMPIPES-546)] - Support data download of configured query in data explorer
+*   [[STREAMPIPES-549](https://issues.apache.org/jira/browse/STREAMPIPES-549)] - Add extensions service for IIoT-related processors and sinks
+*   [[STREAMPIPES-559](https://issues.apache.org/jira/browse/STREAMPIPES-559)] - Support templates for adapter configurations
+*   [[STREAMPIPES-561](https://issues.apache.org/jira/browse/STREAMPIPES-561)] - Add breadcrumb navigation
+*   [[STREAMPIPES-565](https://issues.apache.org/jira/browse/STREAMPIPES-565)] - Allow to export and import StreamPipes resources
+*   [[STREAMPIPES-569](https://issues.apache.org/jira/browse/STREAMPIPES-569)] - Export data from data lake configuration
+*   [[STREAMPIPES-570](https://issues.apache.org/jira/browse/STREAMPIPES-570)] - Import multiple files at once
+*   [[STREAMPIPES-573](https://issues.apache.org/jira/browse/STREAMPIPES-573)] - Make CSV delimiter selectable in download dialog
+
+## Improvement
+
+*   [[STREAMPIPES-192](https://issues.apache.org/jira/browse/STREAMPIPES-192)] - A user has to enter too many names when using the system
+*   [[STREAMPIPES-223](https://issues.apache.org/jira/browse/STREAMPIPES-223)] - Add connection retry to consul for pipeline elements when starting up
+*   [[STREAMPIPES-228](https://issues.apache.org/jira/browse/STREAMPIPES-228)] - Edit dashboard
+*   [[STREAMPIPES-517](https://issues.apache.org/jira/browse/STREAMPIPES-517)] - Update UI to Angular 13
+*   [[STREAMPIPES-522](https://issues.apache.org/jira/browse/STREAMPIPES-522)] - Deleting adapter instance after previously stopping adapter throws error
+*   [[STREAMPIPES-528](https://issues.apache.org/jira/browse/STREAMPIPES-528)] - Support images in data explorer
+*   [[STREAMPIPES-531](https://issues.apache.org/jira/browse/STREAMPIPES-531)] - Extract shared UI modules to Angular library
+*   [[STREAMPIPES-533](https://issues.apache.org/jira/browse/STREAMPIPES-533)] - Bump Spring dependencies
+*   [[STREAMPIPES-536](https://issues.apache.org/jira/browse/STREAMPIPES-536)] - Escape asterisk in installer/upgrade_versions.sh
+*   [[STREAMPIPES-552](https://issues.apache.org/jira/browse/STREAMPIPES-552)] - Cancel subscriptions in data explorer when config changes
+*   [[STREAMPIPES-556](https://issues.apache.org/jira/browse/STREAMPIPES-556)] - Add silent period to notifications sink
+*   [[STREAMPIPES-557](https://issues.apache.org/jira/browse/STREAMPIPES-557)] - Move notifications icon from iconbar to toolbar
+*   [[STREAMPIPES-558](https://issues.apache.org/jira/browse/STREAMPIPES-558)] - Change navigation of connect module
+*   [[STREAMPIPES-560](https://issues.apache.org/jira/browse/STREAMPIPES-560)] - Add confirm dialog before leaving data explorer widget view
+*   [[STREAMPIPES-575](https://issues.apache.org/jira/browse/STREAMPIPES-575)] - Migrate Math operators from Flink to plain JVM wrapper
+*   [[STREAMPIPES-576](https://issues.apache.org/jira/browse/STREAMPIPES-576)] - Migrate transformation processors from Flink to JVM
+
+## Task
+
+*   [[STREAMPIPES-463](https://issues.apache.org/jira/browse/STREAMPIPES-463)] - Merge StreamPipes repos into a single repo
+*   [[STREAMPIPES-555](https://issues.apache.org/jira/browse/STREAMPIPES-555)] - Remove feedback button from UI
+*   [[STREAMPIPES-581](https://issues.apache.org/jira/browse/STREAMPIPES-581)] - Restructure documentation
 
 # [0.69.0]
 
diff --git a/archetypes/streampipes-archetype-extensions-jvm/pom.xml b/archetypes/streampipes-archetype-extensions-jvm/pom.xml
index 0bcfc76..f9f1152 100644
--- a/archetypes/streampipes-archetype-extensions-jvm/pom.xml
+++ b/archetypes/streampipes-archetype-extensions-jvm/pom.xml
@@ -22,7 +22,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
     <artifactId>streampipes-archetype-extensions-jvm</artifactId>
diff --git a/archetypes/streampipes-archetype-extensions-jvm/src/main/resources/archetype-resources/pom.xml b/archetypes/streampipes-archetype-extensions-jvm/src/main/resources/archetype-resources/pom.xml
index 365ab51..5fb4c9c 100644
--- a/archetypes/streampipes-archetype-extensions-jvm/src/main/resources/archetype-resources/pom.xml
+++ b/archetypes/streampipes-archetype-extensions-jvm/src/main/resources/archetype-resources/pom.xml
@@ -25,7 +25,7 @@
     <version>${version}</version>
 
     <properties>
-        <sp.version>0.70.0-SNAPSHOT</sp.version>
+        <sp.version>0.71.0-SNAPSHOT</sp.version>
     </properties>
 
     <dependencies>
diff --git a/archetypes/streampipes-archetype-pe-processors-flink/pom.xml b/archetypes/streampipes-archetype-pe-processors-flink/pom.xml
index 5ea6bab..31ff42f 100644
--- a/archetypes/streampipes-archetype-pe-processors-flink/pom.xml
+++ b/archetypes/streampipes-archetype-pe-processors-flink/pom.xml
@@ -22,7 +22,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
     <artifactId>streampipes-archetype-pe-processors-flink</artifactId>
diff --git a/archetypes/streampipes-archetype-pe-processors-flink/src/main/resources/archetype-resources/pom.xml b/archetypes/streampipes-archetype-pe-processors-flink/src/main/resources/archetype-resources/pom.xml
index e55ddab..2a63d0c 100644
--- a/archetypes/streampipes-archetype-pe-processors-flink/src/main/resources/archetype-resources/pom.xml
+++ b/archetypes/streampipes-archetype-pe-processors-flink/src/main/resources/archetype-resources/pom.xml
@@ -25,7 +25,7 @@
     <version>${version}</version>
 
     <properties>
-        <sp.version>0.70.0-SNAPSHOT</sp.version>
+        <sp.version>0.71.0-SNAPSHOT</sp.version>
     </properties>
 
     <dependencies>
diff --git a/archetypes/streampipes-archetype-pe-sinks-flink/pom.xml b/archetypes/streampipes-archetype-pe-sinks-flink/pom.xml
index 6d0837e..0771e8f 100644
--- a/archetypes/streampipes-archetype-pe-sinks-flink/pom.xml
+++ b/archetypes/streampipes-archetype-pe-sinks-flink/pom.xml
@@ -22,7 +22,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
         <relativePath>../../pom.xml</relativePath>
     </parent>
     <artifactId>streampipes-archetype-pe-sinks-flink</artifactId>
diff --git a/archetypes/streampipes-archetype-pe-sinks-flink/src/main/resources/archetype-resources/pom.xml b/archetypes/streampipes-archetype-pe-sinks-flink/src/main/resources/archetype-resources/pom.xml
index cf27b6f..540d108 100644
--- a/archetypes/streampipes-archetype-pe-sinks-flink/src/main/resources/archetype-resources/pom.xml
+++ b/archetypes/streampipes-archetype-pe-sinks-flink/src/main/resources/archetype-resources/pom.xml
@@ -25,7 +25,7 @@
     <version>${version}</version>
 
     <properties>
-        <sp.version>0.70.0-SNAPSHOT</sp.version>
+        <sp.version>0.71.0-SNAPSHOT</sp.version>
     </properties>
 
     <dependencies>
diff --git a/installer/cli/.env b/installer/cli/.env
index 8f4f118..09e1aa2 100644
--- a/installer/cli/.env
+++ b/installer/cli/.env
@@ -14,7 +14,7 @@
 # limitations under the License.
 
 SP_DOCKER_REGISTRY=apachestreampipes
-SP_VERSION=0.70.0-SNAPSHOT
+SP_VERSION=0.71.0-SNAPSHOT
 SP_SUBNET=172.31.0.0/16
 SP_CONSUL_CONTAINER_IP=172.31.0.9
 COMPOSE_PROJECT_NAME=streampipes
diff --git a/installer/cli/README.md b/installer/cli/README.md
index bcfa97a..934a18f 100644
--- a/installer/cli/README.md
+++ b/installer/cli/README.md
@@ -23,7 +23,7 @@
 * new core features for **backend** and **ui**.
 
 <!-- BEGIN do not edit: set via ../upgrade_versions.sh -->
-**Current version:** 0.70.0-SNAPSHOT
+**Current version:** 0.71.0-SNAPSHOT
 <!-- END do not edit -->
 
 ## TL;DR
diff --git a/installer/cli/deploy/standalone/processors-aggregation-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-aggregation-flink/docker-compose.dev.yml
deleted file mode 100644
index dbde6e5..0000000
--- a/installer/cli/deploy/standalone/processors-aggregation-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-aggregation-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-aggregation-flink/docker-compose.yml b/installer/cli/deploy/standalone/processors-aggregation-flink/docker-compose.yml
deleted file mode 100644
index a0d0312..0000000
--- a/installer/cli/deploy/standalone/processors-aggregation-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-aggregation-flink:
-    image: ${SP_DOCKER_REGISTRY}/processors-aggregation-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-enricher-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-enricher-flink/docker-compose.dev.yml
deleted file mode 100644
index 3519024..0000000
--- a/installer/cli/deploy/standalone/processors-enricher-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-enricher-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-enricher-flink/docker-compose.yml b/installer/cli/deploy/standalone/processors-enricher-flink/docker-compose.yml
deleted file mode 100644
index a1cbb64..0000000
--- a/installer/cli/deploy/standalone/processors-enricher-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-enricher-flink:
-    image: ${SP_DOCKER_REGISTRY}/processors-enricher-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"      
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-enricher-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-enricher-jvm/docker-compose.dev.yml
deleted file mode 100644
index 67ed576..0000000
--- a/installer/cli/deploy/standalone/processors-enricher-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-enricher-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-enricher-jvm/docker-compose.yml b/installer/cli/deploy/standalone/processors-enricher-jvm/docker-compose.yml
deleted file mode 100644
index 1230b85..0000000
--- a/installer/cli/deploy/standalone/processors-enricher-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-enricher-jvm:
-    image: ${SP_DOCKER_REGISTRY}/processors-enricher-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-filters-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-filters-jvm/docker-compose.dev.yml
deleted file mode 100644
index 178b2b5..0000000
--- a/installer/cli/deploy/standalone/processors-filters-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-filters-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-filters-jvm/docker-compose.yml b/installer/cli/deploy/standalone/processors-filters-jvm/docker-compose.yml
deleted file mode 100644
index e3e2ce0..0000000
--- a/installer/cli/deploy/standalone/processors-filters-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-filters-jvm:
-    image: ${SP_DOCKER_REGISTRY}/processors-filters-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-filters-siddhi/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-filters-siddhi/docker-compose.dev.yml
deleted file mode 100644
index 207d1ca..0000000
--- a/installer/cli/deploy/standalone/processors-filters-siddhi/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-filters-siddhi:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-filters-siddhi/docker-compose.yml b/installer/cli/deploy/standalone/processors-filters-siddhi/docker-compose.yml
deleted file mode 100644
index 0e647c9..0000000
--- a/installer/cli/deploy/standalone/processors-filters-siddhi/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-filters-siddhi:
-    image: ${SP_DOCKER_REGISTRY}/processors-filters-siddhi:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-geo-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-geo-flink/docker-compose.dev.yml
deleted file mode 100644
index 0369706..0000000
--- a/installer/cli/deploy/standalone/processors-geo-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-geo-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-geo-flink/docker-compose.yml b/installer/cli/deploy/standalone/processors-geo-flink/docker-compose.yml
deleted file mode 100644
index 8025853..0000000
--- a/installer/cli/deploy/standalone/processors-geo-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-geo-flink:
-    image: ${SP_DOCKER_REGISTRY}/processors-geo-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"      
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-geo-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-geo-jvm/docker-compose.dev.yml
deleted file mode 100644
index cb8ce93..0000000
--- a/installer/cli/deploy/standalone/processors-geo-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-geo-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-geo-jvm/docker-compose.yml b/installer/cli/deploy/standalone/processors-geo-jvm/docker-compose.yml
deleted file mode 100644
index 9bd02df..0000000
--- a/installer/cli/deploy/standalone/processors-geo-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-geo-jvm:
-    image: ${SP_DOCKER_REGISTRY}/processors-geo-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-image-processing-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-image-processing-jvm/docker-compose.dev.yml
deleted file mode 100644
index d8130c0..0000000
--- a/installer/cli/deploy/standalone/processors-image-processing-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-image-processing-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-image-processing-jvm/docker-compose.yml b/installer/cli/deploy/standalone/processors-image-processing-jvm/docker-compose.yml
deleted file mode 100644
index 2580b4f..0000000
--- a/installer/cli/deploy/standalone/processors-image-processing-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-image-processing-jvm:
-    image: ${SP_DOCKER_REGISTRY}/processors-image-processing-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-pattern-detection-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-pattern-detection-flink/docker-compose.dev.yml
deleted file mode 100644
index 541a2f4..0000000
--- a/installer/cli/deploy/standalone/processors-pattern-detection-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-pattern-detection-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-pattern-detection-flink/docker-compose.yml b/installer/cli/deploy/standalone/processors-pattern-detection-flink/docker-compose.yml
deleted file mode 100644
index 7fcc509..0000000
--- a/installer/cli/deploy/standalone/processors-pattern-detection-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-pattern-detection-flink:
-    image: ${SP_DOCKER_REGISTRY}/processors-pattern-detection-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"      
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-statistics-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-statistics-flink/docker-compose.dev.yml
deleted file mode 100644
index aeda1c1..0000000
--- a/installer/cli/deploy/standalone/processors-statistics-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-statistics-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-statistics-flink/docker-compose.yml b/installer/cli/deploy/standalone/processors-statistics-flink/docker-compose.yml
deleted file mode 100644
index 0d5c24d..0000000
--- a/installer/cli/deploy/standalone/processors-statistics-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-statistics-flink:
-    image: ${SP_DOCKER_REGISTRY}/processors-statistics-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"      
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-text-mining-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-text-mining-jvm/docker-compose.dev.yml
deleted file mode 100644
index 54bf152..0000000
--- a/installer/cli/deploy/standalone/processors-text-mining-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-text-mining-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-text-mining-jvm/docker-compose.yml b/installer/cli/deploy/standalone/processors-text-mining-jvm/docker-compose.yml
deleted file mode 100644
index 7342d8d..0000000
--- a/installer/cli/deploy/standalone/processors-text-mining-jvm/docker-compose.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-text-mining-jvm:
-    image: ${SP_DOCKER_REGISTRY}/processors-text-mining-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    volumes:
-      - ./config/models:/data/models
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-transformation-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-transformation-flink/docker-compose.dev.yml
deleted file mode 100644
index b1b8722..0000000
--- a/installer/cli/deploy/standalone/processors-transformation-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-transformation-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-transformation-flink/docker-compose.yml b/installer/cli/deploy/standalone/processors-transformation-flink/docker-compose.yml
deleted file mode 100644
index ad767bc..0000000
--- a/installer/cli/deploy/standalone/processors-transformation-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-transformation-flink:
-    image: ${SP_DOCKER_REGISTRY}/processors-transformation-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"      
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/processors-transformation-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/processors-transformation-jvm/docker-compose.dev.yml
deleted file mode 100644
index 92ae2be..0000000
--- a/installer/cli/deploy/standalone/processors-transformation-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-transformation-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/processors-transformation-jvm/docker-compose.yml b/installer/cli/deploy/standalone/processors-transformation-jvm/docker-compose.yml
deleted file mode 100644
index f2269dc..0000000
--- a/installer/cli/deploy/standalone/processors-transformation-jvm/docker-compose.yml
+++ /dev/null
@@ -1,34 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  processors-transformation-jvm:
-    image: ${SP_DOCKER_REGISTRY}/processors-transformation-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-#    ports:
-#      - "8098:8090"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/sinks-brokers-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/sinks-brokers-jvm/docker-compose.dev.yml
deleted file mode 100644
index 5959966..0000000
--- a/installer/cli/deploy/standalone/sinks-brokers-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-brokers-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/sinks-brokers-jvm/docker-compose.yml b/installer/cli/deploy/standalone/sinks-brokers-jvm/docker-compose.yml
deleted file mode 100644
index 824ef28..0000000
--- a/installer/cli/deploy/standalone/sinks-brokers-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-brokers-jvm:
-    image: ${SP_DOCKER_REGISTRY}/sinks-brokers-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/sinks-databases-flink/docker-compose.dev.yml b/installer/cli/deploy/standalone/sinks-databases-flink/docker-compose.dev.yml
deleted file mode 100644
index 41becda..0000000
--- a/installer/cli/deploy/standalone/sinks-databases-flink/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-databases-flink:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/sinks-databases-flink/docker-compose.yml b/installer/cli/deploy/standalone/sinks-databases-flink/docker-compose.yml
deleted file mode 100644
index 02467dd..0000000
--- a/installer/cli/deploy/standalone/sinks-databases-flink/docker-compose.yml
+++ /dev/null
@@ -1,33 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-databases-flink:
-    image: ${SP_DOCKER_REGISTRY}/sinks-databases-flink:${SP_VERSION}
-    depends_on:
-      - "consul"
-      - "jobmanager"      
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/sinks-databases-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/sinks-databases-jvm/docker-compose.dev.yml
deleted file mode 100644
index 8433b08..0000000
--- a/installer/cli/deploy/standalone/sinks-databases-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-databases-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/sinks-databases-jvm/docker-compose.yml b/installer/cli/deploy/standalone/sinks-databases-jvm/docker-compose.yml
deleted file mode 100644
index ec593aa..0000000
--- a/installer/cli/deploy/standalone/sinks-databases-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-databases-jvm:
-    image: ${SP_DOCKER_REGISTRY}/sinks-databases-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/cli/deploy/standalone/sinks-notifications-jvm/docker-compose.dev.yml b/installer/cli/deploy/standalone/sinks-notifications-jvm/docker-compose.dev.yml
deleted file mode 100644
index 25169fe..0000000
--- a/installer/cli/deploy/standalone/sinks-notifications-jvm/docker-compose.dev.yml
+++ /dev/null
@@ -1,20 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-notifications-jvm:
-#    ports:
-#      - "8098:8090"
diff --git a/installer/cli/deploy/standalone/sinks-notifications-jvm/docker-compose.yml b/installer/cli/deploy/standalone/sinks-notifications-jvm/docker-compose.yml
deleted file mode 100644
index 5ef9f78..0000000
--- a/installer/cli/deploy/standalone/sinks-notifications-jvm/docker-compose.yml
+++ /dev/null
@@ -1,32 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-version: "3.4"
-services:
-  sinks-notifications-jvm:
-    image: ${SP_DOCKER_REGISTRY}/sinks-notifications-jvm:${SP_VERSION}
-    depends_on:
-      - "consul"
-    logging:
-      driver: "json-file"
-      options:
-        max-size: "1m"
-        max-file: "1"
-    networks:
-      spnet:
-
-networks:
-  spnet:
-    external: true
diff --git a/installer/compose/.env b/installer/compose/.env
index 1b8ff24..d0e1f62 100644
--- a/installer/compose/.env
+++ b/installer/compose/.env
@@ -13,7 +13,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-SP_VERSION=0.70.0-SNAPSHOT
+SP_VERSION=0.71.0-SNAPSHOT
 SP_DOCKER_REGISTRY=apachestreampipes
 SP_SUBNET=172.31.0.0/16
 SP_CONSUL_CONTAINER_IP=172.31.0.9
diff --git a/installer/compose/README.md b/installer/compose/README.md
index c22dbed..bd20296 100644
--- a/installer/compose/README.md
+++ b/installer/compose/README.md
@@ -19,7 +19,7 @@
 StreamPipes Compose is a simple collection of user-friendly `docker-compose` files that easily lets you gain first-hand experience with Apache StreamPipes.
 
 <!-- BEGIN do not edit: set via ../upgrade_versions.sh -->
-**Current version:** 0.70.0-SNAPSHOT
+**Current version:** 0.71.0-SNAPSHOT
 <!-- END do not edit -->
 
 > **NOTE**: We recommend using StreamPipes Compose only for initial try-out and testing. If you are a developer and want to develop new pipeline elements or core features, use the [StreamPipes CLI](../cli).
diff --git a/installer/k8s/Chart.yaml b/installer/k8s/Chart.yaml
index a42d4a8..e88c71a 100644
--- a/installer/k8s/Chart.yaml
+++ b/installer/k8s/Chart.yaml
@@ -14,18 +14,9 @@
 # limitations under the License.
 
 apiVersion: v1
-appVersion: "0.70.0-SNAPSHOT"
+appVersion: "0.71.0-SNAPSHOT"
 description: Self-Service Data Analytics for the Industrial IoT
 name: streampipes-helm-chart
 home: https://streampipes.apache.org
-version: 0.70.0-SNAPSHOT
+version: 0.71.0-SNAPSHOT
 icon: https://avatars1.githubusercontent.com/u/33908576
-maintainers:
-  - name: Dominik Riemer
-    email: riemer@fzi.de
-  - name: Philipp Zehnder
-    email: zehnder@fzi.de
-  - name: Samuel Abt
-    email: abt@fzi.de
-  - name: Patrick Wiener
-    email: wiener@fzi.de
diff --git a/installer/k8s/README.md b/installer/k8s/README.md
index 0449bc6..dff111a 100644
--- a/installer/k8s/README.md
+++ b/installer/k8s/README.md
@@ -19,7 +19,7 @@
 StreamPipes k8s is a helm chart to deploy StreamPipes on Kubernetes.
 
 <!-- BEGIN do not edit: set via ../upgrade_versions.sh -->
-**Current version:** 0.70.0-SNAPSHOT
+**Current version:** 0.71.0-SNAPSHOT
 <!-- END do not edit -->
 
 We provide two helm chart templates to get you going:
diff --git a/installer/k8s/templates/core/backend-deployment.yaml b/installer/k8s/templates/core/backend-deployment.yaml
index a2a1b63..592c569 100644
--- a/installer/k8s/templates/core/backend-deployment.yaml
+++ b/installer/k8s/templates/core/backend-deployment.yaml
@@ -44,7 +44,7 @@
           imagePullPolicy: {{ .Values.pullPolicy }}
           env:
             - name: SP_PRIORITIZED_PROTOCOL
-              {{ if (eq .Values.deployment "lite") }}
+              {{ if (eq .Values.preferredBroker "mqtt") }}
               value: "mqtt"
               {{ else }}
               value: "kafka"
diff --git a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-deployment.yaml b/installer/k8s/templates/extensions/connect-adapters/connect-adapters-deployment.yaml
deleted file mode 100644
index 1dc5526..0000000
--- a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-deployment.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: connect-adapters
-spec:
-  selector:
-    matchLabels:
-      app: connect-adapters
-  replicas: 1
-  template:
-    metadata:
-      labels:
-        app: connect-adapters
-    spec:         
-      containers:
-        - name: connect-adapters
-          image: {{ .Values.streampipes.registry }}/connect-adapters:{{ .Values.streampipes.version }}
-          imagePullPolicy: {{ .Values.pullPolicy }}
-          ports:
-            - containerPort: 8090
diff --git a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-iiot-deployment.yaml b/installer/k8s/templates/extensions/connect-adapters/connect-adapters-iiot-deployment.yaml
deleted file mode 100644
index a67ff5e..0000000
--- a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-iiot-deployment.yaml
+++ /dev/null
@@ -1,35 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-apiVersion: apps/v1
-kind: Deployment
-metadata:
-  name: connect-adapters-iiot
-spec:
-  selector:
-    matchLabels:
-      app: connect-adapters-iiot
-  replicas: 1
-  template:
-    metadata:
-      labels:
-        app: connect-adapters-iiot
-    spec:         
-      containers:
-        - name: connect-adapters-iiot
-          image: {{ .Values.streampipes.registry }}/connect-adapters-iiot:{{ .Values.streampipes.version }}
-          imagePullPolicy: {{ .Values.pullPolicy }}
-          ports:
-            - containerPort: 8090
diff --git a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-iiot-service.yaml b/installer/k8s/templates/extensions/connect-adapters/connect-adapters-iiot-service.yaml
deleted file mode 100644
index d4ec559..0000000
--- a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-iiot-service.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-apiVersion: v1
-kind: Service
-metadata:
-  name: connect-adapters-iiot
-spec:
-  selector:
-    app: connect-adapters-iiot
-  ports:
-    - name: main
-      protocol: TCP
-      port: 8090
-      targetPort: 8002
diff --git a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-service.yaml b/installer/k8s/templates/extensions/connect-adapters/connect-adapters-service.yaml
deleted file mode 100644
index 213aef3..0000000
--- a/installer/k8s/templates/extensions/connect-adapters/connect-adapters-service.yaml
+++ /dev/null
@@ -1,27 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-apiVersion: v1
-kind: Service
-metadata:
-  name: connect-adapters
-spec:
-  selector:
-    app: connect-adapters
-  ports:
-    - name: main
-      protocol: TCP
-      port: 8090
-      targetPort: 8001
diff --git a/installer/k8s/templates/extensions/pipeline-elements/pipeline-elements-all-jvm-deployment.yaml b/installer/k8s/templates/extensions/pipeline-elements/extensions-all-jvm-deployment.yaml
similarity index 77%
rename from installer/k8s/templates/extensions/pipeline-elements/pipeline-elements-all-jvm-deployment.yaml
rename to installer/k8s/templates/extensions/pipeline-elements/extensions-all-jvm-deployment.yaml
index 6508e37..fb1cf7f 100644
--- a/installer/k8s/templates/extensions/pipeline-elements/pipeline-elements-all-jvm-deployment.yaml
+++ b/installer/k8s/templates/extensions/pipeline-elements/extensions-all-jvm-deployment.yaml
@@ -16,27 +16,27 @@
 apiVersion: apps/v1
 kind: Deployment
 metadata:
-  name: pipeline-elements-all-jvm
+  name: extensions-all-jvm
 spec:
   selector:
     matchLabels:
-      app: pipeline-elements-all-jvm
+      app: extensions-all-jvm
   replicas: 1
   template:
     metadata:
       labels:
-        app: pipeline-elements-all-jvm
+        app: extensions-all-jvm
     spec:
       volumes:
         - name: files-pv
           persistentVolumeClaim:
-            claimName: files-pvc          
+            claimName: files-pvc
       containers:
-        - name: pipeline-elements-all-jvm
-          image: {{ .Values.streampipes.registry }}/pipeline-elements-all-jvm:{{ .Values.streampipes.version }}
+        - name: extensions-all-jvm
+          image: {{ .Values.streampipes.registry }}/extensions-all-jvm:{{ .Values.streampipes.version }}
           imagePullPolicy: {{ .Values.pullPolicy }}
           ports:
             - containerPort: 8090
           volumeMounts:
             - mountPath: "/spImages"
-              name: files-pv          
+              name: files-pv
diff --git a/installer/k8s/templates/extensions/pipeline-elements/pipeline-elements-all-jvm-service.yaml b/installer/k8s/templates/extensions/pipeline-elements/extensions-all-jvm-service.yaml
similarity index 89%
rename from installer/k8s/templates/extensions/pipeline-elements/pipeline-elements-all-jvm-service.yaml
rename to installer/k8s/templates/extensions/pipeline-elements/extensions-all-jvm-service.yaml
index 1b1b01c..8a3a36a 100644
--- a/installer/k8s/templates/extensions/pipeline-elements/pipeline-elements-all-jvm-service.yaml
+++ b/installer/k8s/templates/extensions/pipeline-elements/extensions-all-jvm-service.yaml
@@ -16,14 +16,14 @@
 apiVersion: v1
 kind: Service
 metadata:
-  name: pipeline-elements-all-jvm
+  name: extensions-all-jvm
   labels:
-    name: pipeline-elements-all-jvm
+    name: extensions-all-jvm
 spec:
   selector:
-    app: pipeline-elements-all-jvm
+    app: extensions-all-jvm
   ports:
     - name: main
       protocol: TCP
       port: 8090
-      targetPort: 8090
\ No newline at end of file
+      targetPort: 8090
diff --git a/installer/k8s/templates/extensions/pipeline-elements/files-pvc.yaml b/installer/k8s/templates/extensions/pipeline-elements/files-pvc.yaml
index bd6a088..ee0fb64 100644
--- a/installer/k8s/templates/extensions/pipeline-elements/files-pvc.yaml
+++ b/installer/k8s/templates/extensions/pipeline-elements/files-pvc.yaml
@@ -31,7 +31,7 @@
 kind: PersistentVolumeClaim
 metadata:
   labels:
-    app: pipeline-elements-all-jvm
+    app: extensions-all-jvm
   name: files-pvc
 spec:
   storageClassName: local-storage-files
@@ -39,4 +39,4 @@
     - {{ .Values.persistentVolumeAccessModes }}
   resources:
     requests:
-      storage: 250Mi
\ No newline at end of file
+      storage: 250Mi
diff --git a/installer/k8s/templates/external/kafka/kafka-deployment.yaml b/installer/k8s/templates/external/kafka/kafka-deployment.yaml
index eb844d7..4d45116 100644
--- a/installer/k8s/templates/external/kafka/kafka-deployment.yaml
+++ b/installer/k8s/templates/external/kafka/kafka-deployment.yaml
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-{{- if (eq .Values.deployment "full") }}
 apiVersion: apps/v1
 kind: Deployment
 metadata:
@@ -66,4 +65,3 @@
               value: "5000012"
             - name: KAFKA_REPLICA_FETCH_MAX_BYTES
               value: "10000000"
-{{- end }}
\ No newline at end of file
diff --git a/installer/k8s/templates/external/kafka/kafka-pvc.yaml b/installer/k8s/templates/external/kafka/kafka-pvc.yaml
index 3bda2e2..894ae6c 100644
--- a/installer/k8s/templates/external/kafka/kafka-pvc.yaml
+++ b/installer/k8s/templates/external/kafka/kafka-pvc.yaml
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-{{- if (eq .Values.deployment "full") }}
 apiVersion: v1
 kind: PersistentVolume
 metadata:
@@ -41,4 +40,3 @@
   resources:
     requests:
       storage: 50M
-{{- end }}
\ No newline at end of file
diff --git a/installer/k8s/templates/external/kafka/kafka-service.yaml b/installer/k8s/templates/external/kafka/kafka-service.yaml
index 1b75c43..1e1c265 100644
--- a/installer/k8s/templates/external/kafka/kafka-service.yaml
+++ b/installer/k8s/templates/external/kafka/kafka-service.yaml
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-{{- if (eq .Values.deployment "full") }}
 apiVersion: v1
 kind: Service
 metadata:
@@ -27,4 +26,3 @@
     - name: main
       protocol: TCP
       port: 9092
-{{- end }}
\ No newline at end of file
diff --git a/installer/k8s/templates/external/zookeeper/zookeeper-deployment.yaml b/installer/k8s/templates/external/zookeeper/zookeeper-deployment.yaml
index 6debfc6..756ae4e 100644
--- a/installer/k8s/templates/external/zookeeper/zookeeper-deployment.yaml
+++ b/installer/k8s/templates/external/zookeeper/zookeeper-deployment.yaml
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-{{- if (eq .Values.deployment "full") }}
 apiVersion: apps/v1
 kind: Deployment
 metadata:
@@ -43,4 +42,3 @@
           volumeMounts:
             - mountPath: "/opt/zookeeper-{{ .Values.external.zookeeperVersion }}/data"
               name: zookeeper-pv
-{{- end }}
\ No newline at end of file
diff --git a/installer/k8s/templates/external/zookeeper/zookeeper-pvc.yaml b/installer/k8s/templates/external/zookeeper/zookeeper-pvc.yaml
index cbd9b45..741bc9b 100644
--- a/installer/k8s/templates/external/zookeeper/zookeeper-pvc.yaml
+++ b/installer/k8s/templates/external/zookeeper/zookeeper-pvc.yaml
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-{{- if (eq .Values.deployment "full") }}
 apiVersion: v1
 kind: PersistentVolume
 metadata:
@@ -41,4 +40,3 @@
   resources:
     requests:
       storage: 20M
-{{- end }}
\ No newline at end of file
diff --git a/installer/k8s/templates/external/zookeeper/zookeeper-service.yaml b/installer/k8s/templates/external/zookeeper/zookeeper-service.yaml
index cb10589..d65bdb9 100644
--- a/installer/k8s/templates/external/zookeeper/zookeeper-service.yaml
+++ b/installer/k8s/templates/external/zookeeper/zookeeper-service.yaml
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-{{- if (eq .Values.deployment "full") }}
 apiVersion: v1
 kind: Service
 metadata:
@@ -26,4 +25,3 @@
       protocol: TCP
       port: 2181
       targetPort: 2181
-{{- end }}
\ No newline at end of file
diff --git a/installer/k8s/values.yaml b/installer/k8s/values.yaml
index 0f662c7..cbbfd9b 100644
--- a/installer/k8s/values.yaml
+++ b/installer/k8s/values.yaml
@@ -15,12 +15,13 @@
 
 # lite or full (default: lite)
 deployment: lite
+preferredBroker: "kafka"
 pullPolicy: "IfNotPresent"
-persistentVolumeReclaimPolicy: "Retain"
+persistentVolumeReclaimPolicy: "Delete"
 persistentVolumeAccessModes: "ReadWriteOnce"
 
 streampipes:
-  version: "0.70.0-SNAPSHOT"
+  version: "0.71.0-SNAPSHOT"
   registry: "apachestreampipes"
 
 external:
diff --git a/installer/upgrade_versions.sh b/installer/upgrade_versions.sh
index 1c98145..fef8845 100755
--- a/installer/upgrade_versions.sh
+++ b/installer/upgrade_versions.sh
@@ -47,7 +47,7 @@
 
         for opt in "${options[@]}"
         do
-          sed -i 's/**Current version:** .*/**Current version:** '$NEW_VERSION'/g' ./$opt/README.md
+          sed -i 's/\*\*Current version:\*\* .*/\*\*Current version:\*\* '$NEW_VERSION'/g' ./$opt/README.md
         done
         ;;
   esac
diff --git a/pom.xml b/pom.xml
index e35c0f4..30e1d65 100644
--- a/pom.xml
+++ b/pom.xml
@@ -17,8 +17,7 @@
 ~
 -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-	xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 <modelVersion>4.0.0</modelVersion>
 
 <parent>
@@ -29,7 +28,7 @@
 
 <groupId>org.apache.streampipes</groupId>
 <artifactId>streampipes-parent</artifactId>
-<version>0.70.0-SNAPSHOT</version>
+<version>0.71.0-SNAPSHOT</version>
 <packaging>pom</packaging>
 
 <properties>
@@ -83,7 +82,7 @@
 	<jboss-logging-processor.version>2.2.1.Final</jboss-logging-processor.version>
 	<jersey.version>2.35</jersey.version>
 	<jetbrains-annotations.version>16.0.2</jetbrains-annotations.version>
-	<jetty.version>9.4.44.v20210927</jetty.version>
+	<jetty.version>10.0.10</jetty.version>
 	<jgrapht.version>1.3.1</jgrapht.version>
 	<jjwt.version>0.11.2</jjwt.version>
 	<json-path.version>3.1.0</json-path.version>
@@ -109,14 +108,15 @@
 	<slf4j.version>1.7.30</slf4j.version>
 	<snakeyaml.version>1.26</snakeyaml.version>
 	<snappy-java.version>1.1.7.7</snappy-java.version>
-	<spring.version>5.3.19</spring.version>
+	<spring.version>5.3.20</spring.version>
 	<spring-boot.version>2.6.7</spring-boot.version>
-	<spring-security.version>5.6.3</spring-security.version>
+	<spring-security.version>5.6.4</spring-security.version>
 	<swagger.version>2.1.12</swagger.version>
 	<type-parser.version>0.6.0</type-parser.version>
 	<underscore.version>1.47</underscore.version>
 	<wildfly-common.version>1.5.4.Final</wildfly-common.version>
 	<hawtbuf.version>1.11</hawtbuf.version>
+	<netty-tc-native.version>2.0.52.Final</netty-tc-native.version>
 
 	<!-- Test dependencies -->
 	<junit.version>4.13.2</junit.version>
@@ -222,7 +222,7 @@
 				<dependency>
 					<groupId>com.google.code.gson</groupId>
 					<artifactId>gson</artifactId>
-					<version>2.8.8</version>
+					<version>2.8.9</version>
 				</dependency>
 				<dependency>
 					<groupId>com.google.guava</groupId>
@@ -840,6 +840,11 @@
 					<artifactId>siddhi-query-compiler</artifactId>
 					<version>${siddhi.version}</version>
 				</dependency>
+				<dependency>
+					<groupId>io.netty</groupId>
+					<artifactId>netty-tcnative-classes</artifactId>
+					<version>${netty-tc-native.version}</version>
+				</dependency>
 
 				<!-- Test dependencies -->
 				<dependency>
@@ -956,7 +961,9 @@
 			<module>streampipes-mail</module>
 			<module>streampipes-resource-management</module>
 			<module>streampipes-sdk-bundle</module>
-		</modules>
+            <module>streampipes-data-explorer-commons</module>
+            <module>streampipes-data-export</module>
+        </modules>
 
 		<profiles>
 			<profile>
@@ -1153,7 +1160,7 @@
 						</goals>
 						<configuration>
 							<rules>
-								<dependencyConvergence/>
+								<dependencyConvergence />
 							</rules>
 						</configuration>
 					</execution>
@@ -1272,6 +1279,9 @@
 						<!-- Exclude some UI files which we need to check in more detail -->
 						<exclude>ui/src/assets/img/svg/**</exclude>
 
+						<!-- Exclude .angular folder -->
+						<exclude>ui/.angular/**</exclude>
+
 						<!-- Exclude disclaimer and notice files -->
 						<exclude>DISCLAIMER</exclude>
 						<exclude>NOTICE-binary</exclude>
@@ -1381,7 +1391,7 @@
 		<developerConnection>scm:git:ssh://git@github.com/apache/incubator-streampipes.git</developerConnection>
 		<connection>scm:git:ssh://git@github.com/apache/incubator-streampipes.git</connection>
 		<url>https://github.com/apache/incubator-streampipes</url>
-		<tag>HEAD</tag>
+		<tag>rel/0.70.0</tag>
 	</scm>
 
 	<issueManagement>
diff --git a/streampipes-backend/pom.xml b/streampipes-backend/pom.xml
index 30797b6..a0c5280 100644
--- a/streampipes-backend/pom.xml
+++ b/streampipes-backend/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>streampipes-backend</artifactId>
@@ -33,7 +33,7 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-service-base</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
             <exclusions>
                 <exclusion>
                     <groupId>org.springframework.boot</groupId>
@@ -44,27 +44,27 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-config</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-messaging-kafka</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-rest</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect-container-master</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-platform-services</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <!-- External dependencies -->
         <dependency>
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesBackendApplication.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesBackendApplication.java
index 6b0fe4f..260fc6f 100644
--- a/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesBackendApplication.java
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesBackendApplication.java
@@ -17,6 +17,7 @@
  */
 package org.apache.streampipes.backend;
 
+import org.apache.streampipes.backend.migrations.MigrationsHandler;
 import org.apache.streampipes.config.backend.BackendConfig;
 import org.apache.streampipes.manager.health.PipelineHealthCheck;
 import org.apache.streampipes.manager.operations.Operations;
@@ -27,6 +28,7 @@
 import org.apache.streampipes.service.base.BaseNetworkingConfig;
 import org.apache.streampipes.service.base.StreamPipesServiceBase;
 import org.apache.streampipes.storage.api.IPipelineStorage;
+import org.apache.streampipes.storage.couchdb.utils.CouchDbViewGenerator;
 import org.apache.streampipes.storage.management.StorageDispatcher;
 import org.apache.streampipes.svcdiscovery.api.model.DefaultSpServiceGroups;
 import org.apache.streampipes.svcdiscovery.api.model.DefaultSpServiceTags;
@@ -96,11 +98,14 @@
     this.executorService = Executors.newSingleThreadScheduledExecutor();
     this.healthCheckExecutorService = Executors.newSingleThreadScheduledExecutor();
 
+    new StreamPipesEnvChecker().updateEnvironmentVariables();
+    new CouchDbViewGenerator().createGenericDatabaseIfNotExists();
+
     if (!isConfigured()) {
       doInitialSetup();
     }
 
-    new StreamPipesEnvChecker().updateEnvironmentVariables();
+    new MigrationsHandler().performMigrations();
 
     executorService.schedule(this::startAllPreviouslyStoppedPipelines, 5, TimeUnit.SECONDS);
     LOG.info("Pipeline health check will run every {} seconds", HEALTH_CHECK_INTERVAL);
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesResourceConfig.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesResourceConfig.java
index 80dd28b..96ccf69 100644
--- a/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesResourceConfig.java
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/StreamPipesResourceConfig.java
@@ -19,10 +19,7 @@
 package org.apache.streampipes.backend;
 
 import io.swagger.v3.jaxrs2.integration.resources.OpenApiResource;
-import org.apache.streampipes.ps.DataLakeImageResource;
-import org.apache.streampipes.ps.DataLakeResourceV3;
-import org.apache.streampipes.ps.DataLakeResourceV4;
-import org.apache.streampipes.ps.PipelineElementTemplateResource;
+import org.apache.streampipes.ps.*;
 import org.apache.streampipes.rest.impl.*;
 import org.apache.streampipes.rest.impl.admin.*;
 import org.apache.streampipes.rest.impl.connect.*;
@@ -30,7 +27,6 @@
 import org.apache.streampipes.rest.impl.dashboard.DashboardWidget;
 import org.apache.streampipes.rest.impl.dashboard.VisualizablePipelineResource;
 import org.apache.streampipes.rest.impl.datalake.DataLakeDashboardResource;
-import org.apache.streampipes.rest.impl.datalake.DataLakeMeasureResourceV3;
 import org.apache.streampipes.rest.impl.datalake.DataLakeWidgetResource;
 import org.apache.streampipes.rest.impl.datalake.PersistedDataStreamResource;
 import org.apache.streampipes.rest.impl.nouser.PipelineElementImportNoUser;
@@ -55,20 +51,25 @@
         register(AccountActivationResource.class);
         register(Authentication.class);
         register(AssetDashboardResource.class);
+        register(AssetManagementResource.class);
         register(AutoComplete.class);
         register(CategoryResource.class);
         register(ConsulConfig.class);
         register(ContainerProvidedOptions.class);
         register(DashboardWidget.class);
         register(Dashboard.class);
+        register(DataExportResource.class);
+        register(DataImportResource.class);
         register(DataLakeImageResource.class);
         register(DataLakeResourceV3.class);
         register(DataLakeMeasureResourceV3.class);
+        register(DataLakeMeasureResourceV4.class);
         register(DataStream.class);
         register(EmailConfigurationResource.class);
         register(EmailResource.class);
         register(ExtensionsServiceEndpointResource.class);
         register(GeneralConfigurationResource.class);
+        register(GenericStorageResource.class);
         register(LabelResource.class);
         register(MeasurementUnitResource.class);
         register(Notification.class);
@@ -102,7 +103,6 @@
         register(DataLakeDashboardResource.class);
         register(DataLakeWidgetResource.class);
         register(DataLakeResourceV3.class);
-        register(DataLakeMeasureResourceV3.class);
         register(PipelineElementFile.class);
         register(DashboardWidget.class);
         register(Dashboard.class);
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/AvailableMigrations.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/AvailableMigrations.java
new file mode 100644
index 0000000..5b26d79
--- /dev/null
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/AvailableMigrations.java
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.backend.migrations;
+
+import org.apache.streampipes.backend.migrations.v070.CreateAssetLinkTypeMigration;
+import org.apache.streampipes.backend.migrations.v070.CreateDefaultAssetMigration;
+import org.apache.streampipes.backend.migrations.v070.CreateFileAssetTypeMigration;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class AvailableMigrations {
+
+  public List<Migration> getAvailableMigrations() {
+    return Arrays.asList(
+      new CreateAssetLinkTypeMigration(),
+      new CreateDefaultAssetMigration(),
+      new CreateFileAssetTypeMigration()
+    );
+  }
+}
diff --git a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/Migration.java
similarity index 78%
copy from ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
copy to streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/Migration.java
index 58ba04b..0cabbfa 100644
--- a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/Migration.java
@@ -16,3 +16,16 @@
  *
  */
 
+
+package org.apache.streampipes.backend.migrations;
+
+import java.io.IOException;
+
+public interface Migration {
+
+  boolean shouldExecute();
+
+  void executeMigration() throws IOException;
+
+  String getDescription();
+}
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/MigrationsHandler.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/MigrationsHandler.java
new file mode 100644
index 0000000..72ea592
--- /dev/null
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/MigrationsHandler.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.backend.migrations;
+
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+public class MigrationsHandler {
+
+  private static final Logger LOG = LoggerFactory.getLogger(MigrationsHandler.class);
+
+  public void performMigrations() {
+    LOG.info("Checking for required migrations...");
+    var availableMigrations = new AvailableMigrations().getAvailableMigrations();
+
+    availableMigrations.forEach(migration -> {
+      if (migration.shouldExecute()) {
+        LOG.info("Performing migration: {}", migration.getDescription());
+        try {
+          migration.executeMigration();
+        } catch (IOException e) {
+          LOG.error("An error has occurred while executing migration '{}'", migration.getDescription(), e);
+        }
+      }
+    });
+
+    LOG.info("All migrations completed.");
+  }
+}
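
The migration framework above is straightforward to extend: implement Migration, register the implementation in AvailableMigrations, and MigrationsHandler will pick it up. A minimal, hypothetical sketch (the class name and the body of the checks are illustrative, not part of this change):

package org.apache.streampipes.backend.migrations.v070;

import org.apache.streampipes.backend.migrations.Migration;

import java.io.IOException;

public class CreateExampleDocMigration implements Migration {

  @Override
  public boolean shouldExecute() {
    // return true only if the migration has not been applied yet,
    // e.g. when the document it would create is missing from the NoSQL store
    return true;
  }

  @Override
  public void executeMigration() throws IOException {
    // create the missing document here
  }

  @Override
  public String getDescription() {
    return "Creating an example document";
  }
}

Registering it means adding new CreateExampleDocMigration() to the list returned by AvailableMigrations#getAvailableMigrations; MigrationsHandler then runs it during performMigrations(), presumably invoked once at backend startup.
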
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateAssetLinkTypeMigration.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateAssetLinkTypeMigration.java
new file mode 100644
index 0000000..27463d9
--- /dev/null
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateAssetLinkTypeMigration.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.backend.migrations.v070;
+
+import org.apache.streampipes.backend.migrations.Migration;
+import org.apache.streampipes.commons.constants.GenericDocTypes;
+import org.apache.streampipes.manager.setup.tasks.CreateAssetLinkTypeTask;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+
+import java.io.IOException;
+
+public class CreateAssetLinkTypeMigration implements Migration {
+
+  @Override
+  public boolean shouldExecute() {
+    try {
+      return StorageDispatcher.INSTANCE.getNoSqlStore().getGenericStorage().findAll(GenericDocTypes.DOC_ASSET_LINK_TYPE).size() == 0;
+    } catch (IOException e) {
+      return true;
+    }
+  }
+
+  @Override
+  public void executeMigration() {
+    new CreateAssetLinkTypeTask().execute();
+  }
+
+  @Override
+  public String getDescription() {
+    return "Populating database with default asset links";
+  }
+}
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateDefaultAssetMigration.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateDefaultAssetMigration.java
new file mode 100644
index 0000000..7cf10d5
--- /dev/null
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateDefaultAssetMigration.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.backend.migrations.v070;
+
+import org.apache.streampipes.backend.migrations.Migration;
+import org.apache.streampipes.commons.constants.GenericDocTypes;
+import org.apache.streampipes.manager.setup.tasks.CreateDefaultAssetTask;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+
+import java.io.IOException;
+
+public class CreateDefaultAssetMigration implements Migration {
+
+  @Override
+  public boolean shouldExecute() {
+    try {
+      return StorageDispatcher.INSTANCE.getNoSqlStore().getGenericStorage().findOne(GenericDocTypes.DEFAULT_ASSET_DOC_ID) == null;
+    } catch (IOException e) {
+      return true;
+    }
+  }
+
+  @Override
+  public void executeMigration() {
+    new CreateDefaultAssetTask().execute();
+  }
+
+  @Override
+  public String getDescription() {
+    return "Creating a default asset representation";
+  }
+}
diff --git a/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateFileAssetTypeMigration.java b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateFileAssetTypeMigration.java
new file mode 100644
index 0000000..56d837b
--- /dev/null
+++ b/streampipes-backend/src/main/java/org/apache/streampipes/backend/migrations/v070/CreateFileAssetTypeMigration.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.backend.migrations.v070;
+
+import org.apache.streampipes.backend.migrations.Migration;
+import org.apache.streampipes.commons.constants.GenericDocTypes;
+import org.apache.streampipes.commons.random.UUIDGenerator;
+import org.apache.streampipes.model.assets.AssetLinkType;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+
+import java.io.IOException;
+import java.util.List;
+
+public class CreateFileAssetTypeMigration implements Migration {
+
+  @Override
+  public boolean shouldExecute() {
+    try {
+      return StorageDispatcher
+        .INSTANCE
+        .getNoSqlStore()
+        .getGenericStorage()
+        .findAll(GenericDocTypes.DOC_ASSET_LINK_TYPE)
+        .stream()
+        .noneMatch(al -> al.get("linkType").equals("file"));
+    } catch (IOException e) {
+      return true;
+    }
+  }
+
+  @Override
+  public void executeMigration() throws IOException {
+    var fileAsset = new AssetLinkType("file", "File", "var(--color-file)", "draft", "file", List.of(), false);
+    fileAsset.setId(UUIDGenerator.generateUuid());
+    StorageDispatcher.INSTANCE.getNoSqlStore().getGenericStorage().create(fileAsset, AssetLinkType.class);
+
+  }
+
+  @Override
+  public String getDescription() {
+    return "Create asset type 'File'";
+  }
+}
diff --git a/streampipes-client/pom.xml b/streampipes-client/pom.xml
index be30324..b654f3b 100644
--- a/streampipes-client/pom.xml
+++ b/streampipes-client/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,42 +32,42 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-json</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-cbor</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-fst</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-messaging-kafka</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-model</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-model-client</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-security-jwt</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-serializers-json</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- 3rd party Dependencies -->
diff --git a/streampipes-client/src/main/java/org/apache/streampipes/client/http/HttpRequest.java b/streampipes-client/src/main/java/org/apache/streampipes/client/http/HttpRequest.java
index 5c11455..cd5d178 100644
--- a/streampipes-client/src/main/java/org/apache/streampipes/client/http/HttpRequest.java
+++ b/streampipes-client/src/main/java/org/apache/streampipes/client/http/HttpRequest.java
@@ -74,11 +74,11 @@
     return headers.toArray(new Header[0]);
   }
 
-  protected String makeUrl() {
+  protected String makeUrl() throws SpRuntimeException {
     return makeUrl(true);
   }
 
-  protected String makeUrl(boolean includePath) {
+  protected String makeUrl(boolean includePath) throws SpRuntimeException {
     String baseUrl = clientConfig.getConnectionConfig().getBaseUrl();
     if (includePath) {
       baseUrl = baseUrl + "/" + apiPath.toString();
@@ -87,7 +87,7 @@
     return baseUrl;
   }
 
-  public DT executeRequest() throws SpRuntimeException {
+  public DT executeRequest() {
     Request request = makeRequest(serializer);
     try {
       HttpResponse response = request.execute().returnResponse();
@@ -101,10 +101,8 @@
           throw new SpRuntimeException(status.getStatusCode() + " - " + status.getReasonPhrase());
         }
       }
-    } catch (NoHttpResponseException e) {
-      throw new SpRuntimeException("Could not connect to the StreamPipes API - please check that StreamPipes is available at " + makeUrl(false));
-    } catch (IOException e) {
-      throw new SpRuntimeException(e.getMessage());
+    } catch (IOException | SpRuntimeException e) {
+      throw new SpRuntimeException("Could not connect to the StreamPipes API - please check that StreamPipes is available");
     }
   }
 
diff --git a/streampipes-commons/pom.xml b/streampipes-commons/pom.xml
index ab46b5f..8a9e80b 100644
--- a/streampipes-commons/pom.xml
+++ b/streampipes-commons/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>streampipes-commons</artifactId>
diff --git a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss b/streampipes-commons/src/main/java/org/apache/streampipes/commons/constants/GenericDocTypes.java
similarity index 72%
copy from ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
copy to streampipes-commons/src/main/java/org/apache/streampipes/commons/constants/GenericDocTypes.java
index 58ba04b..813f06b 100644
--- a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
+++ b/streampipes-commons/src/main/java/org/apache/streampipes/commons/constants/GenericDocTypes.java
@@ -16,3 +16,13 @@
  *
  */
 
+package org.apache.streampipes.commons.constants;
+
+public class GenericDocTypes {
+
+  public static final String DOC_ASSET_MANAGEMENT = "asset-management";
+  public static final String DOC_ASSET_LINK_TYPE = "asset-link-type";
+
+
+  public static final String DEFAULT_ASSET_DOC_ID = "default-asset";
+}
diff --git a/streampipes-commons/src/main/java/org/apache/streampipes/commons/exceptions/SpConfigurationException.java b/streampipes-commons/src/main/java/org/apache/streampipes/commons/exceptions/SpConfigurationException.java
new file mode 100644
index 0000000..321330b
--- /dev/null
+++ b/streampipes-commons/src/main/java/org/apache/streampipes/commons/exceptions/SpConfigurationException.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.commons.exceptions;
+
+public class SpConfigurationException extends Exception {
+
+  /**
+   * Creates a new Exception with the given message and null as the cause.
+   *
+   * @param message The exception message
+   */
+  public SpConfigurationException(String message) {
+    super(message);
+  }
+
+  /**
+   * Creates a new exception with a null message and the given cause.
+   *
+   * @param cause The exception that caused this exception
+   */
+  public SpConfigurationException(Throwable cause) {
+    super(cause);
+  }
+
+  /**
+   * Creates a new exception with the given message and cause
+   *
+   * @param message The exception message
+   * @param cause The exception that caused this exception
+   */
+  public SpConfigurationException(String message, Throwable cause) {
+    super(message, cause);
+  }
+}
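
A minimal, hypothetical sketch of the kind of user-configuration check this exception is intended for (the validator class and the port rule are illustrative assumptions; only the exception class comes from this change):

import org.apache.streampipes.commons.exceptions.SpConfigurationException;

public class BrokerConfigValidator {

  // hypothetical helper: reject clearly invalid user-provided settings
  public static void validatePort(int port) throws SpConfigurationException {
    if (port < 1 || port > 65535) {
      throw new SpConfigurationException("Port must be between 1 and 65535, but was " + port);
    }
  }

  public static void main(String[] args) {
    try {
      validatePort(70000);
    } catch (SpConfigurationException e) {
      // the connect worker maps such exceptions to a 400 response (see RuntimeResolvableResource below)
      System.err.println("Invalid configuration: " + e.getMessage());
    }
  }
}
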
diff --git a/streampipes-commons/src/main/java/org/apache/streampipes/commons/networking/Networking.java b/streampipes-commons/src/main/java/org/apache/streampipes/commons/networking/Networking.java
index 489a557..bb98349 100644
--- a/streampipes-commons/src/main/java/org/apache/streampipes/commons/networking/Networking.java
+++ b/streampipes-commons/src/main/java/org/apache/streampipes/commons/networking/Networking.java
@@ -21,13 +21,15 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
-import java.net.InetAddress;
-import java.net.UnknownHostException;
+import java.io.IOException;
+import java.net.*;
 
 public class Networking {
 
   private static final Logger LOG = LoggerFactory.getLogger(Networking.class);
 
+  private static final String DEFAULT_LOCALHOST_IP = "127.0.0.1";
+
   public static String getHostname() throws UnknownHostException {
     String selectedAddress;
     if (Envs.SP_HOST.exists()) {
@@ -35,12 +37,43 @@
       LOG.info("Using IP from provided environment variable {}: {}", Envs.SP_HOST, selectedAddress);
     } else {
       selectedAddress = InetAddress.getLocalHost().getHostAddress();
+
+      // This check is a workaround for cases where the IP address could not be derived correctly.
+      // If the fallback below also fails, the environment variable SP_HOST must be set manually.
+      if (selectedAddress.equals(DEFAULT_LOCALHOST_IP)) {
+        selectedAddress = getIpAddressForOsx();
+      }
+
       LOG.info("Using auto-discovered IP: {}", selectedAddress);
     }
 
     return selectedAddress;
   }
 
+  /**
+   * This method is a workaround for developers using macOS.
+   * On macOS, InetAddress.getLocalHost().getHostAddress() always returns 127.0.0.1,
+   * which would otherwise force developers to set the SP_HOST environment variable manually.
+   * With this method, the IP address is determined automatically.
+   *
+   * @return the auto-detected IP address
+   */
+  private static String getIpAddressForOsx() {
+
+    String result = DEFAULT_LOCALHOST_IP;
+
+    // try-with-resources ensures the socket is closed even when the connection attempt fails
+    try (Socket socket = new Socket()) {
+      socket.connect(new InetSocketAddress("streampipes.apache.org", 80));
+      result = socket.getLocalAddress().getHostAddress();
+    } catch (IOException e) {
+      LOG.error(e.getMessage());
+      LOG.error("IP address was not set automatically. Use the environment variable SP_HOST to set it manually.");
+    }
+
+    return result;
+  }
+
   public static Integer getPort(Integer defaultPort) {
     Integer selectedPort;
     if (Envs.SP_PORT.exists()) {
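
A minimal usage sketch for the updated Networking helper; the default port value is only an illustrative assumption:

import org.apache.streampipes.commons.networking.Networking;

import java.net.UnknownHostException;

public class NetworkingExample {

  public static void main(String[] args) throws UnknownHostException {
    // uses SP_HOST if set, otherwise auto-discovery (with the macOS fallback above when 127.0.0.1 is detected)
    String host = Networking.getHostname();
    // uses SP_PORT if set, otherwise the provided default
    Integer port = Networking.getPort(8090);
    System.out.println("Service endpoint: " + host + ":" + port);
  }
}
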
diff --git a/streampipes-commons/src/main/java/org/apache/streampipes/commons/zip/ZipFileExtractor.java b/streampipes-commons/src/main/java/org/apache/streampipes/commons/zip/ZipFileExtractor.java
index 5ada0f4..12e5af6 100644
--- a/streampipes-commons/src/main/java/org/apache/streampipes/commons/zip/ZipFileExtractor.java
+++ b/streampipes-commons/src/main/java/org/apache/streampipes/commons/zip/ZipFileExtractor.java
@@ -17,10 +17,9 @@
  */
 package org.apache.streampipes.commons.zip;
 
-import java.io.File;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.io.InputStream;
+import java.io.*;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipInputStream;
 
@@ -32,6 +31,32 @@
     this.zipInputStream = zipInputStream;
   }
 
+  // TODO used by export feature - extend this to support binaries
+  public Map<String, byte[]> extractZipToMap() throws IOException {
+    byte[] buffer = new byte[1024];
+    Map<String, byte[]> entries = new HashMap<>();
+    ZipInputStream zis = new ZipInputStream(zipInputStream);
+    ZipEntry zipEntry = zis.getNextEntry();
+    while (zipEntry != null) {
+      ByteArrayOutputStream fos = new ByteArrayOutputStream();
+      int len;
+      while ((len = zis.read(buffer)) > 0) {
+        fos.write(buffer, 0, len);
+      }
+      entries.put(sanitizeName(zipEntry.getName()), fos.toByteArray());
+      fos.close();
+      zipEntry = zis.getNextEntry();
+    }
+    zis.closeEntry();
+    zis.close();
+
+    return entries;
+  }
+
+  private String sanitizeName(String name) {
+    return name.split("\\.")[0];
+  }
+
   public void extractZipToFile(String targetFolder) throws IOException {
     File destDir = new File(targetFolder);
     if (!destDir.exists()) {
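
A short usage sketch for the new extractZipToMap method, assuming the existing constructor that wraps an InputStream (as suggested by the field assignment above); the archive path is a hypothetical example:

import org.apache.streampipes.commons.zip.ZipFileExtractor;

import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Map;

public class ZipToMapExample {

  public static void main(String[] args) throws IOException {
    try (InputStream in = new FileInputStream("export.zip")) {
      Map<String, byte[]> entries = new ZipFileExtractor(in).extractZipToMap();
      // keys are the entry names without their file extension (see sanitizeName above)
      entries.forEach((name, bytes) -> System.out.println(name + ": " + bytes.length + " bytes"));
    }
  }
}
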
diff --git a/streampipes-config/pom.xml b/streampipes-config/pom.xml
index 48c4a4f..5e953bb 100644
--- a/streampipes-config/pom.xml
+++ b/streampipes-config/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,17 +32,17 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-serializers-json</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-vocabulary</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-service-discovery</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-connect-api/pom.xml b/streampipes-connect-api/pom.xml
index 75f5c9b..a7bbbd9 100644
--- a/streampipes-connect-api/pom.xml
+++ b/streampipes-connect-api/pom.xml
@@ -17,13 +17,11 @@
   ~
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -33,7 +31,7 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-model</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
     </dependencies>
     <properties>
diff --git a/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/IParser.java b/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/IParser.java
index 482bf9f..017adb6 100644
--- a/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/IParser.java
+++ b/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/IParser.java
@@ -19,6 +19,7 @@
 
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.model.connect.grounding.FormatDescription;
+import org.apache.streampipes.model.connect.guess.AdapterGuessInfo;
 import org.apache.streampipes.model.schema.EventSchema;
 
 import java.io.InputStream;
@@ -39,4 +40,12 @@
    * @return
    */
   EventSchema getEventSchema(List<byte[]> oneEvent);
+
+  default boolean supportsPreview() {
+    return false;
+  }
+
+  default AdapterGuessInfo getSchemaAndSample(List<byte[]> eventSample) throws ParseException {
+    throw new RuntimeException("Not yet implemented!");
+  }
 }
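
The two new default methods let callers prefer a richer preview when a parser supports it. A hedged, caller-side sketch (the helper class is hypothetical; only the IParser methods come from this change):

import org.apache.streampipes.connect.api.IParser;
import org.apache.streampipes.model.connect.guess.AdapterGuessInfo;
import org.apache.streampipes.model.schema.EventSchema;

import java.util.List;

public class ParserPreviewExample {

  public static void describe(IParser parser, List<byte[]> sample) {
    if (parser.supportsPreview()) {
      // schema plus sample values, usable for the adapter preview
      AdapterGuessInfo info = parser.getSchemaAndSample(sample);
      System.out.println("Preview available: " + info);
    } else {
      // fall back to the schema-only guess
      EventSchema schema = parser.getEventSchema(sample);
      System.out.println("Schema only: " + schema);
    }
  }
}
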
diff --git a/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/exception/ParseException.java b/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/exception/ParseException.java
index 2064f29..37c3ed3 100644
--- a/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/exception/ParseException.java
+++ b/streampipes-connect-api/src/main/java/org/apache/streampipes/connect/api/exception/ParseException.java
@@ -19,10 +19,16 @@
 package org.apache.streampipes.connect.api.exception;
 
 public class ParseException extends RuntimeException {
+
     public ParseException() {}
 
     public ParseException(String message)
     {
         super(message);
     }
+
+    public ParseException(String message,
+                          Throwable throwable) {
+        super(message, throwable);
+    }
 }
diff --git a/streampipes-connect-container-master/pom.xml b/streampipes-connect-container-master/pom.xml
index 3d4207c..b1b9c26 100644
--- a/streampipes-connect-container-master/pom.xml
+++ b/streampipes-connect-container-master/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,32 +32,32 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-measurement-units</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-pipeline-management</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-storage-couchdb</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-user-management</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-data-explorer</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/AdapterMasterManagement.java b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/AdapterMasterManagement.java
index fc955bb..dc32b88 100644
--- a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/AdapterMasterManagement.java
+++ b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/AdapterMasterManagement.java
@@ -189,7 +189,7 @@
 
         LOG.info("Started adapter " + elementId + " on: " + baseUrl);
       } catch (NoServiceEndpointsAvailableException | URISyntaxException e) {
-        e.printStackTrace();
+        throw new AdapterException("Could not start adapter due to unavailable service endpoint", e);
       }
     }
   }
diff --git a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/DescriptionManagement.java b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/DescriptionManagement.java
index 834c764..da40e49 100644
--- a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/DescriptionManagement.java
+++ b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/DescriptionManagement.java
@@ -18,6 +18,7 @@
 
 package org.apache.streampipes.connect.container.master.management;
 
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
 import org.apache.streampipes.connect.adapter.AdapterRegistry;
 import org.apache.streampipes.connect.api.IFormat;
 import org.apache.streampipes.connect.api.exception.AdapterException;
@@ -25,6 +26,7 @@
 import org.apache.streampipes.model.connect.grounding.FormatDescription;
 import org.apache.streampipes.storage.api.IAdapterStorage;
 import org.apache.streampipes.storage.couchdb.CouchDbStorageManager;
+import org.apache.streampipes.storage.management.StorageDispatcher;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -55,6 +57,15 @@
                 .findFirst();
     }
 
+    public void deleteAdapterDescription(String id) throws SpRuntimeException {
+        var adapterStorage = CouchDbStorageManager.INSTANCE.getAdapterDescriptionStorage();
+        var adapter = adapterStorage.getAdapter(id);
+        if (!isAdapterUsed(adapter)) {
+            adapterStorage.deleteAdapter(id);
+        } else {
+            throw new SpRuntimeException("This adapter is used by an existing instance and cannot be deleted");
+        }
+    }
     public String getAssets(String baseUrl) throws AdapterException {
         return WorkerRestClient.getAssets(baseUrl);
     }
@@ -67,4 +78,12 @@
         return WorkerRestClient.getDocumentationAsset(baseUrl);
     }
 
+    private boolean isAdapterUsed(AdapterDescription adapter) {
+        var allAdapters = StorageDispatcher.INSTANCE.getNoSqlStore().getAdapterInstanceStorage().getAllAdapters();
+
+        return allAdapters
+          .stream()
+          .anyMatch(runningAdapter -> runningAdapter.getAppId().equals(adapter.getAppId()));
+    }
+
 }
diff --git a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/GuessManagement.java b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/GuessManagement.java
index 0d867bb..de66c54 100644
--- a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/GuessManagement.java
+++ b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/GuessManagement.java
@@ -26,18 +26,21 @@
 import org.apache.http.entity.ContentType;
 import org.apache.http.util.EntityUtils;
 import org.apache.streampipes.commons.exceptions.NoServiceEndpointsAvailableException;
-import org.apache.streampipes.connect.api.exception.AdapterException;
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
+import org.apache.streampipes.connect.adapter.model.pipeline.AdapterEventPreviewPipeline;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.connect.api.exception.WorkerAdapterException;
 import org.apache.streampipes.connect.container.master.util.WorkerPaths;
 import org.apache.streampipes.model.connect.adapter.AdapterDescription;
+import org.apache.streampipes.model.connect.guess.AdapterEventPreview;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.model.message.ErrorMessage;
+import org.apache.streampipes.model.connect.guess.GuessTypeInfo;
 import org.apache.streampipes.serializers.json.JacksonSerializer;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.util.Map;
 
 public class GuessManagement {
 
@@ -48,8 +51,7 @@
         this.workerUrlProvider = new WorkerUrlProvider();
     }
 
-    public GuessSchema guessSchema(AdapterDescription adapterDescription) throws AdapterException, ParseException, WorkerAdapterException {
-        try {
+    public GuessSchema guessSchema(AdapterDescription adapterDescription) throws ParseException, WorkerAdapterException, NoServiceEndpointsAvailableException, IOException {
             String workerUrl = workerUrlProvider.getWorkerBaseUrl(adapterDescription.getAppId());
 
             workerUrl = workerUrl + WorkerPaths.getGuessSchemaPath();
@@ -67,18 +69,14 @@
             String responseString = EntityUtils.toString(httpResponse.getEntity());
 
             if (httpResponse.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
-                return mapper.readValue(responseString, GuessSchema.class);
-            }  else {
-                    ErrorMessage errorMessage = mapper.readValue(responseString, ErrorMessage.class);
-
-                    LOG.error(errorMessage.getElementName());
-                    throw new WorkerAdapterException(errorMessage);
+              return mapper.readValue(responseString, GuessSchema.class);
+            } else {
+              var exception = mapper.readValue(responseString, SpConfigurationException.class);
+              throw new WorkerAdapterException(exception.getMessage(), exception.getCause());
             }
-
-        } catch (IOException | NoServiceEndpointsAvailableException e) {
-            LOG.error(e.getMessage());
-            throw new AdapterException("Error in connect worker: ", e);
-        }
     }
 
+  public Map<String, GuessTypeInfo> performAdapterEventPreview(AdapterEventPreview previewRequest) {
+      return new AdapterEventPreviewPipeline(previewRequest).makePreview();
+  }
 }
diff --git a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerAdministrationManagement.java b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerAdministrationManagement.java
index 9d3b5e3..1124929 100644
--- a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerAdministrationManagement.java
+++ b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerAdministrationManagement.java
@@ -49,7 +49,7 @@
             // only install once adapter description per service group
             boolean alreadyInstalled = alreadyRegisteredAdapters
                     .stream()
-                    .anyMatch(a -> a.getAppId().equals(adapterDescription.getAppId()) && a.getCorrespondingServiceGroup().equals(adapterDescription.getCorrespondingServiceGroup()));
+                    .anyMatch(a -> a.getAppId().equals(adapterDescription.getAppId()));
             if (!alreadyInstalled) {
                 this.adapterDescriptionStorage.storeAdapter(adapterDescription);
             }
diff --git a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerRestClient.java b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerRestClient.java
index 8c9f107..241b93f 100644
--- a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerRestClient.java
+++ b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/management/WorkerRestClient.java
@@ -18,8 +18,12 @@
 
 package org.apache.streampipes.connect.container.master.management;
 
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.commons.io.IOUtils;
+import org.apache.http.HttpResponse;
 import org.apache.http.client.fluent.Request;
 import org.apache.http.entity.ContentType;
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.connect.container.master.util.WorkerPaths;
 import org.apache.streampipes.model.connect.adapter.AdapterDescription;
@@ -37,6 +41,7 @@
 import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.util.List;
 
 /**
@@ -99,66 +104,75 @@
 
     public static void startAdapter(String url,
                                     AdapterDescription ad) throws AdapterException {
-        try {
-            logger.info("Trying to start adapter on endpoint: " + url);
-
-            String adapterDescription = JacksonSerializer.getObjectMapper().writeValueAsString(ad);
-
-            String responseString = Request.Post(url)
-                    .bodyString(adapterDescription, ContentType.APPLICATION_JSON)
-                    .connectTimeout(1000)
-                    .socketTimeout(100000)
-                    .execute().returnContent().asString();
-
-            logger.info("Adapter started on endpoint: " + url);
-
-        } catch (IOException e) {
-            logger.error("Adapter did not start", e);
-            throw new AdapterException("Adapter with URL: " + url + " did not start");
-        }
+        logger.info("Trying to start adapter on endpoint {} ", url);
+        triggerAdapterStateChange(ad, url, "started");
     }
 
 
     public static void stopAdapter(AdapterDescription ad,
                                    String url) throws AdapterException {
 
-        // Stop execution of adapter
-        try {
-            logger.info("Trying to stop adapter on endpoint: " + url);
+        logger.info("Trying to stop adapter on endpoint {} ", url);
+        triggerAdapterStateChange(ad, url, "stopped");
+    }
 
+    private static void triggerAdapterStateChange(AdapterDescription ad,
+                                            String url,
+                                            String action) throws AdapterException {
+        try {
             String adapterDescription = JacksonSerializer.getObjectMapper().writeValueAsString(ad);
 
-            // TODO change this to a delete request
-            String responseString = Request.Post(url)
-                    .bodyString(adapterDescription, ContentType.APPLICATION_JSON)
-                    .connectTimeout(1000)
-                    .socketTimeout(100000)
-                    .execute().returnContent().asString();
+            var response = triggerPost(url, adapterDescription);
+            var responseString = getResponseBody(response);
 
-            logger.info("Adapter stopped on endpoint: " + url + " with Response: " + responseString);
+            if (response.getStatusLine().getStatusCode() != 200) {
+                var exception = getSerializer().readValue(responseString, AdapterException.class);
+                throw new AdapterException(exception.getMessage(), exception.getCause());
+            }
+
+            logger.info("Adapter {} on endpoint: " + url + " with Response: " + responseString);
 
         } catch (IOException e) {
-            logger.error("Adapter was not stopped successfully", e);
-            throw new AdapterException("Adapter was not stopped successfully with url: " + url);
+            logger.error("Adapter was not {} successfully", action, e);
+            throw new AdapterException("Adapter was not " + action + " successfully with url " + url, e);
         }
+    }
 
+    private static String getResponseBody(HttpResponse response) throws IOException {
+        return IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
+    }
+
+    private static HttpResponse triggerPost(String url,
+                                            String payload) throws IOException {
+        return Request.Post(url)
+          .bodyString(payload, ContentType.APPLICATION_JSON)
+          .connectTimeout(1000)
+          .socketTimeout(100000)
+          .execute().returnResponse();
     }
 
     public static RuntimeOptionsResponse getConfiguration(String workerEndpoint,
                                                           String appId,
-                                                          RuntimeOptionsRequest runtimeOptionsRequest) throws AdapterException {
+                                                          RuntimeOptionsRequest runtimeOptionsRequest) throws AdapterException, SpConfigurationException {
         String url = workerEndpoint + WorkerPaths.getRuntimeResolvablePath(appId);
 
         try {
             String payload = JacksonSerializer.getObjectMapper().writeValueAsString(runtimeOptionsRequest);
-            String responseString = Request.Post(url)
+            var response = Request.Post(url)
                        .bodyString(payload, ContentType.APPLICATION_JSON)
                        .connectTimeout(1000)
                        .socketTimeout(100000)
-                       .execute().returnContent().asString();
+                       .execute()
+                        .returnResponse();
 
-            return JacksonSerializer.getObjectMapper().readValue(responseString, RuntimeOptionsResponse.class);
+            String responseString = IOUtils.toString(response.getEntity().getContent(), StandardCharsets.UTF_8);
 
+            if (response.getStatusLine().getStatusCode() == 200) {
+                return getSerializer().readValue(responseString, RuntimeOptionsResponse.class);
+            } else {
+                var exception = getSerializer().readValue(responseString, SpConfigurationException.class);
+                throw new SpConfigurationException(exception.getMessage(), exception.getCause());
+            }
         } catch (IOException e) {
             e.printStackTrace();
             throw new AdapterException("Could not resolve runtime configurations from " + url);
@@ -245,5 +259,9 @@
     private static IAdapterStorage getAdapterStorage() {
         return StorageDispatcher.INSTANCE.getNoSqlStore().getAdapterInstanceStorage();
     }
+
+    private static ObjectMapper getSerializer() {
+        return JacksonSerializer.getObjectMapper();
+    }
 }
 
diff --git a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/util/WorkerPaths.java b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/util/WorkerPaths.java
index f3af79b..1a48828 100644
--- a/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/util/WorkerPaths.java
+++ b/streampipes-connect-container-master/src/main/java/org/apache/streampipes/connect/container/master/util/WorkerPaths.java
@@ -60,8 +60,7 @@
     SpServiceUrlProvider serviceUrlProvider = SpServiceUrlProvider.ADAPTER;
     String endpointUrl = new ExtensionsServiceEndpointGenerator(appId, serviceUrlProvider).getEndpointResourceUrl();
     URI uri = new URI(endpointUrl);
-    String baseUrl = uri.getScheme() + "://" + uri.getAuthority();
-    return baseUrl;
+    return uri.getScheme() + "://" + uri.getAuthority();
   }
 
 
diff --git a/streampipes-connect-container-worker/pom.xml b/streampipes-connect-container-worker/pom.xml
index dc1e74b..09dd177 100644
--- a/streampipes-connect-container-worker/pom.xml
+++ b/streampipes-connect-container-worker/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,37 +32,37 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-config</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-container</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-client</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-service-extensions-base</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-rest-shared</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-serializers-json</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <!-- External dependencies -->
 
diff --git a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagement.java b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagement.java
index 1c8d606..f250ac7 100644
--- a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagement.java
+++ b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagement.java
@@ -80,11 +80,9 @@
 
         IAdapter<?> adapter = RunningAdapterInstances.INSTANCE.removeAdapter(elementId);
 
-        if (adapter == null) {
-            throw new AdapterException("Adapter with id " + elementId + " was not found in this container and cannot be stopped.");
+        if (adapter != null) {
+            adapter.stopAdapter();
         }
-
-        adapter.stopAdapter();
     }
 
 }
diff --git a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/GuessManagement.java b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/GuessManagement.java
index 41f81de..362c63a 100644
--- a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/GuessManagement.java
+++ b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/management/GuessManagement.java
@@ -60,8 +60,7 @@
 
             throw new ParseException(errorClass + e.getMessage());
         } catch (Exception e) {
-            LOG.error("Unknown Error: " + e.toString());
-            throw new AdapterException(e.toString());
+            throw new AdapterException(e.getMessage(), e);
         }
 
         return guessSchema;
diff --git a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/AdapterWorkerResource.java b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/AdapterWorkerResource.java
index e423590..9aa69c8 100644
--- a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/AdapterWorkerResource.java
+++ b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/AdapterWorkerResource.java
@@ -20,6 +20,7 @@
 
 import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.connect.container.worker.management.AdapterWorkerManagement;
+import org.apache.streampipes.model.StreamPipesErrorMessage;
 import org.apache.streampipes.model.connect.adapter.AdapterSetDescription;
 import org.apache.streampipes.model.connect.adapter.AdapterStreamDescription;
 import org.apache.streampipes.model.message.Notifications;
@@ -65,14 +66,13 @@
 
         try {
             adapterManagement.invokeStreamAdapter(adapterStreamDescription);
+            String responseMessage = "Stream adapter with id " + adapterStreamDescription.getUri() + " successfully started";
+            logger.info(responseMessage);
+            return ok(Notifications.success(responseMessage));
         } catch (AdapterException e) {
             logger.error("Error while starting adapter with id " + adapterStreamDescription.getUri(), e);
-            return ok(Notifications.error(e.getMessage()));
+            return serverError(StreamPipesErrorMessage.from(e));
         }
-        String responseMessage = "Stream adapter with id " + adapterStreamDescription.getUri() + " successfully started";
-
-        logger.info(responseMessage);
-        return ok(Notifications.success(responseMessage));
     }
 
     @POST
@@ -82,17 +82,20 @@
     @Produces(MediaType.APPLICATION_JSON)
     public Response stopStreamAdapter(AdapterStreamDescription adapterStreamDescription) {
 
+        String responseMessage;
         try {
-            adapterManagement.stopStreamAdapter(adapterStreamDescription);
+            if (adapterStreamDescription.isRunning()) {
+                adapterManagement.stopStreamAdapter(adapterStreamDescription);
+                responseMessage = "Stream adapter with id " + adapterStreamDescription.getElementId() + " successfully stopped";
+            } else {
+                responseMessage = "Stream adapter with id " + adapterStreamDescription.getElementId() + " seems not to be running";
+            }
+            logger.info(responseMessage);
+            return ok(Notifications.success(responseMessage));
         } catch (AdapterException e) {
-            logger.error("Error while stopping adapter with id " + adapterStreamDescription.getUri(), e);
-            return ok(Notifications.error(e.getMessage()));
+            logger.error("Error while stopping adapter with id " + adapterStreamDescription.getElementId(), e);
+            return serverError(StreamPipesErrorMessage.from(e));
         }
-
-        String responseMessage = "Stream adapter with id " + adapterStreamDescription.getUri() + " successfully stopped";
-
-        logger.info(responseMessage);
-        return ok(Notifications.success(responseMessage));
     }
 
     @POST
diff --git a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/GuessResource.java b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/GuessResource.java
index 39d8784..c357a54 100644
--- a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/GuessResource.java
+++ b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/GuessResource.java
@@ -18,11 +18,11 @@
 
 package org.apache.streampipes.connect.container.worker.rest;
 
+import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.connect.container.worker.management.GuessManagement;
 import org.apache.streampipes.model.connect.adapter.AdapterDescription;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.model.message.Notifications;
 import org.apache.streampipes.rest.shared.annotation.JacksonSerialized;
 import org.apache.streampipes.rest.shared.impl.AbstractSharedRestInterface;
 import org.slf4j.Logger;
@@ -62,10 +62,10 @@
           return ok(result);
       } catch (ParseException e) {
           logger.error("Error while parsing events: ", e);
-          return serverError(Notifications.error(e.getMessage()));
-      } catch (Exception e) {
-          logger.error("Error while guess schema for AdapterDescription: " + adapterDescription.getElementId(), e);
-          return serverError(Notifications.error(e.getMessage()));
+          return serverError(e);
+      } catch (AdapterException e) {
+          logger.error("Error while guessing schema for AdapterDescription: {}, {}", adapterDescription.getElementId(), e.getMessage());
+          return serverError(e);
       }
 
   }
diff --git a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/RuntimeResolvableResource.java b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/RuntimeResolvableResource.java
index 92a81fd..72a2094 100644
--- a/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/RuntimeResolvableResource.java
+++ b/streampipes-connect-container-worker/src/main/java/org/apache/streampipes/connect/container/worker/rest/RuntimeResolvableResource.java
@@ -18,6 +18,8 @@
 
 package org.apache.streampipes.connect.container.worker.rest;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
 import org.apache.streampipes.connect.api.Connector;
 import org.apache.streampipes.connect.container.worker.management.RuntimeResovable;
 import org.apache.streampipes.container.api.ResolvesContainerProvidedOptions;
@@ -47,15 +49,21 @@
         RuntimeOptionsResponse response;
         RuntimeResolvableRequestHandler handler = new RuntimeResolvableRequestHandler();
 
-        if (connector instanceof ResolvesContainerProvidedOptions) {
-            response = handler.handleRuntimeResponse((ResolvesContainerProvidedOptions) connector, runtimeOptionsRequest);
-        } else if (connector instanceof SupportsRuntimeConfig) {
-            response = handler.handleRuntimeResponse((SupportsRuntimeConfig) connector, runtimeOptionsRequest);
-        } else {
-            throw new WebApplicationException(javax.ws.rs.core.Response.Status.BAD_REQUEST);
+        try {
+            if (connector instanceof ResolvesContainerProvidedOptions) {
+                response = handler.handleRuntimeResponse((ResolvesContainerProvidedOptions) connector, runtimeOptionsRequest);
+                return ok(response);
+            } else if (connector instanceof SupportsRuntimeConfig) {
+                response = handler.handleRuntimeResponse((SupportsRuntimeConfig) connector, runtimeOptionsRequest);
+                return ok(response);
+            } else {
+                throw new SpRuntimeException("This element does not support dynamic options - is the pipeline element description up to date?");
+            }
+        } catch (SpConfigurationException e) {
+            return javax.ws.rs.core.Response
+              .status(400)
+              .entity(e)
+              .build();
         }
-
-        return ok(response);
     }
-
 }
diff --git a/streampipes-connect-container-worker/src/test/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagementTest.java b/streampipes-connect-container-worker/src/test/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagementTest.java
index 631f3f9..f16a2e7 100644
--- a/streampipes-connect-container-worker/src/test/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagementTest.java
+++ b/streampipes-connect-container-worker/src/test/java/org/apache/streampipes/connect/container/worker/management/AdapterWorkerManagementTest.java
@@ -42,22 +42,6 @@
 public class AdapterWorkerManagementTest {
 
     @Test
-    public void stopStreamAdapterFail() {
-        String expected = "Adapter with id http://test.de was not found in this container and cannot be stopped.";
-        AdapterStreamDescription asd = new GenericAdapterStreamDescription();
-        asd.setUri("http://test.de");
-
-        AdapterWorkerManagement adapterManagement = new AdapterWorkerManagement();
-
-        try {
-            adapterManagement.stopStreamAdapter(asd);
-            fail();
-        } catch (AdapterException e) {
-            assertEquals(expected, e.getMessage());
-        }
-    }
-
-    @Test
     public void stopStreamAdapterSuccess() throws AdapterException {
         TestAdapter testAdapter = getTestAdapterInstance();
         RunningAdapterInstances.INSTANCE.addAdapter("http://t.de/", testAdapter, null);
@@ -69,22 +53,6 @@
     }
 
     @Test
-    public void stopSetAdapterFail() {
-        String expected = "Adapter with id http://test.de was not found in this container and cannot be stopped.";
-        AdapterSetDescription asd = new GenericAdapterSetDescription();
-        asd.setUri("http://test.de");
-
-        AdapterWorkerManagement adapterManagement = new AdapterWorkerManagement();
-
-        try {
-            adapterManagement.stopSetAdapter(asd);
-            fail();
-        } catch (AdapterException e) {
-            assertEquals(expected, e.getMessage());
-        }
-    }
-
-    @Test
     public void stopSetAdapterSuccess() throws AdapterException {
         TestAdapter testAdapter = getTestAdapterInstance();
 
diff --git a/streampipes-connect/pom.xml b/streampipes-connect/pom.xml
index 2f36435..7517b49 100755
--- a/streampipes-connect/pom.xml
+++ b/streampipes-connect/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,72 +32,72 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-config</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect-api</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-json</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-smile</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-cbor</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat-fst</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-measurement-units</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-messaging-kafka</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-messaging-jms</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-messaging-mqtt</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-model</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-sdk</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-rest-shared</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-serializers-json</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencis -->
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/Adapter.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/Adapter.java
index 8b6ee89..d5d495a 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/Adapter.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/Adapter.java
@@ -22,29 +22,17 @@
 import org.apache.streampipes.config.backend.BackendConfig;
 import org.apache.streampipes.config.backend.SpProtocol;
 import org.apache.streampipes.connect.adapter.model.pipeline.AdapterPipeline;
-import org.apache.streampipes.connect.adapter.preprocessing.transform.stream.DuplicateFilterPipelineElement;
-import org.apache.streampipes.connect.api.IAdapterPipelineElement;
-import org.apache.streampipes.connect.adapter.preprocessing.elements.*;
+import org.apache.streampipes.connect.adapter.preprocessing.elements.SendToJmsAdapterSink;
+import org.apache.streampipes.connect.adapter.preprocessing.elements.SendToKafkaAdapterSink;
+import org.apache.streampipes.connect.adapter.preprocessing.elements.SendToMqttAdapterSink;
 import org.apache.streampipes.connect.api.IAdapter;
 import org.apache.streampipes.model.connect.adapter.AdapterDescription;
-import org.apache.streampipes.model.connect.rules.TransformationRuleDescription;
-import org.apache.streampipes.model.connect.rules.stream.EventRateTransformationRuleDescription;
-import org.apache.streampipes.model.connect.rules.stream.RemoveDuplicatesTransformationRuleDescription;
-import org.apache.streampipes.model.connect.rules.value.AddTimestampRuleDescription;
-import org.apache.streampipes.model.connect.rules.value.AddValueTransformationRuleDescription;
-import org.apache.streampipes.model.connect.rules.value.CorrectionValueTransformationRuleDescription;
 import org.apache.streampipes.model.grounding.JmsTransportProtocol;
 import org.apache.streampipes.model.grounding.KafkaTransportProtocol;
 import org.apache.streampipes.model.grounding.MqttTransportProtocol;
 import org.apache.streampipes.model.grounding.TransportProtocol;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
 
 public abstract class Adapter<T extends AdapterDescription> implements IAdapter<T> {
-    Logger logger = LoggerFactory.getLogger(Adapter.class);
 
     private boolean debug;
 
@@ -100,97 +88,7 @@
     }
 
     private AdapterPipeline getAdapterPipeline(T adapterDescription) {
-
-        List<IAdapterPipelineElement> pipelineElements = new ArrayList<>();
-
-        // Must be before the schema transformations to ensure that user can move this event property
-        AddTimestampRuleDescription timestampTransformationRuleDescription = getTimestampRule(adapterDescription);
-        if (timestampTransformationRuleDescription != null) {
-            pipelineElements.add(new AddTimestampPipelineElement(
-                    timestampTransformationRuleDescription.getRuntimeKey()));
-        }
-
-        AddValueTransformationRuleDescription valueTransformationRuleDescription = getAddValueRule(adapterDescription);
-        if (valueTransformationRuleDescription != null) {
-            pipelineElements.add(new AddValuePipelineElement(
-                    valueTransformationRuleDescription.getRuntimeKey(),
-                    valueTransformationRuleDescription.getStaticValue()));
-        }
-
-
-        // first transform schema before transforming vales
-        // value rules should use unique keys for of new schema
-        pipelineElements.add(new TransformSchemaAdapterPipelineElement(adapterDescription.getSchemaRules()));
-        pipelineElements.add(new TransformValueAdapterPipelineElement(adapterDescription.getValueRules()));
-
-
-        RemoveDuplicatesTransformationRuleDescription duplicatesTransformationRuleDescription = getRemoveDuplicateRule(adapterDescription);
-        if (duplicatesTransformationRuleDescription != null) {
-            pipelineElements.add(new DuplicateFilterPipelineElement(duplicatesTransformationRuleDescription.getFilterTimeWindow()));
-        }
-
-        TransformStreamAdapterElement transformStreamAdapterElement = new TransformStreamAdapterElement();
-        EventRateTransformationRuleDescription eventRateTransformationRuleDescription = getEventRateTransformationRule(adapterDescription);
-        if (eventRateTransformationRuleDescription != null) {
-            transformStreamAdapterElement.addStreamTransformationRuleDescription(eventRateTransformationRuleDescription);
-        }
-        pipelineElements.add(transformStreamAdapterElement);
-
-        // Needed when adapter is (
-        if (adapterDescription.getEventGrounding() != null && adapterDescription.getEventGrounding().getTransportProtocol() != null
-                && adapterDescription.getEventGrounding().getTransportProtocol().getBrokerHostname() != null) {
-            return new AdapterPipeline(pipelineElements, getAdapterSink(adapterDescription));
-        }
-
-        return new AdapterPipeline(pipelineElements);
-    }
-
-    private SendToBrokerAdapterSink<?> getAdapterSink(AdapterDescription adapterDescription) {
-        SpProtocol prioritizedProtocol =
-                BackendConfig.INSTANCE.getMessagingSettings().getPrioritizedProtocols().get(0);
-
-        if (GroundingService.isPrioritized(prioritizedProtocol, JmsTransportProtocol.class)) {
-            return new SendToJmsAdapterSink(adapterDescription);
-        }
-        else if (GroundingService.isPrioritized(prioritizedProtocol, KafkaTransportProtocol.class)) {
-            return new SendToKafkaAdapterSink(adapterDescription);
-        }
-        else {
-            return new SendToMqttAdapterSink(adapterDescription);
-        }
-    }
-
-    private RemoveDuplicatesTransformationRuleDescription getRemoveDuplicateRule(T adapterDescription) {
-        return getRule(adapterDescription, RemoveDuplicatesTransformationRuleDescription.class);
-    }
-
-    private EventRateTransformationRuleDescription getEventRateTransformationRule(T adapterDescription) {
-        return getRule(adapterDescription, EventRateTransformationRuleDescription.class);
-    }
-
-    private AddTimestampRuleDescription getTimestampRule(T adapterDescription) {
-        return getRule(adapterDescription, AddTimestampRuleDescription.class);
-    }
-
-    private AddValueTransformationRuleDescription getAddValueRule(T adapterDescription) {
-        return getRule(adapterDescription, AddValueTransformationRuleDescription.class);
-    }
-
-    private CorrectionValueTransformationRuleDescription getCorrectionValueRule(T adapterDescription) {
-        return getRule(adapterDescription, CorrectionValueTransformationRuleDescription.class);
-    }
-
-    private <G extends TransformationRuleDescription> G getRule(T adapterDescription, Class<G> type) {
-
-        if (adapterDescription != null) {
-            for (TransformationRuleDescription tr : adapterDescription.getRules()) {
-                if (type.isInstance(tr)) {
-                    return type.cast(tr);
-                }
-            }
-        }
-
-        return null;
+        return new AdapterPipelineGenerator().generatePipeline(adapterDescription);
     }
 
     @Override
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/AdapterPipelineGenerator.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/AdapterPipelineGenerator.java
new file mode 100644
index 0000000..76ecd9f
--- /dev/null
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/AdapterPipelineGenerator.java
@@ -0,0 +1,150 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.connect.adapter;
+
+import org.apache.streampipes.config.backend.BackendConfig;
+import org.apache.streampipes.connect.adapter.model.pipeline.AdapterPipeline;
+import org.apache.streampipes.connect.adapter.preprocessing.elements.*;
+import org.apache.streampipes.connect.adapter.preprocessing.transform.stream.DuplicateFilterPipelineElement;
+import org.apache.streampipes.connect.api.IAdapterPipelineElement;
+import org.apache.streampipes.model.connect.adapter.AdapterDescription;
+import org.apache.streampipes.model.connect.rules.TransformationRuleDescription;
+import org.apache.streampipes.model.connect.rules.schema.SchemaTransformationRuleDescription;
+import org.apache.streampipes.model.connect.rules.stream.EventRateTransformationRuleDescription;
+import org.apache.streampipes.model.connect.rules.stream.RemoveDuplicatesTransformationRuleDescription;
+import org.apache.streampipes.model.connect.rules.value.*;
+import org.apache.streampipes.model.grounding.JmsTransportProtocol;
+import org.apache.streampipes.model.grounding.KafkaTransportProtocol;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class AdapterPipelineGenerator {
+
+  public AdapterPipeline generatePipeline(AdapterDescription adapterDescription) {
+
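+    // Build the pre-processing elements derived from the adapter's transformation rules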
+    var pipelineElements = makeAdapterPipelineElements(adapterDescription.getRules());
+
+    var duplicatesTransformationRuleDescription = getRemoveDuplicateRule(adapterDescription.getRules());
+    if (duplicatesTransformationRuleDescription != null) {
+      pipelineElements.add(new DuplicateFilterPipelineElement(duplicatesTransformationRuleDescription.getFilterTimeWindow()));
+    }
+
+    var transformStreamAdapterElement = new TransformStreamAdapterElement();
+    var eventRateTransformationRuleDescription = getEventRateTransformationRule(adapterDescription.getRules());
+    if (eventRateTransformationRuleDescription != null) {
+      transformStreamAdapterElement.addStreamTransformationRuleDescription(eventRateTransformationRuleDescription);
+    }
+    pipelineElements.add(transformStreamAdapterElement);
+
+    // TODO decide what was meant by this comment
+    // Needed when adapter is (
+    if (adapterDescription.getEventGrounding() != null && adapterDescription.getEventGrounding().getTransportProtocol() != null
+      && adapterDescription.getEventGrounding().getTransportProtocol().getBrokerHostname() != null) {
+      return new AdapterPipeline(pipelineElements, getAdapterSink(adapterDescription));
+    }
+
+    return new AdapterPipeline(pipelineElements);
+  }
+
+  public List<IAdapterPipelineElement> makeAdapterPipelineElements(List<TransformationRuleDescription> rules) {
+    List<IAdapterPipelineElement> pipelineElements = new ArrayList<>();
+
+    // Must be before the schema transformations to ensure that the user can move this event property
+    var timestampTransformationRuleDescription = getTimestampRule(rules);
+    if (timestampTransformationRuleDescription != null) {
+      pipelineElements.add(new AddTimestampPipelineElement(
+        timestampTransformationRuleDescription.getRuntimeKey()));
+    }
+
+    var valueTransformationRuleDescription = getAddValueRule(rules);
+    if (valueTransformationRuleDescription != null) {
+      pipelineElements.add(new AddValuePipelineElement(
+        valueTransformationRuleDescription.getRuntimeKey(),
+        valueTransformationRuleDescription.getStaticValue()));
+    }
+
+    // first transform the schema before transforming values
+    // value rules should use the unique keys of the new schema
+    pipelineElements.add(new TransformSchemaAdapterPipelineElement(getSchemaRules(rules)));
+    pipelineElements.add(new TransformValueAdapterPipelineElement(getValueRules(rules)));
+
+    return pipelineElements;
+  }
+
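+  // Select the broker sink (JMS, Kafka or MQTT) matching the backend's highest-priority messaging protocol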
+  private SendToBrokerAdapterSink<?> getAdapterSink(AdapterDescription adapterDescription) {
+    var prioritizedProtocol =
+      BackendConfig.INSTANCE.getMessagingSettings().getPrioritizedProtocols().get(0);
+
+    if (GroundingService.isPrioritized(prioritizedProtocol, JmsTransportProtocol.class)) {
+      return new SendToJmsAdapterSink(adapterDescription);
+    }
+    else if (GroundingService.isPrioritized(prioritizedProtocol, KafkaTransportProtocol.class)) {
+      return new SendToKafkaAdapterSink(adapterDescription);
+    }
+    else {
+      return new SendToMqttAdapterSink(adapterDescription);
+    }
+  }
+
+  private RemoveDuplicatesTransformationRuleDescription getRemoveDuplicateRule(List<TransformationRuleDescription> rules) {
+    return getRule(rules, RemoveDuplicatesTransformationRuleDescription.class);
+  }
+
+  private EventRateTransformationRuleDescription getEventRateTransformationRule(List<TransformationRuleDescription> rules) {
+    return getRule(rules, EventRateTransformationRuleDescription.class);
+  }
+
+  private AddTimestampRuleDescription getTimestampRule(List<TransformationRuleDescription> rules) {
+    return getRule(rules, AddTimestampRuleDescription.class);
+  }
+
+  private AddValueTransformationRuleDescription getAddValueRule(List<TransformationRuleDescription> rules) {
+    return getRule(rules, AddValueTransformationRuleDescription.class);
+  }
+
+  private <G extends TransformationRuleDescription> G getRule(List<TransformationRuleDescription> rules,
+                                                              Class<G> type) {
+
+    if (rules != null) {
+      for (TransformationRuleDescription tr : rules) {
+        if (type.isInstance(tr)) {
+          return type.cast(tr);
+        }
+      }
+    }
+
+    return null;
+  }
+
+  private List<TransformationRuleDescription> getValueRules(List<TransformationRuleDescription> rules) {
+    return rules
+      .stream()
+      .filter(r -> r instanceof ValueTransformationRuleDescription && !(r instanceof AddTimestampRuleDescription))
+      .collect(Collectors.toList());
+  }
+
+  private List<TransformationRuleDescription> getSchemaRules(List<TransformationRuleDescription> rules) {
+    return rules
+      .stream()
+      .filter(r -> r instanceof SchemaTransformationRuleDescription)
+      .collect(Collectors.toList());
+  }
+}
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/csv/CsvParser.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/csv/CsvParser.java
index 4f6b873..0b3e6be 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/csv/CsvParser.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/csv/CsvParser.java
@@ -19,20 +19,23 @@
 package org.apache.streampipes.connect.adapter.format.csv;
 
 
-import org.apache.streampipes.connect.api.EmitBinaryEvent;
 import org.apache.streampipes.connect.adapter.model.generic.Parser;
 import org.apache.streampipes.connect.adapter.sdk.ParameterExtractor;
+import org.apache.streampipes.connect.adapter.util.DatatypeUtils;
+import org.apache.streampipes.connect.api.EmitBinaryEvent;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.model.connect.grounding.FormatDescription;
+import org.apache.streampipes.model.connect.guess.AdapterGuessInfo;
+import org.apache.streampipes.model.connect.guess.GuessTypeInfo;
 import org.apache.streampipes.model.schema.EventPropertyPrimitive;
 import org.apache.streampipes.model.schema.EventSchema;
-import org.apache.streampipes.vocabulary.XSD;
 
 import java.io.BufferedReader;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.InputStreamReader;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 
 
@@ -81,7 +84,13 @@
     }
 
     @Override
-    public EventSchema getEventSchema(List<byte[]> oneEvent) {
+    public boolean supportsPreview() {
+        return true;
+    }
+
+    @Override
+    public AdapterGuessInfo getSchemaAndSample(List<byte[]> oneEvent) {
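+        // Guess the event schema and collect a typed sample value for each column of the first CSV record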
+        var sample = new HashMap<String, GuessTypeInfo>();
         String[] keys;
         String[] data;
 
@@ -99,28 +108,20 @@
         EventSchema resultSchema = new EventSchema();
         for (int i = 0; i < keys.length; i++) {
             EventPropertyPrimitive p = new EventPropertyPrimitive();
+            var runtimeType = DatatypeUtils.getXsdDatatype(data[i], true);
+            var convertedValue = DatatypeUtils.convertValue(data[i], runtimeType);
             p.setRuntimeName(keys[i]);
-            p.setRuntimeType(getTypeString(data[i]));
+            p.setRuntimeType(runtimeType);
+            sample.put(keys[i], new GuessTypeInfo(DatatypeUtils.getCanonicalTypeClassName(data[i], true), convertedValue));
             resultSchema.addEventProperty(p);
         }
 
-        return resultSchema;
+        return new AdapterGuessInfo(resultSchema, sample);
     }
 
-    private String getTypeString(String o) {
-
-        try {
-            Double.parseDouble(o);
-            return XSD._float.toString();
-        } catch (NumberFormatException e) {
-
-        }
-
-        if (o.equalsIgnoreCase("true") || o.equalsIgnoreCase("false")) {
-            return XSD._boolean.toString();
-        }
-
-        return XSD._string.toString();
+    @Override
+    public EventSchema getEventSchema(List<byte[]> oneEvent) {
+        return getSchemaAndSample(oneEvent).getEventSchema();
     }
 
 
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/AbstractJsonFormat.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/AbstractJsonFormat.java
index a7116ae..1da9b16 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/AbstractJsonFormat.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/AbstractJsonFormat.java
@@ -21,7 +21,6 @@
 import org.apache.streampipes.connect.api.IFormat;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.dataformat.json.JsonDataFormatDefinition;
-import org.apache.streampipes.model.schema.EventSchema;
 
 import java.util.Map;
 
@@ -30,8 +29,6 @@
 
   @Override
   public Map<String, Object> parse(byte[] object) throws ParseException {
-    EventSchema resultSchema = new EventSchema();
-
     JsonDataFormatDefinition jsonDefinition = new JsonDataFormatDefinition();
 
     Map<String, Object> result = null;
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/object/JsonObjectParser.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/object/JsonObjectParser.java
index 3419bc9..06c5cf5 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/object/JsonObjectParser.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/json/object/JsonObjectParser.java
@@ -20,24 +20,26 @@
 
 
 import com.fasterxml.jackson.databind.ObjectMapper;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import org.apache.streampipes.commons.exceptions.SpRuntimeException;
-import org.apache.streampipes.connect.api.EmitBinaryEvent;
-import org.apache.streampipes.connect.adapter.model.generic.Parser;
 import org.apache.streampipes.connect.adapter.format.util.JsonEventProperty;
+import org.apache.streampipes.connect.adapter.model.generic.Parser;
+import org.apache.streampipes.connect.api.EmitBinaryEvent;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.dataformat.json.JsonDataFormatDefinition;
 import org.apache.streampipes.model.connect.grounding.FormatDescription;
+import org.apache.streampipes.model.connect.guess.AdapterGuessInfo;
+import org.apache.streampipes.model.connect.guess.GuessTypeInfo;
 import org.apache.streampipes.model.schema.EventProperty;
 import org.apache.streampipes.model.schema.EventSchema;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.IOException;
 import java.io.InputStream;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.stream.Collectors;
 
 public class JsonObjectParser extends Parser {
 
@@ -75,9 +77,17 @@
 
     @Override
     public EventSchema getEventSchema(List<byte[]> oneEvent) {
-        EventSchema resultSchema = new EventSchema();
+        return getSchemaAndSample(oneEvent).getEventSchema();
+    }
 
-//        resultSchema.setEventProperties(Arrays.asList(EpProperties.timestampProperty("timestamp")));
+    @Override
+    public boolean supportsPreview() {
+        return true;
+    }
+
+    @Override
+    public AdapterGuessInfo getSchemaAndSample(List<byte[]> eventSample) throws ParseException {
+        EventSchema resultSchema = new EventSchema();
 
         JsonDataFormatDefinition jsonDefinition = new JsonDataFormatDefinition();
 
@@ -85,107 +95,22 @@
         Map<String, Object> exampleEvent = null;
 
         try {
-            exampleEvent = jsonDefinition.toMap(oneEvent.get(0));
-        } catch (SpRuntimeException e) {
-            e.printStackTrace();
-        }
+            exampleEvent = jsonDefinition.toMap(eventSample.get(0));
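+            // Derive a preview sample that records each field's Java type and value from the first parsed event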
+            var sample = exampleEvent
+              .entrySet()
+              .stream()
+              .collect(Collectors.toMap(Map.Entry::getKey, e ->
+                new GuessTypeInfo(e.getValue().getClass().getCanonicalName(), e.getValue())));
 
-        for (Map.Entry<String, Object> entry : exampleEvent.entrySet())
-        {
-//            System.out.println(entry.getKey() + "/" + entry.getValue());
-            EventProperty p = JsonEventProperty.getEventProperty(entry.getKey(), entry.getValue());
+            for (Map.Entry<String, Object> entry : exampleEvent.entrySet()) {
+                EventProperty p = JsonEventProperty.getEventProperty(entry.getKey(), entry.getValue());
 
-            resultSchema.addEventProperty(p);
-
-        }
-
-        return resultSchema;
-    }
-
-    public Map<String, Object> parseObject(javax.json.stream.JsonParser jsonParser, boolean root, int start) {
-        // this variable is needed to skip the first object start
-        String mapKey = "";
-        Map<String, Object> result = new HashMap<>();
-        List<Object> arr = null;
-
-        while (jsonParser.hasNext()) {
-            javax.json.stream.JsonParser.Event event = jsonParser.next();
-            switch (event) {
-                case KEY_NAME:
-                    mapKey = jsonParser.getString();
-                    logger.debug("key: " + mapKey );
-                    break;
-                case START_OBJECT:
-                    if (start == 0) {
-                        Map<String, Object> ob = parseObject(jsonParser, false, 0);
-                        if (arr == null) {
-                            result.put(mapKey, ob);
-                        } else {
-                            arr.add(ob);
-                        }
-                    } else {
-                        start--;
-                    }
-                    logger.debug("start object");
-                    break;
-                case END_OBJECT:
-
-                    logger.debug("end object");
-                    return result;
-                case START_ARRAY:
-                    arr = new ArrayList<>();
-                    logger.debug("start array");
-                    break;
-                case END_ARRAY:
-                    // Check if just the end of array is entered
-                    if (result.keySet().size() == 0 && mapKey.equals("")) {
-                        return null;
-                    }
-                    result.put(mapKey, arr);
-                    arr = null;
-                    logger.debug("end array");
-                    break;
-                case VALUE_TRUE:
-                    if (arr == null) {
-                        result.put(mapKey, true);
-                    } else {
-                        arr.add(true);
-                    }
-                    logger.debug("value: true");
-                    break;
-                case VALUE_FALSE:
-                    if (arr == null) {
-                        result.put(mapKey, false);
-                    } else {
-                        arr.add(false);
-                    }
-                    logger.debug("value: false");
-                    break;
-                case VALUE_STRING:
-                    if (arr == null) {
-                        result.put(mapKey, jsonParser.getString());
-                    } else {
-                        arr.add(jsonParser.getString());
-                    }
-                    logger.debug("value string: " + jsonParser.getString());
-                    break;
-                case VALUE_NUMBER:
-                    if (arr == null) {
-                        result.put(mapKey, jsonParser.getBigDecimal());
-                    } else {
-                        arr.add(jsonParser.getBigDecimal());
-                    }
-                    logger.debug("value number: " + jsonParser.getBigDecimal());
-                    break;
-                case VALUE_NULL:
-                    logger.debug("value null");
-                    break;
-                default:
-                    logger.error("Error: " + event + " event is not handled in the JSON parser");
-                    break;
+                resultSchema.addEventProperty(p);
             }
-        }
 
-        return result;
+            return new AdapterGuessInfo(resultSchema, sample);
+        } catch (SpRuntimeException e) {
+            throw new ParseException("Could not serialize event, did you choose the correct format?", e);
+        }
     }
 }
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/util/JsonEventProperty.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/util/JsonEventProperty.java
index 6721638..a203190 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/util/JsonEventProperty.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/format/util/JsonEventProperty.java
@@ -55,7 +55,7 @@
             resultProperty = new EventPropertyPrimitive();
             resultProperty.setRuntimeName(key);
             ((EventPropertyPrimitive) resultProperty).setRuntimeType(XSD._string.toString());
-        } else if (o.getClass().equals(Integer.class) || o.getClass().equals(Double.class)|| o.getClass().equals(Long.class)) {
+        } else if (o.getClass().equals(Integer.class) || o.getClass().equals(Double.class) || o.getClass().equals(Float.class) || o.getClass().equals(Long.class)) {
             resultProperty = new EventPropertyPrimitive();
             resultProperty.setRuntimeName(key);
             ((EventPropertyPrimitive) resultProperty).setRuntimeType(XSD._float.toString());
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/guess/SchemaGuesser.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/guess/SchemaGuesser.java
index 53be8ab..29f534e 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/guess/SchemaGuesser.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/guess/SchemaGuesser.java
@@ -18,16 +18,24 @@
 
 package org.apache.streampipes.connect.adapter.guess;
 
+import org.apache.streampipes.model.connect.guess.AdapterGuessInfo;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
 import org.apache.streampipes.model.schema.EventSchema;
 
 public class SchemaGuesser {
 
-    public static GuessSchema guessSchma(EventSchema eventSchema) {
+    public static GuessSchema guessSchema(EventSchema eventSchema) {
         GuessSchema result = new GuessSchema();
 
         result.setEventSchema(eventSchema);
 
         return result;
     }
+
+    public static GuessSchema guessSchema(AdapterGuessInfo guessInfo) {
+        var result = guessSchema(guessInfo.getEventSchema());
+        result.setEventPreview(guessInfo.getEventPreview());
+
+        return result;
+    }
 }
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/model/pipeline/AdapterEventPreviewPipeline.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/model/pipeline/AdapterEventPreviewPipeline.java
new file mode 100644
index 0000000..3e36ad5
--- /dev/null
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/model/pipeline/AdapterEventPreviewPipeline.java
@@ -0,0 +1,81 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.connect.adapter.model.pipeline;
+
+import org.apache.streampipes.connect.adapter.AdapterPipelineGenerator;
+import org.apache.streampipes.connect.api.IAdapterPipeline;
+import org.apache.streampipes.connect.api.IAdapterPipelineElement;
+import org.apache.streampipes.model.connect.guess.AdapterEventPreview;
+import org.apache.streampipes.model.connect.guess.GuessTypeInfo;
+
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+public class AdapterEventPreviewPipeline implements IAdapterPipeline {
+
+  private List<IAdapterPipelineElement> pipelineElements;
+  private Map<String, GuessTypeInfo> event;
+
+  public AdapterEventPreviewPipeline(AdapterEventPreview previewRequest) {
+    this.pipelineElements = new AdapterPipelineGenerator().makeAdapterPipelineElements(previewRequest.getRules());
+    this.event = previewRequest.getInputData();
+  }
+
+  @Override
+  public void process(Map<String, Object> event) {
+    for (IAdapterPipelineElement pe : this.pipelineElements) {
+      event = pe.process(event);
+    }
+  }
+
+  @Override
+  public List<IAdapterPipelineElement> getPipelineElements() {
+    return null;
+  }
+
+  @Override
+  public void setPipelineElements(List<IAdapterPipelineElement> pipelineElements) {
+
+  }
+
+  @Override
+  public void changePipelineSink(IAdapterPipelineElement pipelineSink) {
+
+  }
+
+  @Override
+  public IAdapterPipelineElement getPipelineSink() {
+    return null;
+  }
+
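+  // Runs the configured pre-processing rules on the preview event and re-derives each field's type information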
+  public Map<String, GuessTypeInfo> makePreview() {
+    Map<String, Object> ev = this.event
+      .entrySet()
+      .stream()
+      .collect(Collectors.toMap(Map.Entry::getKey, e -> e.getValue().getValue()));
+    this.process(ev);
+
+    return ev
+      .entrySet()
+      .stream()
+      .collect(Collectors.toMap(Map.Entry::getKey, e -> new GuessTypeInfo(e.getValue().getClass().getCanonicalName(), e.getValue())));
+  }
+}
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformSchemaAdapterPipelineElement.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformSchemaAdapterPipelineElement.java
index 735933f..e795af3 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformSchemaAdapterPipelineElement.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformSchemaAdapterPipelineElement.java
@@ -27,6 +27,7 @@
 import org.apache.streampipes.model.connect.rules.*;
 import org.apache.streampipes.model.connect.rules.schema.*;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -36,7 +37,7 @@
     private SchemaEventTransformer eventTransformer;
     Logger logger = LoggerFactory.getLogger(TransformSchemaAdapterPipelineElement.class);
 
-    public TransformSchemaAdapterPipelineElement(List<SchemaTransformationRuleDescription> transformationRuleDescriptions) {
+    public TransformSchemaAdapterPipelineElement(List<? extends TransformationRuleDescription> transformationRuleDescriptions) {
         List<TransformationRule> rules = new ArrayList<>();
 
         // transforms description to actual rules
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformValueAdapterPipelineElement.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformValueAdapterPipelineElement.java
index 137966c..abb1bd2 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformValueAdapterPipelineElement.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/elements/TransformValueAdapterPipelineElement.java
@@ -18,16 +18,16 @@
 
 package org.apache.streampipes.connect.adapter.preprocessing.elements;
 
-import org.apache.streampipes.model.connect.rules.value.CorrectionValueTransformationRuleDescription;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-import org.apache.streampipes.connect.api.IAdapterPipelineElement;
 import org.apache.streampipes.connect.adapter.preprocessing.Util;
 import org.apache.streampipes.connect.adapter.preprocessing.transform.value.*;
+import org.apache.streampipes.connect.api.IAdapterPipelineElement;
 import org.apache.streampipes.model.connect.rules.TransformationRuleDescription;
+import org.apache.streampipes.model.connect.rules.value.ChangeDatatypeTransformationRuleDescription;
+import org.apache.streampipes.model.connect.rules.value.CorrectionValueTransformationRuleDescription;
 import org.apache.streampipes.model.connect.rules.value.TimestampTranfsformationRuleDescription;
 import org.apache.streampipes.model.connect.rules.value.UnitTransformRuleDescription;
-import org.apache.streampipes.model.connect.rules.value.ValueTransformationRuleDescription;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -35,20 +35,20 @@
 
 public class TransformValueAdapterPipelineElement implements IAdapterPipelineElement {
 
-    private ValueEventTransformer eventTransformer;
-    private Logger logger = LoggerFactory.getLogger(TransformValueAdapterPipelineElement.class);
+    private final ValueEventTransformer eventTransformer;
+    private static final Logger logger = LoggerFactory.getLogger(TransformValueAdapterPipelineElement.class);
 
-    public TransformValueAdapterPipelineElement(List<ValueTransformationRuleDescription> transformationRuleDescriptions) {
+    public TransformValueAdapterPipelineElement(List<? extends TransformationRuleDescription> transformationRuleDescriptions) {
         List<ValueTransformationRule> rules = new ArrayList<>();
 
         // transforms description to actual rules
         for (TransformationRuleDescription ruleDescription : transformationRuleDescriptions) {
             if (ruleDescription instanceof UnitTransformRuleDescription) {
-                UnitTransformRuleDescription tmp = (UnitTransformRuleDescription) ruleDescription;
+                var tmp = (UnitTransformRuleDescription) ruleDescription;
                 rules.add(new UnitTransformationRule(Util.toKeyArray(tmp.getRuntimeKey()),
                         tmp.getFromUnitRessourceURL(), tmp.getToUnitRessourceURL()));
-            } else if(ruleDescription instanceof TimestampTranfsformationRuleDescription) {
-                TimestampTranfsformationRuleDescription tmp = (TimestampTranfsformationRuleDescription) ruleDescription;
+            } else if (ruleDescription instanceof TimestampTranfsformationRuleDescription) {
+                var tmp = (TimestampTranfsformationRuleDescription) ruleDescription;
                 TimestampTranformationRuleMode mode = null;
                 switch (tmp.getMode()) {
                     case "formatString": mode = TimestampTranformationRuleMode.FORMAT_STRING;
@@ -58,9 +58,12 @@
                 rules.add(new TimestampTranformationRule(Util.toKeyArray(tmp.getRuntimeKey()), mode,
                         tmp.getFormatString(), tmp.getMultiplier()));
             }
-            else if(ruleDescription instanceof CorrectionValueTransformationRuleDescription) {
-                CorrectionValueTransformationRuleDescription tmp = (CorrectionValueTransformationRuleDescription) ruleDescription;
+            else if (ruleDescription instanceof CorrectionValueTransformationRuleDescription) {
+                var tmp = (CorrectionValueTransformationRuleDescription) ruleDescription;
                 rules.add(new CorrectionValueTransformationRule(Util.toKeyArray(tmp.getRuntimeKey()), tmp.getCorrectionValue(), tmp.getOperator()));
+            } else if (ruleDescription instanceof ChangeDatatypeTransformationRuleDescription) {
+                var tmp = (ChangeDatatypeTransformationRuleDescription) ruleDescription;
+                rules.add(new DatatypeTransformationRule(tmp.getRuntimeKey(), tmp.getOriginalDatatypeXsd(), tmp.getTargetDatatypeXsd()));
             }
 
             else {
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/DatatypeTransformationRule.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/DatatypeTransformationRule.java
new file mode 100644
index 0000000..d8c6d8a
--- /dev/null
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/DatatypeTransformationRule.java
@@ -0,0 +1,52 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.connect.adapter.preprocessing.transform.value;
+
+import org.apache.streampipes.connect.adapter.util.DatatypeUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+
+public class DatatypeTransformationRule implements ValueTransformationRule {
+
+  private static final Logger LOG = LoggerFactory.getLogger(DatatypeTransformationRule.class);
+
+  private String eventKey;
+  private String originalDatatypeXsd;
+  private String targetDatatypeXsd;
+
+  public DatatypeTransformationRule(String eventKey, String originalDatatypeXsd, String targetDatatypeXsd) {
+    this.eventKey = eventKey;
+    this.originalDatatypeXsd = originalDatatypeXsd;
+    this.targetDatatypeXsd = targetDatatypeXsd;
+  }
+
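+  // Converts the value stored under eventKey to the target XSD datatype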
+  @Override
+  public Map<String, Object> transform(Map<String, Object> event) {
+    Object value = event.get(eventKey);
+    Object transformedValue = transformDatatype(value);
+    event.put(eventKey, transformedValue);
+    return event;
+  }
+
+  public Object transformDatatype(Object value) {
+    return DatatypeUtils.convertValue(value, targetDatatypeXsd);
+  }
+}
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/ValueEventTransformer.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/ValueEventTransformer.java
index 939cbad..c965c89 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/ValueEventTransformer.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/preprocessing/transform/value/ValueEventTransformer.java
@@ -26,14 +26,16 @@
 
 public class ValueEventTransformer implements ValueTransformationRule {
 
-    private List<UnitTransformationRule> unitTransformationRules;
-    private List<TimestampTranformationRule> timestampTransformationRules;
-    private List<CorrectionValueTransformationRule> correctionValueTransformationRules;
+    private final List<UnitTransformationRule> unitTransformationRules;
+    private final List<TimestampTranformationRule> timestampTransformationRules;
+    private final List<CorrectionValueTransformationRule> correctionValueTransformationRules;
+    private final List<DatatypeTransformationRule> datatypeTransformationRules;
 
     public ValueEventTransformer(List<ValueTransformationRule> rules) {
         this.unitTransformationRules = new ArrayList<>();
         this.timestampTransformationRules = new ArrayList<>();
         this.correctionValueTransformationRules = new ArrayList<>();
+        this.datatypeTransformationRules = new ArrayList<>();
 
         for (TransformationRule rule : rules) {
             if (rule instanceof UnitTransformationRule) {
@@ -42,16 +44,12 @@
                 this.timestampTransformationRules.add((TimestampTranformationRule) rule);
             } else if (rule instanceof CorrectionValueTransformationRule) {
                 this.correctionValueTransformationRules.add((CorrectionValueTransformationRule) rule);
+            } else if (rule instanceof DatatypeTransformationRule) {
+                this.datatypeTransformationRules.add((DatatypeTransformationRule) rule);
             }
         }
     }
 
-/*
-    public ValueEventTransformer(List<UnitTransformationRule> unitTransformationRule) {
-        this.unitTransformationRules = new ArrayList<>();
-    }
-*/
-
     @Override
     public Map<String, Object> transform(Map<String, Object> event) {
 
@@ -63,36 +61,14 @@
             event = rule.transform(event);
         }
 
+        for (var rule: datatypeTransformationRules) {
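+            // convert the affected property to its target datatype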
+            event = rule.transform(event);
+        }
+
         for (CorrectionValueTransformationRule rule : correctionValueTransformationRules) {
             event = rule.transform(event);
         }
 
-
         return event;
     }
-
-
-    public List<UnitTransformationRule> getUnitTransformationRules() {
-        return unitTransformationRules;
-    }
-
-    public void setUnitTransformationRules(List<UnitTransformationRule> unitTransformationRules) {
-        this.unitTransformationRules = unitTransformationRules;
-    }
-
-    public List<TimestampTranformationRule> getTimestampTransformationRules() {
-        return timestampTransformationRules;
-    }
-
-    public void setTimestampTransformationRules(List<TimestampTranformationRule> timestampTransformationRules) {
-        this.timestampTransformationRules = timestampTransformationRules;
-    }
-
-    public List<CorrectionValueTransformationRule> getCorrectionValueTransformationRules() {
-        return correctionValueTransformationRules;
-    }
-
-    public void setCorrectionValueTransformationRules(List<CorrectionValueTransformationRule> correctionValueTransformationRules) {
-        this.correctionValueTransformationRules = correctionValueTransformationRules;
-    }
 }
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/DatatypeUtils.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/DatatypeUtils.java
new file mode 100644
index 0000000..f87bab3
--- /dev/null
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/DatatypeUtils.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.connect.adapter.util;
+
+import org.apache.commons.lang3.math.NumberUtils;
+import org.apache.streampipes.vocabulary.XSD;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+public class DatatypeUtils {
+
+  private static final Logger LOG = LoggerFactory.getLogger(DatatypeUtils.class);
+
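+  // Converts a value (via its String representation) to the Java type of the target XSD datatype; returns the original value if parsing fails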
+  public static Object convertValue(Object value,
+                                    String targetDatatypeXsd) {
+    var stringValue = String.valueOf(value);
+    if (XSD._string.toString().equals(targetDatatypeXsd)) {
+      return stringValue;
+    } else {
+      try {
+        if (XSD._double.toString().equals(targetDatatypeXsd)) {
+          return Double.parseDouble(stringValue);
+        } else if (XSD._float.toString().equals(targetDatatypeXsd)) {
+          return Float.parseFloat(stringValue);
+        } else if (XSD._boolean.toString().equals(targetDatatypeXsd)) {
+          return Boolean.parseBoolean(stringValue);
+        } else if (XSD._integer.toString().equals(targetDatatypeXsd)) {
+          var floatingNumber = Float.parseFloat(stringValue);
+          return Integer.parseInt(String.valueOf(Math.round(floatingNumber)));
+        } else if (XSD._long.toString().equals(targetDatatypeXsd)) {
+          var floatingNumber = Double.parseDouble(stringValue);
+          return Long.parseLong(String.valueOf(Math.round(floatingNumber)));
+        }
+      } catch (NumberFormatException e) {
+        LOG.error("Number format exception {}", value);
+        return value;
+      }
+    }
+
+    return value;
+  }
+
+  public static String getCanonicalTypeClassName(String value,
+                                                 boolean preferFloat) {
+    return getTypeClass(value, preferFloat).getCanonicalName();
+  }
+
+  public static String getXsdDatatype(String value,
+                                      boolean preferFloat) {
+    var clazz = getTypeClass(value, preferFloat);
+    if (clazz.equals(Integer.class)) {
+      return XSD._integer.toString();
+    } else if (clazz.equals(Long.class)) {
+      return XSD._long.toString();
+    } else if (clazz.equals(Float.class)) {
+      return XSD._float.toString();
+    } else if (clazz.equals(Double.class)) {
+      return XSD._double.toString();
+    } else if (clazz.equals(Boolean.class)) {
+      return XSD._boolean.toString();
+    } else {
+      return XSD._string.toString();
+    }
+  }
+
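+  // Guesses a Java type from a sample string; integer and long samples are reported as Float when preferFloat is set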
+  public static Class<?> getTypeClass(String value,
+                                      boolean preferFloat) {
+    if (NumberUtils.isParsable(value)) {
+      try {
+        Integer.parseInt(value);
+        return preferFloat ? Float.class : Integer.class;
+      } catch (NumberFormatException ignored) {
+      }
+
+      try {
+        Long.parseLong(value);
+        return preferFloat ? Float.class : Long.class;
+      } catch (NumberFormatException ignored) {
+      }
+
+      try {
+        Double.parseDouble(value);
+        return Float.class;
+      } catch (NumberFormatException ignored) {
+      }
+
+    }
+
+    if (value.equalsIgnoreCase("true") || value.equalsIgnoreCase("false")) {
+      return Boolean.class;
+    }
+
+    return String.class;
+  }
+
+  public static void main(String[] args) {
+    long max = Long.MAX_VALUE;
+    String className = getCanonicalTypeClassName(String.valueOf(max), true);
+    System.out.println(className);
+    System.out.println(convertValue(max, className));
+  }
+}
diff --git a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/PollingSettings.java b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/PollingSettings.java
index 79dea5b..177702c 100644
--- a/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/PollingSettings.java
+++ b/streampipes-connect/src/main/java/org/apache/streampipes/connect/adapter/util/PollingSettings.java
@@ -15,6 +15,7 @@
  * limitations under the License.
  *
  */
+
 package org.apache.streampipes.connect.adapter.util;
 
 import java.util.concurrent.TimeUnit;
diff --git a/streampipes-container-extensions/pom.xml b/streampipes-container-extensions/pom.xml
index 0166eb2..55c0192 100644
--- a/streampipes-container-extensions/pom.xml
+++ b/streampipes-container-extensions/pom.xml
@@ -17,13 +17,11 @@
   -->
 
 
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -34,17 +32,17 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect-container-worker</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-container-standalone</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-service-extensions-base</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-container-standalone/pom.xml b/streampipes-container-standalone/pom.xml
index b3ac21d..8bac498 100644
--- a/streampipes-container-standalone/pom.xml
+++ b/streampipes-container-standalone/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
        <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,12 +32,12 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-container</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-service-extensions-base</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-container-standalone/src/main/java/org/apache/streampipes/container/standalone/init/StandaloneModelSubmitter.java b/streampipes-container-standalone/src/main/java/org/apache/streampipes/container/standalone/init/StandaloneModelSubmitter.java
index a23482c..037a6fb 100644
--- a/streampipes-container-standalone/src/main/java/org/apache/streampipes/container/standalone/init/StandaloneModelSubmitter.java
+++ b/streampipes-container-standalone/src/main/java/org/apache/streampipes/container/standalone/init/StandaloneModelSubmitter.java
@@ -24,6 +24,7 @@
 import org.apache.streampipes.container.model.PeConfig;
 import org.apache.streampipes.container.model.SpServiceDefinition;
 import org.apache.streampipes.service.extensions.base.StreamPipesExtensionsServiceBase;
+import org.apache.streampipes.service.extensions.base.WebSecurityConfig;
 import org.apache.streampipes.svcdiscovery.api.model.SpServiceTag;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -37,7 +38,7 @@
 @Deprecated
 @Configuration
 @EnableAutoConfiguration
-@Import({ PipelineElementContainerResourceConfig.class })
+@Import({ PipelineElementContainerResourceConfig.class, WebSecurityConfig.class})
 public abstract class StandaloneModelSubmitter extends StreamPipesExtensionsServiceBase {
 
     private static final Logger LOG =
diff --git a/streampipes-container/pom.xml b/streampipes-container/pom.xml
index 5bc38c9..53bd31d 100644
--- a/streampipes-container/pom.xml
+++ b/streampipes-container/pom.xml
@@ -21,7 +21,7 @@
 	<parent>
 		<groupId>org.apache.streampipes</groupId>
 		<artifactId>streampipes-parent</artifactId>
-		<version>0.70.0-SNAPSHOT</version>
+		<version>0.71.0-SNAPSHOT</version>
 	</parent>
 
 	<artifactId>streampipes-container</artifactId>
@@ -33,44 +33,38 @@
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-connect-api</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-dataformat</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-messaging</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-sdk</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-serializers-json</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-service-discovery</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
 		<dependency>
 			<groupId>org.apache.streampipes</groupId>
 			<artifactId>streampipes-rest-shared</artifactId>
-			<version>0.70.0-SNAPSHOT</version>
+			<version>0.71.0-SNAPSHOT</version>
 		</dependency>
-<!--		<dependency>-->
-<!--			<groupId>org.apache.streampipes</groupId>-->
-<!--			<artifactId>streampipes-connect-container-worker</artifactId>-->
-<!--			<version>0.68.0-SNAPSHOT</version>-->
-<!--		</dependency>-->
-
 
 		<!-- External dependencies -->
 		<dependency>
diff --git a/streampipes-container/src/main/java/org/apache/streampipes/container/api/InvocablePipelineElementResource.java b/streampipes-container/src/main/java/org/apache/streampipes/container/api/InvocablePipelineElementResource.java
index 9936bd2..780b704 100644
--- a/streampipes-container/src/main/java/org/apache/streampipes/container/api/InvocablePipelineElementResource.java
+++ b/streampipes-container/src/main/java/org/apache/streampipes/container/api/InvocablePipelineElementResource.java
@@ -19,6 +19,7 @@
 package org.apache.streampipes.container.api;
 
 import org.apache.streampipes.commons.constants.Envs;
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.commons.exceptions.SpRuntimeException;
 import org.apache.streampipes.container.declarer.Declarer;
 import org.apache.streampipes.container.declarer.InvocableDeclarer;
@@ -106,14 +107,21 @@
     D declarer = getDeclarerById(elementId);
     RuntimeOptionsResponse responseOptions;
 
-    if (declarer instanceof ResolvesContainerProvidedOptions) {
-      responseOptions = new RuntimeResolvableRequestHandler().handleRuntimeResponse((ResolvesContainerProvidedOptions) declarer, req);
-      return ok(responseOptions);
-    } else if (declarer instanceof SupportsRuntimeConfig) {
-      responseOptions = new RuntimeResolvableRequestHandler().handleRuntimeResponse((SupportsRuntimeConfig) declarer, req);
-      return ok(responseOptions);
-    } else {
-      throw new WebApplicationException(javax.ws.rs.core.Response.Status.BAD_REQUEST);
+    try {
+      if (declarer instanceof ResolvesContainerProvidedOptions) {
+        responseOptions = new RuntimeResolvableRequestHandler().handleRuntimeResponse((ResolvesContainerProvidedOptions) declarer, req);
+        return ok(responseOptions);
+      } else if (declarer instanceof SupportsRuntimeConfig) {
+        responseOptions = new RuntimeResolvableRequestHandler().handleRuntimeResponse((SupportsRuntimeConfig) declarer, req);
+        return ok(responseOptions);
+      } else {
+        return javax.ws.rs.core.Response.status(500).build();
+      }
+    } catch (SpConfigurationException e) {
+      return javax.ws.rs.core.Response
+        .status(400)
+        .entity(e)
+        .build();
     }
   }
 
@@ -131,8 +139,7 @@
                               (elementId);
       return ok(resolvesOutput.resolveOutputStrategy
               (runtimeOptionsRequest, getExtractor(runtimeOptionsRequest)));
-    } catch (SpRuntimeException e) {
-      e.printStackTrace();
+    } catch (SpRuntimeException | SpConfigurationException e) {
       return ok(new Response(elementId, false));
     }
   }
diff --git a/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOptions.java b/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOptions.java
index a32a1de..110d95e 100644
--- a/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOptions.java
+++ b/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOptions.java
@@ -17,6 +17,7 @@
  */
 package org.apache.streampipes.container.api;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.model.staticproperty.Option;
 import org.apache.streampipes.sdk.extractor.StaticPropertyExtractor;
 
@@ -29,5 +30,5 @@
 public interface ResolvesContainerProvidedOptions {
 
   List<Option> resolveOptions(String staticPropertyInternalName,
-                              StaticPropertyExtractor parameterExtractor);
+                              StaticPropertyExtractor parameterExtractor) throws SpConfigurationException;
 }
diff --git a/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOutputStrategy.java b/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOutputStrategy.java
index 83ed622..ab44539 100644
--- a/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOutputStrategy.java
+++ b/streampipes-container/src/main/java/org/apache/streampipes/container/api/ResolvesContainerProvidedOutputStrategy.java
@@ -17,7 +17,7 @@
  */
 package org.apache.streampipes.container.api;
 
-import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.model.base.InvocableStreamPipesEntity;
 import org.apache.streampipes.model.schema.EventSchema;
 import org.apache.streampipes.sdk.extractor.AbstractParameterExtractor;
@@ -25,5 +25,5 @@
 public interface ResolvesContainerProvidedOutputStrategy<T extends InvocableStreamPipesEntity, P
         extends AbstractParameterExtractor<T>> {
 
-  EventSchema resolveOutputStrategy(T processingElement, P parameterExtractor) throws SpRuntimeException;
+  EventSchema resolveOutputStrategy(T processingElement, P parameterExtractor) throws SpConfigurationException;
 }
diff --git a/streampipes-container/src/main/java/org/apache/streampipes/container/api/RuntimeResolvableRequestHandler.java b/streampipes-container/src/main/java/org/apache/streampipes/container/api/RuntimeResolvableRequestHandler.java
index aba1322..91e18f9 100644
--- a/streampipes-container/src/main/java/org/apache/streampipes/container/api/RuntimeResolvableRequestHandler.java
+++ b/streampipes-container/src/main/java/org/apache/streampipes/container/api/RuntimeResolvableRequestHandler.java
@@ -18,6 +18,7 @@
 
 package org.apache.streampipes.container.api;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.model.runtime.RuntimeOptionsRequest;
 import org.apache.streampipes.model.runtime.RuntimeOptionsResponse;
 import org.apache.streampipes.model.staticproperty.Option;
@@ -31,7 +32,7 @@
 
   // for backwards compatibility
   public RuntimeOptionsResponse handleRuntimeResponse(ResolvesContainerProvidedOptions resolvesOptions,
-                                                      RuntimeOptionsRequest req) {
+                                                      RuntimeOptionsRequest req) throws SpConfigurationException {
     List<Option> availableOptions =
             resolvesOptions.resolveOptions(req.getRequestId(),
                     makeExtractor(req));
@@ -43,7 +44,7 @@
   }
 
   public RuntimeOptionsResponse handleRuntimeResponse(SupportsRuntimeConfig declarer,
-                                                      RuntimeOptionsRequest req) {
+                                                      RuntimeOptionsRequest req) throws SpConfigurationException {
     StaticProperty result = declarer.resolveConfiguration(
             req.getRequestId(),
             makeExtractor(req));
diff --git a/streampipes-container/src/main/java/org/apache/streampipes/container/api/SupportsRuntimeConfig.java b/streampipes-container/src/main/java/org/apache/streampipes/container/api/SupportsRuntimeConfig.java
index 10cacd1..4ebd768 100644
--- a/streampipes-container/src/main/java/org/apache/streampipes/container/api/SupportsRuntimeConfig.java
+++ b/streampipes-container/src/main/java/org/apache/streampipes/container/api/SupportsRuntimeConfig.java
@@ -18,12 +18,13 @@
 
 package org.apache.streampipes.container.api;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.model.staticproperty.StaticProperty;
 import org.apache.streampipes.sdk.extractor.StaticPropertyExtractor;
 
 public interface SupportsRuntimeConfig {
 
   StaticProperty resolveConfiguration(String staticPropertyInternalName,
-                                      StaticPropertyExtractor extractor);
+                                      StaticPropertyExtractor extractor) throws SpConfigurationException;
 
 }
diff --git a/streampipes-container/src/main/java/org/apache/streampipes/container/model/SpServiceDefinitionBuilder.java b/streampipes-container/src/main/java/org/apache/streampipes/container/model/SpServiceDefinitionBuilder.java
index 3ad1308..0e0f290 100644
--- a/streampipes-container/src/main/java/org/apache/streampipes/container/model/SpServiceDefinitionBuilder.java
+++ b/streampipes-container/src/main/java/org/apache/streampipes/container/model/SpServiceDefinitionBuilder.java
@@ -28,6 +28,7 @@
 import org.slf4j.LoggerFactory;
 
 import java.util.Arrays;
+import java.util.List;
 
 public class SpServiceDefinitionBuilder {
 
@@ -80,6 +81,11 @@
     return this;
   }
 
+  public SpServiceDefinitionBuilder addConfigs(List<ConfigItem> configItems) {
+    configItems.forEach(this.serviceDefinition::addConfig);
+    return this;
+  }
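+
+  // Usage sketch (assumption, not part of this patch): the list-based variant lets a builder chain
+  // register a whole set of default config items at once, e.g.
+  //   builder.addConfigs(DataExplorerConfigurations.getDefaults())
+  //          .addConfigs(CouchDbConfigurations.getDefaults());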
+
   public SpServiceDefinitionBuilder registerPipelineElement(Declarer<?> declarer) {
     this.serviceDefinition.addDeclarer(declarer);
     return this;
diff --git a/streampipes-data-explorer-commons/pom.xml b/streampipes-data-explorer-commons/pom.xml
new file mode 100644
index 0000000..c42e050
--- /dev/null
+++ b/streampipes-data-explorer-commons/pom.xml
@@ -0,0 +1,65 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  ~
+  -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streampipes-parent</artifactId>
+        <groupId>org.apache.streampipes</groupId>
+        <version>0.71.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streampipes-data-explorer-commons</artifactId>
+    <dependencies>
+        <!-- StreamPipes -->
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-service-discovery-api</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-model</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-client</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-commons</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+
+        <!-- Others -->
+        <dependency>
+            <groupId>org.lightcouch</groupId>
+            <artifactId>lightcouch</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.influxdb</groupId>
+            <artifactId>influxdb-java</artifactId>
+        </dependency>
+    </dependencies>
+
+</project>
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/DataExplorerUtils.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/DataExplorerUtils.java
new file mode 100644
index 0000000..4fde783
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/DataExplorerUtils.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.streampipes.dataexplorer.commons;
+
+import org.apache.streampipes.client.StreamPipesClient;
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.dataexplorer.commons.influx.InfluxNameSanitizer;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.schema.EventProperty;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class DataExplorerUtils {
+    /**
+     * Sanitizes the event schema and registers the DataLakeMeasure at the data lake API (stored in CouchDB)
+     *
+     * @param client StreamPipes client used to register the measure
+     * @param measure the DataLakeMeasure to sanitize and register
+     * @return the sanitized DataLakeMeasure
+     */
+    public static DataLakeMeasure sanitizeAndRegisterAtDataLake(StreamPipesClient client,
+                                                     DataLakeMeasure measure) throws SpRuntimeException {
+        sanitizeDataLakeMeasure(measure);
+        registerAtDataLake(client, measure);
+
+        return measure;
+    }
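+
+    // Usage sketch (assumption): a data lake sink would typically call this once when it is invoked, e.g.
+    //   DataLakeMeasure registered = DataExplorerUtils.sanitizeAndRegisterAtDataLake(client, measure);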
+
+    private static void registerAtDataLake(StreamPipesClient client,
+                                                     DataLakeMeasure measure) throws SpRuntimeException {
+        client
+          .customRequest()
+          .sendPost("api/v4/datalake/measure/", measure);
+    }
+
+
+    private static void sanitizeDataLakeMeasure(DataLakeMeasure measure) throws SpRuntimeException {
+
+        // Removes selected timestamp from event schema
+        removeTimestampsFromEventSchema(measure);
+
+        // Renames runtime names that collide with reserved InfluxDB keywords by appending an underscore
+        measure.getEventSchema()
+                .getEventProperties()
+                .forEach(ep -> ep.setRuntimeName(InfluxNameSanitizer.renameReservedKeywords(ep.getRuntimeName())));
+
+    }
+
+    private static void removeTimestampsFromEventSchema(DataLakeMeasure measure) {
+        List<EventProperty> eventPropertiesWithoutTimestamp = measure.getEventSchema().getEventProperties()
+          .stream()
+          .filter(eventProperty -> !measure.getTimestampField().endsWith(eventProperty.getRuntimeName()))
+          .collect(Collectors.toList());
+        measure.getEventSchema().setEventProperties(eventPropertiesWithoutTimestamp);
+    }
+
+}
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/DataExplorerWriter.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/DataExplorerWriter.java
new file mode 100644
index 0000000..925a08b
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/DataExplorerWriter.java
@@ -0,0 +1,65 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.commons;
+
+import org.apache.streampipes.dataexplorer.commons.configs.DataExplorerConfigurations;
+import org.apache.streampipes.dataexplorer.commons.influx.InfluxConnectionSettings;
+import org.influxdb.InfluxDB;
+import org.influxdb.InfluxDBFactory;
+import org.influxdb.dto.Point;
+
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+@Deprecated
+public class DataExplorerWriter {
+    private InfluxDB influxDB;
+
+    // TODO return a connection here
+    public void connect(InfluxConnectionSettings dataExplorerConnectionSettings) {
+        this.influxDB = InfluxDBFactory.connect(dataExplorerConnectionSettings.getInfluxDbHost() + ":" + dataExplorerConnectionSettings.getInfluxDbPort(),
+                dataExplorerConnectionSettings.getUser(), dataExplorerConnectionSettings.getPassword());
+        this.influxDB.setDatabase(DataExplorerConfigurations.DATA_LAKE_DATABASE_NAME);
+    }
+
+    public void close() {
+        this.influxDB.close();
+    }
+
+    public void write(Map<String, Object> data,
+                      String measurement) {
+        Point.Builder builder = Point.measurement(measurement)
+                .time((Long) data.get("timestamp"), TimeUnit.MILLISECONDS);
+
+        data.remove("timestamp");
+
+        for (String key : data.keySet()) {
+            if (data.get(key) instanceof Double || data.get(key) == null) {
+                builder.addField(key, (Double) data.get(key));
+            } else if (data.get(key) instanceof Integer) {
+                builder.addField(key, (Integer) data.get(key));
+            } else {
+                builder.tag(key, (String) data.get(key));
+            }
+        }
+
+        this.influxDB.write(builder.build());
+    }
+
+}
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/TimeSeriesStore.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/TimeSeriesStore.java
new file mode 100644
index 0000000..27fdcb3
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/TimeSeriesStore.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.commons;
+
+import org.apache.streampipes.client.StreamPipesClient;
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.dataexplorer.commons.image.ImageStore;
+import org.apache.streampipes.dataexplorer.commons.influx.InfluxStore;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.svcdiscovery.api.SpConfig;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+
+public class TimeSeriesStore {
+
+    private static final Logger LOG = LoggerFactory.getLogger(TimeSeriesStore.class);
+
+    private ImageStore imageStore;
+    private final InfluxStore influxStore;
+
+
+    public TimeSeriesStore(SpConfig config,
+                           StreamPipesClient client,
+                           DataLakeMeasure measure,
+                           boolean enableImageStore) {
+
+        measure = DataExplorerUtils.sanitizeAndRegisterAtDataLake(client, measure);
+
+        if (enableImageStore) {
+            // TODO check if event properties are replaced correctly
+            this.imageStore = new ImageStore(measure, config);
+        }
+
+        this.influxStore = new InfluxStore(measure, config);
+
+    }
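+
+    // Usage sketch (assumption): create the store once, forward incoming events, and close it on shutdown:
+    //   TimeSeriesStore store = new TimeSeriesStore(config, client, measure, false);
+    //   store.onEvent(event);
+    //   store.close();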
+
+    public boolean onEvent(Event event) throws SpRuntimeException {
+        // Store all images in image store and replace image with internal id
+        if (imageStore != null) {
+            this.imageStore.onEvent(event);
+        }
+
+        // Store event in time series database
+        this.influxStore.onEvent(event);
+
+        return true;
+    }
+
+
+    public boolean alterRetentionTime(DataLakeMeasure dataLakeMeasure) {
+        return true;
+    }
+
+    public void close() throws SpRuntimeException  {
+        if (imageStore != null) {
+            try {
+                this.imageStore.close();
+            } catch (IOException e) {
+                LOG.error("Could not close couchDB connection");
+                throw new SpRuntimeException(e);
+            }
+        }
+
+        this.influxStore.close();
+    }
+}
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/CouchDbConfigurations.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/CouchDbConfigurations.java
new file mode 100644
index 0000000..8079e9d
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/CouchDbConfigurations.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.streampipes.dataexplorer.commons.configs;
+
+import org.apache.streampipes.svcdiscovery.api.model.ConfigItem;
+
+import java.util.Arrays;
+import java.util.List;
+
+public class CouchDbConfigurations {
+
+    public static List<ConfigItem> getDefaults() {
+        return Arrays.asList(
+                ConfigItem.from(CouchDbEnvKeys.COUCHDB_HOST, "couchdb", "Hostname for CouchDB to store image blobs"),
+                ConfigItem.from(CouchDbEnvKeys.COUCHDB_PORT, 5984, "Port for CouchDB to store image blobs"),
+                ConfigItem.from(CouchDbEnvKeys.COUCHDB_PROTOCOL, "http", "Protocol for CouchDB to store image blobs")
+        );
+    }
+
+}
\ No newline at end of file
diff --git a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/CouchDbEnvKeys.java
similarity index 72%
copy from ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
copy to streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/CouchDbEnvKeys.java
index 58ba04b..69a5257 100644
--- a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/CouchDbEnvKeys.java
@@ -16,3 +16,10 @@
  *
  */
 
+package org.apache.streampipes.dataexplorer.commons.configs;
+
+public class CouchDbEnvKeys {
+    public final static String COUCHDB_HOST = "SP_COUCHDB_HOST";
+    public final static String COUCHDB_PORT = "SP_COUCHDB_PORT";
+    public final static String COUCHDB_PROTOCOL = "SP_COUCHDB_PROTOCOL";
+}
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/DataExplorerConfigurations.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/DataExplorerConfigurations.java
new file mode 100644
index 0000000..e0e483c
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/DataExplorerConfigurations.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+package org.apache.streampipes.dataexplorer.commons.configs;
+
+import org.apache.streampipes.svcdiscovery.api.model.ConfigItem;
+
+import java.util.Arrays;
+import java.util.List;
+
+
+public class DataExplorerConfigurations {
+    public final static String DATA_LAKE_DATABASE_NAME = "sp";
+
+    public static List<ConfigItem> getDefaults() {
+
+        return Arrays.asList(
+                ConfigItem.from(DataExplorerEnvKeys.DATA_LAKE_HOST, "influxdb", "Hostname for the StreamPipes data lake database"),
+                ConfigItem.from(DataExplorerEnvKeys.DATA_LAKE_PROTOCOL, "http", "Protocol for the StreamPipes data lake database"),
+                ConfigItem.from(DataExplorerEnvKeys.DATA_LAKE_PORT, 8086, "Port for the StreamPipes data lake database"),
+                ConfigItem.from(DataExplorerEnvKeys.DATA_LAKE_USERNAME, "default", "Username for the StreamPipes data lake database"),
+                ConfigItem.from(DataExplorerEnvKeys.DATA_LAKE_PASSWORD, "default", "Password for the StreamPipes data lake database"),
+                ConfigItem.from(DataExplorerEnvKeys.DATA_LAKE_DATABASE_NAME, DATA_LAKE_DATABASE_NAME, "Database name for the StreamPipes data lake database")
+        );
+    }
+
+}
\ No newline at end of file
diff --git a/streampipes-extensions/streampipes-sinks-internal-jvm/src/main/java/org/apache/streampipes/sinks/internal/jvm/config/ConfigKeys.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/DataExplorerEnvKeys.java
similarity index 80%
rename from streampipes-extensions/streampipes-sinks-internal-jvm/src/main/java/org/apache/streampipes/sinks/internal/jvm/config/ConfigKeys.java
rename to streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/DataExplorerEnvKeys.java
index 4d6f0d9..cd4d17c 100644
--- a/streampipes-extensions/streampipes-sinks-internal-jvm/src/main/java/org/apache/streampipes/sinks/internal/jvm/config/ConfigKeys.java
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/configs/DataExplorerEnvKeys.java
@@ -15,10 +15,9 @@
  * limitations under the License.
  *
  */
+package org.apache.streampipes.dataexplorer.commons.configs;
 
-package org.apache.streampipes.sinks.internal.jvm.config;
-
-public class ConfigKeys {
+public class DataExplorerEnvKeys {
     public final static String DATA_LAKE_HOST = "SP_DATA_LAKE_HOST";
     public final static String DATA_LAKE_PROTOCOL = "SP_DATA_LAKE_PROTOCOL";
     public final static String DATA_LAKE_PORT = "SP_DATA_LAKE_PORT";
@@ -26,7 +25,4 @@
     public final static String DATA_LAKE_PASSWORD = "SP_DATA_LAKE_PASSWORD";
     public final static String DATA_LAKE_DATABASE_NAME = "SP_DATA_LAKE_DATABASE_NAME";
 
-    public final static String COUCHDB_HOST = "SP_COUCHDB_HOST";
-    public final static String COUCHDB_PORT = "SP_COUCHDB_PORT";
-    public final static String COUCHDB_PROTOCOL = "SP_COUCHDB_PROTOCOL";
 }
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/image/ImageStore.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/image/ImageStore.java
new file mode 100644
index 0000000..21c51c2
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/image/ImageStore.java
@@ -0,0 +1,85 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.commons.image;
+
+import org.apache.commons.codec.binary.Base64;
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.dataexplorer.commons.configs.CouchDbEnvKeys;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.schema.EventProperty;
+import org.apache.streampipes.svcdiscovery.api.SpConfig;
+import org.lightcouch.CouchDbClient;
+import org.lightcouch.CouchDbProperties;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.List;
+import java.util.UUID;
+
+public class ImageStore {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ImageStore.class);
+  private static final String DB_NAME = "images";
+
+  private List<EventProperty> imageProperties;
+  private CouchDbClient couchDbClient;
+
+  public ImageStore(DataLakeMeasure measure, SpConfig config) {
+    this.couchDbClient = new CouchDbClient(from(config));
+    this.imageProperties = ImageStoreUtils.getImageProperties(measure);
+  }
+
+  public void onEvent(Event event) throws SpRuntimeException {
+    this.imageProperties.forEach(eventProperty -> {
+      String imageDocId = UUID.randomUUID().toString();
+      String image = event.getFieldByRuntimeName(eventProperty.getRuntimeName()).getAsPrimitive().getAsString();
+
+      byte[] data = Base64.decodeBase64(image);
+      storeImage(data, imageDocId);
+      event.updateFieldBySelector("s0::" + eventProperty.getRuntimeName(), imageDocId);
+    });
+  }
+
+  public void storeImage(byte[] imageBytes,
+                         String imageDocId) {
+    this.couchDbClient.saveAttachment(
+      new ByteArrayInputStream(imageBytes),
+      imageDocId,
+      "image/jpeg",
+      imageDocId,
+      null);
+
+  }
+
+  public void close() throws IOException {
+    this.couchDbClient.close();
+  }
+
+  private static CouchDbProperties from(SpConfig config) {
+    String couchDbProtocol = config.getString(CouchDbEnvKeys.COUCHDB_PROTOCOL);
+    String couchDbHost = config.getString(CouchDbEnvKeys.COUCHDB_HOST);
+    int couchDbPort = config.getInteger(CouchDbEnvKeys.COUCHDB_PORT);
+
+    return new CouchDbProperties(DB_NAME, true, couchDbProtocol,
+      couchDbHost, couchDbPort, null, null);
+  }
+}
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/image/ImageStoreUtils.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/image/ImageStoreUtils.java
new file mode 100644
index 0000000..ba3ab6c
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/image/ImageStoreUtils.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.commons.image;
+
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.schema.EventProperty;
+import org.apache.streampipes.vocabulary.SPSensor;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class ImageStoreUtils {
+
+    public static List<EventProperty> getImageProperties(DataLakeMeasure measure) {
+        return measure.getEventSchema().getEventProperties().stream()
+                .filter(eventProperty -> eventProperty.getDomainProperties() != null
+                        && eventProperty.getDomainProperties().size() > 0
+                        && eventProperty.getDomainProperties().get(0).toString().equals(SPSensor.IMAGE))
+                .collect(Collectors.toList());
+    }
+}
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxConnectionSettings.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxConnectionSettings.java
new file mode 100644
index 0000000..a6c1538
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxConnectionSettings.java
@@ -0,0 +1,75 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.commons.influx;
+
+import org.apache.streampipes.dataexplorer.commons.configs.DataExplorerEnvKeys;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.svcdiscovery.api.SpConfig;
+
+public class InfluxConnectionSettings {
+
+  private final Integer influxDbPort;
+  private final String influxDbHost;
+  private final String databaseName;
+  private final String user;
+  private final String password;
+
+  public static InfluxConnectionSettings from(SpConfig configStore) {
+
+    return new InfluxConnectionSettings(
+            configStore.getString(DataExplorerEnvKeys.DATA_LAKE_PROTOCOL) + "://" + configStore.getString(DataExplorerEnvKeys.DATA_LAKE_HOST),
+            configStore.getInteger(DataExplorerEnvKeys.DATA_LAKE_PORT),
+            configStore.getString(DataExplorerEnvKeys.DATA_LAKE_DATABASE_NAME),
+            configStore.getString(DataExplorerEnvKeys.DATA_LAKE_USERNAME),
+            configStore.getString(DataExplorerEnvKeys.DATA_LAKE_PASSWORD));
+  }
+
+
+  private InfluxConnectionSettings(String influxDbHost,
+                                   Integer influxDbPort,
+                                   String databaseName,
+                                   String user,
+                                   String password) {
+    this.influxDbHost = influxDbHost;
+    this.influxDbPort = influxDbPort;
+    this.databaseName = databaseName;
+    this.user = user;
+    this.password = password;
+  }
+
+  public Integer getInfluxDbPort() {
+    return influxDbPort;
+  }
+
+  public String getInfluxDbHost() {
+    return influxDbHost;
+  }
+
+  public String getDatabaseName() {
+    return databaseName;
+  }
+
+  public String getUser() {
+    return user;
+  }
+
+  public String getPassword() {
+    return password;
+  }
+}
diff --git a/streampipes-extensions/streampipes-sinks-internal-jvm/src/main/java/org/apache/streampipes/sinks/internal/jvm/datalake/InfluxDbReservedKeywords.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxDbReservedKeywords.java
similarity index 97%
rename from streampipes-extensions/streampipes-sinks-internal-jvm/src/main/java/org/apache/streampipes/sinks/internal/jvm/datalake/InfluxDbReservedKeywords.java
rename to streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxDbReservedKeywords.java
index 26e7a14..754fc7c 100644
--- a/streampipes-extensions/streampipes-sinks-internal-jvm/src/main/java/org/apache/streampipes/sinks/internal/jvm/datalake/InfluxDbReservedKeywords.java
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxDbReservedKeywords.java
@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.streampipes.sinks.internal.jvm.datalake;
+package org.apache.streampipes.dataexplorer.commons.influx;
 
 import java.util.Arrays;
 import java.util.List;
diff --git a/streampipes-extensions/streampipes-processors-enricher-flink/src/main/java/org/apache/streampipes/processors/enricher/flink/processor/math/operation/OperationDivide.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxNameSanitizer.java
similarity index 68%
copy from streampipes-extensions/streampipes-processors-enricher-flink/src/main/java/org/apache/streampipes/processors/enricher/flink/processor/math/operation/OperationDivide.java
copy to streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxNameSanitizer.java
index b648a20..302ea45 100644
--- a/streampipes-extensions/streampipes-processors-enricher-flink/src/main/java/org/apache/streampipes/processors/enricher/flink/processor/math/operation/OperationDivide.java
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxNameSanitizer.java
@@ -16,12 +16,16 @@
  *
  */
 
-package org.apache.streampipes.processors.enricher.flink.processor.math.operation;
+package org.apache.streampipes.dataexplorer.commons.influx;
 
-public class OperationDivide implements Operation {
+public class InfluxNameSanitizer {
 
-    @Override
-    public Double operate(Double valLeft, Double valRight) {
-        return valLeft / valRight;
+    public static String renameReservedKeywords(String runtimeName) {
+        if (InfluxDbReservedKeywords.keywordList.stream().anyMatch(k -> k.equalsIgnoreCase(runtimeName))) {
+            return runtimeName + "_";
+        } else {
+            return runtimeName;
+        }
     }
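+
+    // Example (assuming the keyword list contains the InfluxQL keyword LIMIT):
+    //   renameReservedKeywords("limit") returns "limit_", while non-reserved names are returned unchanged.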
+
 }
diff --git a/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxStore.java b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxStore.java
new file mode 100644
index 0000000..429f62e
--- /dev/null
+++ b/streampipes-data-explorer-commons/src/main/java/org/apache/streampipes/dataexplorer/commons/influx/InfluxStore.java
@@ -0,0 +1,221 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.commons.influx;
+
+import org.apache.streampipes.commons.exceptions.SpRuntimeException;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.runtime.Event;
+import org.apache.streampipes.model.runtime.field.PrimitiveField;
+import org.apache.streampipes.model.schema.EventProperty;
+import org.apache.streampipes.model.schema.EventPropertyPrimitive;
+import org.apache.streampipes.model.schema.PropertyScope;
+import org.apache.streampipes.svcdiscovery.api.SpConfig;
+import org.apache.streampipes.vocabulary.XSD;
+import org.influxdb.InfluxDB;
+import org.influxdb.InfluxDBFactory;
+import org.influxdb.dto.Point;
+import org.influxdb.dto.Pong;
+import org.influxdb.dto.Query;
+import org.influxdb.dto.QueryResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+
+public class InfluxStore {
+
+  private static final Logger LOG = LoggerFactory.getLogger(InfluxStore.class);
+
+  private InfluxDB influxDb = null;
+  DataLakeMeasure measure;
+
+  Map<String, String> sanitizedRuntimeNames = new HashMap<>();
+
+  public InfluxStore(DataLakeMeasure measure,
+                     SpConfig configStore) throws SpRuntimeException {
+
+    this.measure = measure;
+    InfluxConnectionSettings settings = InfluxConnectionSettings.from(configStore);
+
+    // store sanitized target property runtime names in local variable
+    measure.getEventSchema()
+        .getEventProperties()
+        .forEach(ep -> sanitizedRuntimeNames.put(ep.getRuntimeName(),
+            InfluxNameSanitizer.renameReservedKeywords(ep.getRuntimeName())));
+
+    connect(settings);
+  }
+
+  /**
+   * Connects to the InfluxDB server, sets the database and initializes the batch behaviour
+   *
+   * @throws SpRuntimeException If no connection can be established or if the database could not
+   *                            be found or created
+   */
+  private void connect(InfluxConnectionSettings settings) throws SpRuntimeException {
+    // Connecting to the server
+    // "http://" must be in front
+    String urlAndPort = settings.getInfluxDbHost() + ":" + settings.getInfluxDbPort();
+    influxDb = InfluxDBFactory.connect(urlAndPort, settings.getUser(), settings.getPassword());
+
+    // Checking, if server is available
+    Pong response = influxDb.ping();
+    if (response.getVersion().equalsIgnoreCase("unknown")) {
+      throw new SpRuntimeException("Could not connect to InfluxDb Server: " + urlAndPort);
+    }
+
+    String databaseName = settings.getDatabaseName();
+    // Checking whether the database exists
+    if (!databaseExists(databaseName)) {
+      LOG.info("Database '" + databaseName + "' not found. Gets created ...");
+      createDatabase(databaseName);
+    }
+
+    // setting up the database
+    influxDb.setDatabase(databaseName);
+    int batchSize = 2000;
+    int flushDuration = 500;
+    influxDb.enableBatch(batchSize, flushDuration, TimeUnit.MILLISECONDS);
+  }
+
+  private boolean databaseExists(String dbName) {
+    QueryResult queryResult = influxDb.query(new Query("SHOW DATABASES", ""));
+    for (List<Object> a : queryResult.getResults().get(0).getSeries().get(0).getValues()) {
+      if (a.get(0).equals(dbName)) {
+        return true;
+      }
+    }
+    return false;
+  }
+
+  /**
+   * Creates a new database with the given name
+   *
+   * @param dbName The name of the database which should be created
+   */
+  private void createDatabase(String dbName) throws SpRuntimeException {
+    if (!dbName.matches("^[a-zA-Z_]\\w*$")) {
+      throw new SpRuntimeException(
+          "Database name '" + dbName + "' not allowed. Allowed names: ^[a-zA-Z_][a-zA-Z0-9_]*$");
+    }
+    influxDb.query(new Query("CREATE DATABASE \"" + dbName + "\"", ""));
+  }
+
+  /**
+   * Saves an event to the connected InfluxDB database
+   *
+   * @param event The event which should be saved
+   * @throws SpRuntimeException If the column name (key-value of the event map) is not allowed
+   */
+  public void onEvent(Event event) throws SpRuntimeException {
+    if (event == null) {
+      throw new SpRuntimeException("event is null");
+    }
+
+    Long timestampValue = event.getFieldBySelector(measure.getTimestampField()).getAsPrimitive().getAsLong();
+    Point.Builder point =
+        Point.measurement(measure.getMeasureName()).time((long) timestampValue, TimeUnit.MILLISECONDS);
+
+    for (EventProperty ep : measure.getEventSchema().getEventProperties()) {
+      if (ep instanceof EventPropertyPrimitive) {
+        String runtimeName = ep.getRuntimeName();
+
+        // timestamp should not be added as a field
+        if (!measure.getTimestampField().endsWith(runtimeName)) {
+          String sanitizedRuntimeName = sanitizedRuntimeNames.get(runtimeName);
+
+          try {
+            PrimitiveField eventPropertyPrimitiveField = event.getFieldByRuntimeName(runtimeName).getAsPrimitive();
+            if (eventPropertyPrimitiveField.getRawValue() == null) {
+              LOG.warn("Field value for {} is null, ignoring value.", sanitizedRuntimeName);
+            } else {
+
+              // store property as tag when the field is a dimension property
+              if (PropertyScope.DIMENSION_PROPERTY.name().equals(ep.getPropertyScope())) {
+                point.tag(sanitizedRuntimeName, eventPropertyPrimitiveField.getAsString());
+              } else {
+                handleMeasurementProperty(
+                    point,
+                    (EventPropertyPrimitive) ep,
+                    sanitizedRuntimeName,
+                    eventPropertyPrimitiveField);
+              }
+            }
+          } catch (SpRuntimeException iae) {
+            LOG.warn("Field {} was missing in event and will be ignored", runtimeName, iae);
+          }
+
+        }
+
+      }
+    }
+
+    influxDb.write(point.build());
+  }
+
+  private void handleMeasurementProperty(Point.Builder p,
+                                         EventPropertyPrimitive ep,
+                                         String preparedRuntimeName,
+                                         PrimitiveField eventPropertyPrimitiveField) {
+    try {
+      // Store property according to property type
+      String runtimeType = ep.getRuntimeType();
+      if (XSD._integer.toString().equals(runtimeType)) {
+        try {
+          p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsInt());
+        } catch (NumberFormatException ef) {
+          p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsFloat());
+        }
+      } else if (XSD._float.toString().equals(runtimeType)) {
+        p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsFloat());
+      } else if (XSD._double.toString().equals(runtimeType)) {
+        p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsDouble());
+      } else if (XSD._boolean.toString().equals(runtimeType)) {
+        p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsBoolean());
+      } else if (XSD._long.toString().equals(runtimeType)) {
+        try {
+          p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsLong());
+        } catch (NumberFormatException ef) {
+          p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsFloat());
+        }
+      } else {
+        p.addField(preparedRuntimeName, eventPropertyPrimitiveField.getAsString());
+      }
+    } catch (NumberFormatException e) {
+      LOG.warn("Wrong number format for field {}, ignoring.", preparedRuntimeName);
+    }
+  }
+
+  /**
+   * Shuts down the connection to the InfluxDB server
+   */
+  public void close() throws SpRuntimeException {
+    influxDb.flush();
+    try {
+      Thread.sleep(1000);
+    } catch (InterruptedException e) {
+      throw new SpRuntimeException(e);
+    }
+    influxDb.close();
+  }
+
+}
diff --git a/streampipes-data-explorer/pom.xml b/streampipes-data-explorer/pom.xml
index 4079bfe..78d1676 100644
--- a/streampipes-data-explorer/pom.xml
+++ b/streampipes-data-explorer/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -31,17 +31,17 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-config</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-model</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-storage-management</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <dependency>
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeManagementV4.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeManagementV4.java
index b5c68df..f9918fc 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeManagementV4.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeManagementV4.java
@@ -36,7 +36,13 @@
 import org.apache.streampipes.model.datalake.DataLakeMeasure;
 import org.apache.streampipes.model.datalake.DataLakeRetentionPolicy;
 import org.apache.streampipes.model.datalake.SpQueryResult;
+import org.apache.streampipes.model.schema.EventProperty;
+import org.apache.streampipes.model.schema.EventPropertyList;
+import org.apache.streampipes.model.schema.EventPropertyNested;
+import org.apache.streampipes.model.schema.EventPropertyPrimitive;
+import org.apache.streampipes.storage.api.IDataLakeStorage;
 import org.apache.streampipes.storage.couchdb.utils.Utils;
+import org.apache.streampipes.storage.management.StorageDispatcher;
 import org.influxdb.InfluxDB;
 import org.influxdb.dto.Query;
 import org.influxdb.dto.QueryResult;
@@ -53,16 +59,15 @@
 import java.time.temporal.ChronoField;
 import java.time.temporal.ChronoUnit;
 import java.time.temporal.TemporalAccessor;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.stream.Collectors;
 
 import static org.apache.streampipes.dataexplorer.v4.SupportedDataLakeQueryParameters.*;
 
 public class DataLakeManagementV4 {
 
+    public static final String FOR_ID_KEY = "forId";
+
     private static final DateTimeFormatter formatter = new DateTimeFormatterBuilder()
             .appendPattern("uuuu[-MM[-dd]]['T'HH[:mm[:ss[.SSSSSSSSS][.SSSSSSSS][.SSSSSSS][.SSSSSS][.SSSSS][.SSSS][.SSS][.SS][.S]]]][XXX]")
             .parseDefaulting(ChronoField.NANO_OF_SECOND, 0)
@@ -73,6 +78,10 @@
         return DataExplorerUtils.getInfos();
     }
 
+    public DataLakeMeasure getById(String measureId) {
+        return getDataLakeStorage().findOne(measureId);
+    }
+
     public SpQueryResult getData(ProvidedQueryParams queryParams) throws IllegalArgumentException {
         if (queryParams.has(QP_AUTO_AGGREGATE)) {
             queryParams = new AutoAggregationHandler(queryParams).makeAutoAggregationQueryParams();
@@ -84,7 +93,12 @@
             return new DataExplorerQueryV4(queryParts, maximumAmountOfEvents).executeQuery();
         }
 
-        return new DataExplorerQueryV4(queryParts).executeQuery();
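+        // if the request originates from a specific widget, tag the result with that id so the caller can match it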
+        if (queryParams.getProvidedParams().containsKey(FOR_ID_KEY)) {
+            String forWidgetId = queryParams.getProvidedParams().get(FOR_ID_KEY);
+            return new DataExplorerQueryV4(queryParts, forWidgetId).executeQuery();
+        } else {
+            return new DataExplorerQueryV4(queryParts).executeQuery();
+        }
     }
 
     public void getDataAsStream(ProvidedQueryParams params, String format, OutputStream outputStream) throws IOException {
@@ -148,6 +162,11 @@
             }
 
             boolean isFirstDataObject = true;
+            String delimiter = ",";
+
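+            // the "delimiter" query parameter maps the value "comma" to ',' and any other value to ';'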
+            if (params.has(QP_CSV_DELIMITER)) {
+                delimiter = params.getAsString(QP_CSV_DELIMITER).equals("comma") ? "," : ";";
+            }
 
             do {
                 params.update(SupportedDataLakeQueryParameters.QP_PAGE, String.valueOf(i));
@@ -158,7 +177,7 @@
                         boolean isFirst = true;
                         for (int i1 = 0; i1 < dataResult.getHeaders().size(); i1++) {
                             if (!isFirst) {
-                                outputStream.write(toBytes(";"));
+                                outputStream.write(toBytes(delimiter));
                             }
                             isFirst = false;
                             outputStream.write(toBytes(dataResult.getHeaders().get(i1)));
@@ -174,7 +193,7 @@
                         for (int i1 = 0; i1 < row.size(); i1++) {
                             Object element = row.get(i1);
                             if (!isFirstInRow) {
-                                outputStream.write(toBytes(";"));
+                                outputStream.write(toBytes(delimiter));
                             }
                             isFirstInRow = false;
                             if (i1 == 0) {
@@ -307,9 +326,10 @@
                                             String fields) {
         InfluxDB influxDB = DataExplorerUtils.getInfluxDBClient();
         Map<String, Object> tags = new HashMap<>();
-        List<String> fieldList = Arrays.asList(fields.split(","));
-        fieldList.forEach(f -> {
-                String q = "SHOW TAG VALUES ON \"" + BackendConfig.INSTANCE.getInfluxDatabaseName() + "\" FROM \"" +measurementId + "\" WITH KEY = \"" +f + "\"";
+        if (fields != null && !("".equals(fields))) {
+            List<String> fieldList = Arrays.asList(fields.split(","));
+            fieldList.forEach(f -> {
+                String q = "SHOW TAG VALUES ON \"" + BackendConfig.INSTANCE.getInfluxDatabaseName() + "\" FROM \"" + measurementId + "\" WITH KEY = \"" + f + "\"";
                 Query query = new Query(q);
                 QueryResult queryResult = influxDB.query(query);
                 if (queryResult.getResults() != null) {
@@ -323,8 +343,69 @@
                         });
                     });
                 }
-        });
+            });
+        }
 
         return tags;
     }
+
+
+    // TODO validate method
+    public DataLakeMeasure addDataLake(DataLakeMeasure measure) {
+        List<DataLakeMeasure> dataLakeMeasureList = getDataLakeStorage().getAllDataLakeMeasures();
+        Optional<DataLakeMeasure> optional = dataLakeMeasureList.stream().filter(entry -> entry.getMeasureName().equals(measure.getMeasureName())).findFirst();
+
+        if (optional.isPresent()) {
+            DataLakeMeasure oldEntry = optional.get();
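+            // a measure with this name already exists: keep the stored entry if the schemas differ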
+            if (!compareEventProperties(oldEntry.getEventSchema().getEventProperties(), measure.getEventSchema().getEventProperties())) {
+                return oldEntry;
+            }
+        } else {
+            measure.setSchemaVersion(DataLakeMeasure.CURRENT_SCHEMA_VERSION);
+            getDataLakeStorage().storeDataLakeMeasure(measure);
+            return measure;
+        }
+
+        return measure;
+    }
+
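+    /**
+     * Checks whether two event schemas are structurally equal by recursively comparing their
+     * properties via runtime name and runtime type.
+     */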
+    private boolean compareEventProperties(List<EventProperty> prop1, List<EventProperty> prop2) {
+        if (prop1.size() != prop2.size()) {
+            return false;
+        }
+
+        return prop1.stream().allMatch(prop -> {
+
+            for (EventProperty property : prop2) {
+                if (prop.getRuntimeName().equals(property.getRuntimeName())) {
+
+                    //primitive
+                    if (prop instanceof EventPropertyPrimitive && property instanceof EventPropertyPrimitive) {
+                        if (((EventPropertyPrimitive) prop)
+                          .getRuntimeType()
+                          .equals(((EventPropertyPrimitive) property).getRuntimeType())) {
+                            return true;
+                        }
+
+                        //list
+                    } else if (prop instanceof EventPropertyList && property instanceof EventPropertyList) {
+                        return compareEventProperties(Collections.singletonList(((EventPropertyList) prop).getEventProperty()),
+                          Collections.singletonList(((EventPropertyList) property).getEventProperty()));
+
+                        //nested
+                    } else if (prop instanceof EventPropertyNested && property instanceof EventPropertyNested) {
+                        return compareEventProperties(((EventPropertyNested) prop).getEventProperties(),
+                          ((EventPropertyNested) property).getEventProperties());
+                    }
+                }
+            }
+            return false;
+
+        });
+    }
+
+
+    private IDataLakeStorage getDataLakeStorage() {
+        return StorageDispatcher.INSTANCE.getNoSqlStore().getDataLakeStorage();
+    }
 }
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeNoUserManagementV3.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeNoUserManagementV3.java
index 3f9c7bd..5b411a0 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeNoUserManagementV3.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/DataLakeNoUserManagementV3.java
@@ -28,8 +28,11 @@
 import java.util.Optional;
 
 
+@Deprecated
 public class DataLakeNoUserManagementV3 {
 
+
+  @Deprecated
   public boolean addDataLake(String measure, EventSchema eventSchema) {
     List<DataLakeMeasure> dataLakeMeasureList = getDataLakeStorage().getAllDataLakeMeasures();
     Optional<DataLakeMeasure> optional = dataLakeMeasureList.stream().filter(entry -> entry.getMeasureName().equals(measure)).findFirst();
@@ -39,7 +42,9 @@
         return false;
       }
     } else {
-      getDataLakeStorage().storeDataLakeMeasure(new DataLakeMeasure(measure, eventSchema));
+      DataLakeMeasure dataLakeMeasure = new DataLakeMeasure(measure, eventSchema);
+      dataLakeMeasure.setSchemaVersion(DataLakeMeasure.CURRENT_SCHEMA_VERSION);
+      getDataLakeStorage().storeDataLakeMeasure(dataLakeMeasure);
     }
     return true;
   }
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/query/DeleteDataQuery.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/query/DeleteDataQuery.java
index 73d6be6..4570927 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/query/DeleteDataQuery.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/query/DeleteDataQuery.java
@@ -30,7 +30,7 @@
 
   @Override
   protected void getQuery(DataExplorerQueryBuilder queryBuilder) {
-    queryBuilder.add("DROP MEASUREMENT " + measure.getMeasureName());
+    queryBuilder.add("DROP MEASUREMENT \"" + measure.getMeasureName() + "\"");
   }
 
   @Override
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryBuilder.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryBuilder.java
new file mode 100644
index 0000000..b4ea310
--- /dev/null
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryBuilder.java
@@ -0,0 +1,181 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.dataexplorer.sdk;
+
+import org.apache.streampipes.config.backend.BackendConfig;
+import org.apache.streampipes.dataexplorer.v4.params.ColumnFunction;
+import org.influxdb.dto.Query;
+import org.influxdb.querybuilder.Ordering;
+import org.influxdb.querybuilder.SelectionQueryImpl;
+import org.influxdb.querybuilder.clauses.*;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import static org.influxdb.querybuilder.BuiltQuery.QueryBuilder.*;
+
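+/**
+ * Fluent builder for InfluxDB data lake queries. A minimal usage sketch
+ * (measurement name, column and time bounds are placeholders):
+ *
+ * <pre>
+ * Query query = DataLakeQueryBuilder
+ *   .create("my_measurement")
+ *   .withSimpleColumn("temperature")
+ *   .withTimeBoundary(startTime, endTime)
+ *   .withOrderBy(DataLakeQueryOrdering.DESC)
+ *   .withLimit(100)
+ *   .build();
+ * </pre>
+ */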
+public class DataLakeQueryBuilder {
+
+  private final String measurementId;
+  private final SelectionQueryImpl selectionQuery;
+  private final List<Clause> whereClauses;
+  private final List<Clause> groupByClauses;
+  private Ordering ordering;
+  private int limit = Integer.MIN_VALUE;
+
+  public static DataLakeQueryBuilder create(String measurementId) {
+    return new DataLakeQueryBuilder(measurementId);
+  }
+
+  private DataLakeQueryBuilder(String measurementId) {
+    this.measurementId = measurementId;
+    this.selectionQuery = select();
+    this.whereClauses = new ArrayList<>();
+    this.groupByClauses = new ArrayList<>();
+  }
+
+  public DataLakeQueryBuilder withSimpleColumn(String columnName) {
+    this.selectionQuery.column(columnName);
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withAggregatedColumn(String columnName,
+                                                   ColumnFunction columnFunction,
+                                                   String targetName) {
+    if (columnFunction == ColumnFunction.COUNT) {
+      this.selectionQuery.count(columnName).as(targetName);
+    } else if (columnFunction == ColumnFunction.MEAN) {
+      this.selectionQuery.mean(columnName).as(targetName);
+    } else if (columnFunction == ColumnFunction.MIN) {
+      this.selectionQuery.min(columnName).as(targetName);
+    } else if (columnFunction == ColumnFunction.MAX) {
+      this.selectionQuery.max(columnName).as(targetName);
+    } else if (columnFunction == ColumnFunction.FIRST) {
+      this.selectionQuery.function("FIRST", columnName).as(targetName);
+    } else if (columnFunction == ColumnFunction.LAST) {
+      this.selectionQuery.function("LAST", columnName).as(targetName);
+    }
+
+    // TODO implement all column functions
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withStartTime(long startTime) {
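+    // the timestamp is expected in milliseconds and converted to nanoseconds for InfluxDB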
+    this.whereClauses.add(new SimpleClause("time", ">=", startTime * 1000000));
+    return this;
+  }
+
+
+  public DataLakeQueryBuilder withEndTime(long endTime) {
+    return withEndTime(endTime, true);
+  }
+
+  public DataLakeQueryBuilder withEndTime(long endTime,
+                                          boolean includeEndTime) {
+    String operator = includeEndTime ? "<=" : "<";
+    this.whereClauses.add(new SimpleClause("time", operator, endTime * 1000000));
+    return this;
+  }
+
+  public DataLakeQueryBuilder withTimeBoundary(long startTime,
+                                               long endTime) {
+    this.withStartTime(startTime);
+    this.withEndTime(endTime);
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withFilter(String field,
+                                         String operator,
+                                         Object value) {
+    this.whereClauses.add(new SimpleClause(field, operator, value));
+    return this;
+  }
+
+  public DataLakeQueryBuilder withExclusiveFilter(String field,
+                                                  String operator,
+                                                  List<?> values) {
+    List<ConjunctionClause> or = new ArrayList<>();
+    values.forEach(value -> {
+      or.add(new OrConjunction(new SimpleClause(field, operator, value)));
+    });
+
+    NestedClause nestedClause = new NestedClause(or);
+    this.whereClauses.add(nestedClause);
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withFilter(NestedClause clause) {
+    this.whereClauses.add(clause);
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withGroupByTime(String timeInterval) {
+
+    this.groupByClauses.add(new RawTextClause("time(" + timeInterval + ")"));
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withGroupBy(String column) {
+
+    this.groupByClauses.add(new RawTextClause(column));
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withOrderBy(DataLakeQueryOrdering ordering) {
+    if (DataLakeQueryOrdering.ASC.equals(ordering)) {
+      this.ordering = asc();
+    } else {
+      this.ordering = desc();
+    }
+
+    return this;
+  }
+
+  public DataLakeQueryBuilder withLimit(int limit) {
+    this.limit = limit;
+
+    return this;
+  }
+
+  public Query build() {
+    var selectQuery = this.selectionQuery.from(BackendConfig.INSTANCE.getInfluxDatabaseName(), "\"" + measurementId + "\"");
+    this.whereClauses.forEach(selectQuery::where);
+
+    if (this.groupByClauses.size() > 0) {
+      selectQuery.groupBy(this.groupByClauses.toArray());
+    }
+
+    if (this.ordering != null) {
+     selectQuery.orderBy(this.ordering);
+    }
+
+    if (this.limit != Integer.MIN_VALUE) {
+      selectQuery.limit(this.limit);
+    }
+
+    return selectQuery;
+  }
+}
diff --git a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryConstants.java
similarity index 70%
copy from ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
copy to streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryConstants.java
index 58ba04b..f6664f4 100644
--- a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryConstants.java
@@ -16,3 +16,15 @@
  *
  */
 
+package org.apache.streampipes.dataexplorer.sdk;
+
+public class DataLakeQueryConstants {
+
+  public static final String GE = ">=";
+  public static final String LE = "<=";
+  public static final String LT = "<";
+  public static final String GT = ">";
+  public static final String EQ = "=";
+  public static final String NEQ = "!=";
+
+}
diff --git a/ui/src/app/pipeline-details/pipeline-details.component.scss b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryOrdering.java
similarity index 88%
copy from ui/src/app/pipeline-details/pipeline-details.component.scss
copy to streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryOrdering.java
index a375af7..6952d0d 100644
--- a/ui/src/app/pipeline-details/pipeline-details.component.scss
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/sdk/DataLakeQueryOrdering.java
@@ -16,7 +16,8 @@
  *
  */
 
-.md-padding {
-  padding: 10px;
-}
+package org.apache.streampipes.dataexplorer.sdk;
 
+public enum DataLakeQueryOrdering {
+    ASC, DESC
+}
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/SupportedDataLakeQueryParameters.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/SupportedDataLakeQueryParameters.java
index 3a08624..1eadd81 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/SupportedDataLakeQueryParameters.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/SupportedDataLakeQueryParameters.java
@@ -33,6 +33,7 @@
   public static final String QP_AGGREGATION_FUNCTION = "aggregationFunction";
   public static final String QP_TIME_INTERVAL = "timeInterval";
   public static final String QP_FORMAT = "format";
+  public static final String QP_CSV_DELIMITER = "delimiter";
   public static final String QP_COUNT_ONLY = "countOnly";
   public static final String QP_AUTO_AGGREGATE = "autoAggregate";
   public static final String QP_FILTER = "filter";
@@ -50,6 +51,7 @@
           QP_AGGREGATION_FUNCTION,
           QP_TIME_INTERVAL,
           QP_FORMAT,
+          QP_CSV_DELIMITER,
           QP_COUNT_ONLY,
           QP_AUTO_AGGREGATE,
           QP_FILTER,
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/SelectColumn.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/SelectColumn.java
index 3b2a5f4..d2d0ba9 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/SelectColumn.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/SelectColumn.java
@@ -71,16 +71,16 @@
 
   private String makeField() {
     if (this.simpleField) {
-      return this.originalField;
+      return "\"" + this.originalField + "\"";
     } else {
-      return this.columnFunction.toDbName() + "(" + this.originalField + ")";
+      return this.columnFunction.toDbName() + "(\"" + this.originalField + "\")";
     }
   }
 
   public String toQueryString() {
     String field = makeField();
     if (this.rename) {
-      return field + " AS " + this.targetField;
+      return field + " AS \"" + this.targetField + "\"";
     } else {
       return field;
     }
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/WhereStatementParams.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/WhereStatementParams.java
index e3dbd84..a0af420 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/WhereStatementParams.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/params/WhereStatementParams.java
@@ -103,6 +103,8 @@
   private String returnCondition(String inputCondition) {
     if (NumberUtils.isCreatable(inputCondition) || Boolean.parseBoolean(inputCondition)) {
       return inputCondition;
+    } else if (inputCondition.equals("\"\"")) {
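+      // an explicitly quoted empty string is passed through without adding single quotes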
+      return inputCondition;
     } else {
       return "'" + inputCondition + "'";
     }
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/DataExplorerQueryV4.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/DataExplorerQueryV4.java
index c1a5121..30a87d4 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/DataExplorerQueryV4.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/DataExplorerQueryV4.java
@@ -44,6 +44,20 @@
 
     protected int maximumAmountOfEvents;
 
+    private boolean appendId = false;
+    private String forId;
+
+    public DataExplorerQueryV4() {
+
+    }
+
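+    // builds a query whose result is tagged with the id of the requesting widget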
+    public DataExplorerQueryV4(Map<String, QueryParamsV4> params,
+                               String forId) {
+        this(params);
+        this.appendId = true;
+        this.forId = forId;
+    }
+
     public DataExplorerQueryV4(Map<String, QueryParamsV4> params) {
         this.params = params;
         this.maximumAmountOfEvents = -1;
@@ -61,7 +75,8 @@
         if (this.maximumAmountOfEvents != -1) {
             QueryBuilder countQueryBuilder = QueryBuilder.create(BackendConfig.INSTANCE.getInfluxDatabaseName());
             Query countQuery = countQueryBuilder.build(queryElements, true);
-            Double amountOfQueryResults = getAmountOfResults(influxDB.query(countQuery));
+            QueryResult countQueryResult = influxDB.query(countQuery);
+            Double amountOfQueryResults = getAmountOfResults(countQueryResult);
             if (amountOfQueryResults > this.maximumAmountOfEvents) {
                 SpQueryResult tooMuchData = new SpQueryResult();
                 tooMuchData.setSpQueryStatus(SpQueryStatus.TOO_MUCH_DATA);
@@ -84,6 +99,15 @@
         return dataResult;
     }
 
+    public SpQueryResult executeQuery(Query query) {
+        InfluxDB influxDB = DataExplorerUtils.getInfluxDBClient();
+        QueryResult result = influxDB.query(query);
+        SpQueryResult dataResult = postQuery(result);
+        influxDB.close();
+
+        return dataResult;
+    }
+
     private double getAmountOfResults(QueryResult countQueryResult) {
         if (countQueryResult.getResults().get(0).getSeries() != null &&
                 countQueryResult.getResults().get(0).getSeries().get(0).getValues() != null) {
@@ -120,6 +144,11 @@
                 result.addDataResult(series);
             });
         }
+
+        if (this.appendId) {
+            result.setForId(this.forId);
+        }
+
         return result;
     }
 
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/elements/SelectFromStatement.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/elements/SelectFromStatement.java
index 731d348..9630367 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/elements/SelectFromStatement.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/query/elements/SelectFromStatement.java
@@ -23,6 +23,7 @@
 import java.util.StringJoiner;
 
 public class SelectFromStatement extends QueryElement<SelectFromStatementParams> {
+
   public SelectFromStatement(SelectFromStatementParams selectFromStatementParams) {
     super(selectFromStatementParams);
   }
@@ -30,11 +31,11 @@
   @Override
   protected String buildStatement(SelectFromStatementParams params) {
     if (params.isSelectWildcard()) {
-      return "SELECT * FROM " + params.getIndex();
+      return "SELECT * FROM " + escapeIndex(params.getIndex());
     } else {
       StringJoiner joiner = new StringJoiner(",");
       String queryPrefix = "SELECT ";
-      String queryAppendix = " FROM " +params.getIndex();
+      String queryAppendix = " FROM " + escapeIndex(params.getIndex());
 
       params.getSelectedColumns().forEach(column -> {
         joiner.add(column.toQueryString());
@@ -42,12 +43,9 @@
 
       return queryPrefix + joiner + queryAppendix;
     }
-//        if (selectFromStatementParams.isCountOnly()) {
-//            return QueryTemplatesV4.selectCountFrom(selectFromStatementParams.getIndex(), selectFromStatementParams.getSelectedColumns());
-//        } else if (selectFromStatementParams.getAggregationFunction() == null) {
-//            return QueryTemplatesV4.selectFrom(selectFromStatementParams.getIndex(), selectFromStatementParams.getSelectedColumns());
-//        } else {
-//            return QueryTemplatesV4.selectAggregationFrom(selectFromStatementParams.getIndex(), selectFromStatementParams.getSelectedColumns(), selectFromStatementParams.getAggregationFunction());
-//        }
+  }
+
+  private String escapeIndex(String index) {
+    return "\"" + index + "\"";
   }
 }
diff --git a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/template/QueryTemplatesV4.java b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/template/QueryTemplatesV4.java
index 0023cc2..dd681d6 100644
--- a/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/template/QueryTemplatesV4.java
+++ b/streampipes-data-explorer/src/main/java/org/apache/streampipes/dataexplorer/v4/template/QueryTemplatesV4.java
@@ -42,11 +42,11 @@
             joiner.add(builder);
         }
 
-        return "SELECT " + joiner + " FROM " + index;
+        return "SELECT " + joiner + " FROM \"" + index + "\"";
     }
 
     public static String deleteFrom(String index) {
-        return "DELETE FROM " + index;
+        return "DELETE FROM \"" + index + "\"";
     }
 
     public static String whereTimeWithin(long startDate, long endDate) {
diff --git a/streampipes-data-export/pom.xml b/streampipes-data-export/pom.xml
new file mode 100644
index 0000000..8dc44bf
--- /dev/null
+++ b/streampipes-data-export/pom.xml
@@ -0,0 +1,52 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  ~
+  -->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>streampipes-parent</artifactId>
+        <groupId>org.apache.streampipes</groupId>
+        <version>0.71.0-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>streampipes-data-export</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-model</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-pipeline-management</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-resource-management</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.streampipes</groupId>
+            <artifactId>streampipes-storage-management</artifactId>
+            <version>0.71.0-SNAPSHOT</version>
+        </dependency>
+    </dependencies>
+
+</project>
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/AssetLinkCollector.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/AssetLinkCollector.java
new file mode 100644
index 0000000..ba3e16d
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/AssetLinkCollector.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export;
+
+import org.apache.streampipes.model.assets.AssetLink;
+import org.apache.streampipes.model.assets.SpAsset;
+import org.apache.streampipes.model.assets.SpAssetModel;
+
+import java.util.HashSet;
+import java.util.Set;
+
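+/**
+ * Recursively collects all asset links of an asset model, including those of nested assets.
+ */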
+public class AssetLinkCollector {
+
+  private SpAssetModel assetModel;
+
+  public AssetLinkCollector(SpAssetModel assetModel) {
+    this.assetModel = assetModel;
+  }
+
+  public Set<AssetLink> collectAssetLinks() {
+    var assetLinks = new HashSet<>(assetModel.getAssetLinks());
+    assetModel.getAssets().forEach(asset -> addLinks(assetLinks, asset));
+
+    return assetLinks;
+  }
+
+  private void addLinks(HashSet<AssetLink> assetLinks,
+                        SpAsset asset) {
+    assetLinks.addAll(asset.getAssetLinks());
+    if (asset.getAssets() != null) {
+      asset.getAssets().forEach(a -> addLinks(assetLinks, a));
+    }
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/AssetLinkResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/AssetLinkResolver.java
new file mode 100644
index 0000000..fb24122
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/AssetLinkResolver.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.streampipes.export.constants.ResolvableAssetLinks;
+import org.apache.streampipes.export.resolver.*;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.assets.AssetLink;
+import org.apache.streampipes.model.assets.SpAssetModel;
+import org.apache.streampipes.model.export.AssetExportConfiguration;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+
+import java.io.IOException;
+import java.util.Map;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public class AssetLinkResolver {
+
+  private final String assetId;
+  private final ObjectMapper mapper;
+
+  public AssetLinkResolver(String assetId) {
+    this.assetId = assetId;
+    this.mapper = SerializationUtils.getDefaultObjectMapper();
+  }
+
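+  /**
+   * Resolves all resources linked to the asset into an export configuration;
+   * falls back to an empty configuration if the asset cannot be read.
+   */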
+  public AssetExportConfiguration resolveResources() {
+
+    try {
+      var asset = getAsset();
+      var assetLinks = new AssetLinkCollector(asset).collectAssetLinks();
+      var exportConfig = new AssetExportConfiguration();
+      exportConfig.setAssetId(this.assetId);
+      exportConfig.setAssetName(asset.getAssetName());
+      exportConfig.setAdapters(new AdapterResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.ADAPTER)));
+      exportConfig.setDashboards(new DashboardResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.DASHBOARD)));
+      exportConfig.setDataViews(new DataViewResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.DATA_VIEW)));
+      exportConfig.setDataSources(new DataSourceResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.DATA_SOURCE)));
+      exportConfig.setPipelines(new PipelineResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.PIPELINE)));
+      exportConfig.setDataLakeMeasures(new MeasurementResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.MEASUREMENT)));
+      exportConfig.setFiles(new FileResolver().resolve(getLinks(assetLinks, ResolvableAssetLinks.FILE)));
+
+      return exportConfig;
+    } catch (IOException e) {
+      e.printStackTrace();
+      return new AssetExportConfiguration();
+    }
+  }
+
+  private Set<AssetLink> getLinks(Set<AssetLink> assetLinks,
+                                  String queryHint) {
+    return assetLinks
+      .stream()
+      .filter(link -> link.getQueryHint().equals(queryHint))
+      .collect(Collectors.toSet());
+  }
+
+  private SpAssetModel getAsset() throws IOException {
+    return deserialize(StorageDispatcher.INSTANCE.getNoSqlStore().getGenericStorage().findOne(this.assetId));
+  }
+
+  private SpAssetModel deserialize(Map<String, Object> asset) {
+    return this.mapper.convertValue(asset, SpAssetModel.class);
+  }
+
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/ExportManager.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/ExportManager.java
new file mode 100644
index 0000000..1ceaa54
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/ExportManager.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export;
+
+import org.apache.streampipes.export.generator.ExportPackageGenerator;
+import org.apache.streampipes.model.export.ExportConfiguration;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
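+/**
+ * Entry point for exporting selected assets: builds a preview of the resources that would be
+ * exported and generates the downloadable export package.
+ */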
+public class ExportManager {
+
+  public static ExportConfiguration getExportPreview(List<String> selectedAssetIds) {
+    var exportConfig = new ExportConfiguration();
+    var assetExportConfigurations = selectedAssetIds
+      .stream()
+      .map(assetId -> new AssetLinkResolver(assetId).resolveResources())
+      .collect(Collectors.toList());
+
+    exportConfig.setAssetExportConfiguration(assetExportConfigurations);
+
+    return exportConfig;
+  }
+
+  public static byte[] getExportPackage(ExportConfiguration exportConfiguration) throws IOException {
+    return new ExportPackageGenerator(exportConfiguration).generateExportPackage();
+  }
+
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/ImportManager.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/ImportManager.java
new file mode 100644
index 0000000..58bfe95
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/ImportManager.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export;
+
+import org.apache.streampipes.export.dataimport.PerformImportGenerator;
+import org.apache.streampipes.export.dataimport.PreviewImportGenerator;
+import org.apache.streampipes.model.export.AssetExportConfiguration;
+
+import java.io.IOException;
+import java.io.InputStream;
+
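+/**
+ * Entry point for importing a previously exported application package: provides a preview of the
+ * package contents and performs the actual import for the selected items.
+ */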
+public class ImportManager {
+
+  public static AssetExportConfiguration getImportPreview(InputStream packageZipStream) throws IOException {
+    return new PreviewImportGenerator().generate(packageZipStream);
+  }
+
+  public static void performImport(InputStream packageZipStream,
+                                   AssetExportConfiguration exportConfiguration,
+                                   String ownerSid) throws IOException {
+    new PerformImportGenerator(exportConfiguration, ownerSid).generate(packageZipStream);
+  }
+}
diff --git a/ui/src/app/pipeline-details/pipeline-details.component.scss b/streampipes-data-export/src/main/java/org/apache/streampipes/export/constants/ExportConstants.java
similarity index 85%
copy from ui/src/app/pipeline-details/pipeline-details.component.scss
copy to streampipes-data-export/src/main/java/org/apache/streampipes/export/constants/ExportConstants.java
index a375af7..80ec171 100644
--- a/ui/src/app/pipeline-details/pipeline-details.component.scss
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/constants/ExportConstants.java
@@ -16,7 +16,9 @@
  *
  */
 
-.md-padding {
-  padding: 10px;
-}
+package org.apache.streampipes.export.constants;
 
+public class ExportConstants {
+
+  public static final String MANIFEST = "manifest";
+}
diff --git a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss b/streampipes-data-export/src/main/java/org/apache/streampipes/export/constants/ResolvableAssetLinks.java
similarity index 63%
copy from ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
copy to streampipes-data-export/src/main/java/org/apache/streampipes/export/constants/ResolvableAssetLinks.java
index 58ba04b..35588da 100644
--- a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/constants/ResolvableAssetLinks.java
@@ -16,3 +16,15 @@
  *
  */
 
+package org.apache.streampipes.export.constants;
+
+public class ResolvableAssetLinks {
+
+  public static final String DATA_VIEW = "data-view";
+  public static final String DASHBOARD = "dashboard";
+  public static final String MEASUREMENT = "measurement";
+  public static final String ADAPTER = "adapter";
+  public static final String DATA_SOURCE = "data-source";
+  public static final String PIPELINE = "pipeline";
+  public static final String FILE = "file";
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/ImportGenerator.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/ImportGenerator.java
new file mode 100644
index 0000000..23bd41d
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/ImportGenerator.java
@@ -0,0 +1,170 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.dataimport;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.streampipes.commons.zip.ZipFileExtractor;
+import org.apache.streampipes.export.constants.ExportConstants;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.export.StreamPipesApplicationPackage;
+import org.lightcouch.DocumentConflictException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.util.Map;
+
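+/**
+ * Template for processing an exported application package: extracts the zip archive, reads the
+ * manifest and delegates each contained resource to a type-specific handler. Resources that
+ * already exist are skipped and logged.
+ */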
+public abstract class ImportGenerator<T> {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ImportGenerator.class);
+
+  protected ObjectMapper spMapper;
+  protected ObjectMapper defaultMapper;
+
+  public ImportGenerator() {
+    this.spMapper = SerializationUtils.getSpObjectMapper();
+    this.defaultMapper = SerializationUtils.getDefaultObjectMapper();
+  }
+
+  public T generate(InputStream inputStream) throws IOException {
+    Map<String, byte[]> previewFiles = new ZipFileExtractor(inputStream).extractZipToMap();
+
+    var manifest = getManifest(previewFiles);
+
+    for (String assetId : manifest.getAssets()) {
+      try {
+        handleAsset(previewFiles, assetId);
+      } catch (DocumentConflictException | IOException e) {
+        LOG.warn("Skipping import of asset model {} (already present with the same id or not readable)", assetId);
+      }
+    }
+
+    for (String adapterId : manifest.getAdapters()) {
+      try {
+        handleAdapter(asString(previewFiles.get(adapterId)), adapterId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of adapter {} (already present with the same id)", adapterId);
+      }
+    }
+
+    for (String dashboardId : manifest.getDashboards()) {
+      try {
+        handleDashboard(asString(previewFiles.get(dashboardId)), dashboardId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of dashboard {} (already present with the same id)", dashboardId);
+      }
+    }
+
+    for (String dataViewId : manifest.getDataViews()) {
+      try {
+        handleDataView(asString(previewFiles.get(dataViewId)), dataViewId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of data view {} (already present with the same id)", dataViewId);
+      }
+    }
+
+    for (String dataSourceId : manifest.getDataSources()) {
+      try {
+        handleDataSource(asString(previewFiles.get(dataSourceId)), dataSourceId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of data source {} (already present with the same id)", dataSourceId);
+      }
+    }
+
+    for (String pipelineId : manifest.getPipelines()) {
+      try {
+        handlePipeline(asString(previewFiles.get(pipelineId)), pipelineId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of pipeline {} (already present with the same id)", pipelineId);
+      }
+    }
+
+    for (String measurementId : manifest.getDataLakeMeasures()) {
+      try {
+        handleDataLakeMeasure(asString(previewFiles.get(measurementId)), measurementId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of data lake measure {} (already present with the same id)", measurementId);
+      }
+    }
+
+    for (String dashboardWidgetId : manifest.getDashboardWidgets()) {
+      try {
+        handleDashboardWidget(asString(previewFiles.get(dashboardWidgetId)), dashboardWidgetId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of dashboard widget {} (already present with the same id)", dashboardWidgetId);
+      }
+    }
+
+    for (String dataViewWidgetId : manifest.getDataViewWidgets()) {
+      try {
+        handleDataViewWidget(asString(previewFiles.get(dataViewWidgetId)), dataViewWidgetId);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of data view widget {} (already present with the same id)", dataViewWidgetId);
+      }
+    }
+
+    for (String fileMetadataId : manifest.getFiles()) {
+      try {
+        handleFile(asString(previewFiles.get(fileMetadataId)), fileMetadataId, previewFiles);
+      } catch (DocumentConflictException e) {
+        LOG.warn("Skipping import of file {} (already present with the same id)", fileMetadataId);
+      }
+    }
+
+    afterResourcesCreated();
+
+    return getReturnObject();
+  }
+
+  protected String asString(byte[] bytes) {
+    return new String(bytes, StandardCharsets.UTF_8);
+  }
+
+  private StreamPipesApplicationPackage getManifest(Map<String, byte[]> previewFiles) throws JsonProcessingException {
+    return this.defaultMapper.readValue(asString(previewFiles.get(ExportConstants.MANIFEST)), StreamPipesApplicationPackage.class);
+  }
+
+  protected abstract void handleAsset(Map<String, byte[]> previewFiles, String assetId) throws IOException;
+
+  protected abstract void handleAdapter(String document, String adapterId) throws JsonProcessingException;
+
+  protected abstract void handleDashboard(String document, String dashboardId) throws JsonProcessingException;
+
+  protected abstract void handleDataView(String document, String dataViewId) throws JsonProcessingException;
+
+  protected abstract void handleDataSource(String document, String dataSourceId) throws JsonProcessingException;
+
+  protected abstract void handlePipeline(String document, String pipelineId) throws JsonProcessingException;
+
+  protected abstract void handleDataLakeMeasure(String document, String dataLakeMeasureId) throws JsonProcessingException;
+
+  protected abstract void handleDashboardWidget(String document, String dashboardWidgetId) throws JsonProcessingException;
+
+  protected abstract void handleDataViewWidget(String document, String dataViewWidgetId) throws JsonProcessingException;
+
+  protected abstract void handleFile(String document, String fileMetadataId, Map<String, byte[]> zipContent) throws IOException;
+
+  protected abstract T getReturnObject();
+
+  protected abstract void afterResourcesCreated();
+
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/PerformImportGenerator.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/PerformImportGenerator.java
new file mode 100644
index 0000000..f4345f9
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/PerformImportGenerator.java
@@ -0,0 +1,154 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.dataimport;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.model.PermissionInfo;
+import org.apache.streampipes.export.resolver.*;
+import org.apache.streampipes.manager.file.FileHandler;
+import org.apache.streampipes.model.SpDataStream;
+import org.apache.streampipes.model.connect.adapter.AdapterDescription;
+import org.apache.streampipes.model.dashboard.DashboardModel;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.export.AssetExportConfiguration;
+import org.apache.streampipes.model.export.ExportItem;
+import org.apache.streampipes.model.pipeline.Pipeline;
+import org.apache.streampipes.resource.management.PermissionResourceManager;
+import org.apache.streampipes.storage.api.INoSqlStorage;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+
+import java.io.ByteArrayInputStream;
+import java.io.IOException;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
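+/**
+ * Import generator that writes the selected resources to the storage and creates default
+ * permissions for the importing user once all resources have been created.
+ */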
+public class PerformImportGenerator extends ImportGenerator<Void> {
+
+  private AssetExportConfiguration config;
+  private INoSqlStorage storage;
+  private Set<PermissionInfo> permissionsToStore = new HashSet<>();
+  private String ownerSid;
+
+  public PerformImportGenerator(AssetExportConfiguration config,
+                                String ownerSid) {
+    this.config = config;
+    this.storage = StorageDispatcher.INSTANCE.getNoSqlStore();
+    this.ownerSid = ownerSid;
+  }
+
+  @Override
+  protected void handleAsset(Map<String, byte[]> previewFiles, String assetId) throws IOException {
+    storage.getGenericStorage().create(asString(previewFiles.get(assetId)));
+  }
+
+  @Override
+  protected void handleAdapter(String document, String adapterId) throws JsonProcessingException {
+    if (shouldStore(adapterId, config.getAdapters())) {
+      new AdapterResolver().writeDocument(document, config.isOverrideBrokerSettings());
+      permissionsToStore.add(new PermissionInfo(adapterId, AdapterDescription.class));
+    }
+  }
+
+  @Override
+  protected void handleDashboard(String document, String dashboardId) throws JsonProcessingException {
+    if (shouldStore(dashboardId, config.getDashboards())) {
+      new DashboardResolver().writeDocument(document);
+      permissionsToStore.add(new PermissionInfo(dashboardId, DashboardModel.class));
+    }
+  }
+
+  @Override
+  protected void handleDataView(String document, String dataViewId) throws JsonProcessingException {
+    if (shouldStore(dataViewId, config.getDataViews())) {
+      new DataViewResolver().writeDocument(document);
+      permissionsToStore.add(new PermissionInfo(dataViewId, DashboardModel.class));
+    }
+  }
+
+  @Override
+  protected void handleDataSource(String document, String dataSourceId) throws JsonProcessingException {
+    if (shouldStore(dataSourceId, config.getDataSources())) {
+      new DataSourceResolver().writeDocument(document, config.isOverrideBrokerSettings());
+      permissionsToStore.add(new PermissionInfo(dataSourceId, SpDataStream.class));
+    }
+  }
+
+  @Override
+  protected void handlePipeline(String document, String pipelineId) throws JsonProcessingException {
+    if (shouldStore(pipelineId, config.getPipelines())) {
+      new PipelineResolver().writeDocument(document, config.isOverrideBrokerSettings());
+      permissionsToStore.add(new PermissionInfo(pipelineId, Pipeline.class));
+    }
+  }
+
+  @Override
+  protected void handleDataLakeMeasure(String document, String dataLakeMeasureId) throws JsonProcessingException {
+    if (shouldStore(dataLakeMeasureId, config.getDataLakeMeasures())) {
+      new MeasurementResolver().writeDocument(document);
+      permissionsToStore.add(new PermissionInfo(dataLakeMeasureId, DataLakeMeasure.class));
+    }
+  }
+
+  @Override
+  protected void handleDashboardWidget(String document, String dashboardWidgetId) throws JsonProcessingException {
+    new DashboardWidgetResolver().writeDocument(document);
+  }
+
+  @Override
+  protected void handleDataViewWidget(String document, String dataViewWidget) throws JsonProcessingException {
+    new DataViewWidgetResolver().writeDocument(document);
+  }
+
+  @Override
+  protected void handleFile(String document,
+                            String fileMetadataId,
+                            Map<String, byte[]> zipContent) throws IOException {
+    var resolver = new FileResolver();
+    var fileMetadata = resolver.readDocument(document);
+    resolver.writeDocument(document);
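+    // the zip entry is keyed by the internal file name without its extension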
+    byte[] file = zipContent.get(fileMetadata.getInternalFilename().substring(0, fileMetadata.getInternalFilename().lastIndexOf(".")));
+    new FileHandler().storeFile(fileMetadata.getInternalFilename(), new ByteArrayInputStream(file));
+  }
+
+  @Override
+  protected Void getReturnObject() {
+    return null;
+  }
+
+  @Override
+  protected void afterResourcesCreated() {
+    var resourceManager = new PermissionResourceManager();
+    this.permissionsToStore
+      .forEach(info -> resourceManager.createDefault(
+        info.getInstanceId(),
+        info.getInstanceClass(),
+        this.ownerSid,
+        true));
+  }
+
+  private boolean shouldStore(String resourceId,
+                              Set<ExportItem> items) {
+    // only import resources that the user left selected in the import configuration
+    return items
+      .stream()
+      .filter(item -> item.getResourceId().equals(resourceId))
+      .allMatch(ExportItem::isSelected);
+  }
+
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/PreviewImportGenerator.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/PreviewImportGenerator.java
new file mode 100644
index 0000000..80c4894
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/dataimport/PreviewImportGenerator.java
@@ -0,0 +1,109 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.dataimport;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.streampipes.export.resolver.*;
+import org.apache.streampipes.model.export.AssetExportConfiguration;
+import org.apache.streampipes.model.export.ExportItem;
+
+import java.util.Map;
+import java.util.function.Consumer;
+
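+/**
+ * Import generator that only builds a preview of the package contents so the user can select
+ * which items to import; no resources are written.
+ */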
+public class PreviewImportGenerator extends ImportGenerator<AssetExportConfiguration> {
+
+  private AssetExportConfiguration importConfig;
+
+  public PreviewImportGenerator() {
+    super();
+    this.importConfig = new AssetExportConfiguration();
+  }
+
+  private void addExportItem(String id,
+                             String name,
+                             Consumer<ExportItem> addItem) {
+    var item = new ExportItem(id, name, true);
+    addItem.accept(item);
+  }
+
+
+  @Override
+  protected void handleAsset(Map<String, byte[]> previewFiles, String assetId) throws JsonProcessingException {
+    Map<String, Object> assetDescription = this.defaultMapper.readValue(asString(previewFiles.get(assetId)), new TypeReference<Map<String, Object>>() {});
+    importConfig.addAsset(new ExportItem(assetId, String.valueOf(assetDescription.get("assetName")), true));
+  }
+
+  @Override
+  protected void handleAdapter(String document, String adapterId) throws JsonProcessingException {
+    addExportItem(adapterId, new AdapterResolver().readDocument(document).getName(), importConfig::addAdapter);
+  }
+
+  @Override
+  protected void handleDashboard(String document, String dashboardId) throws JsonProcessingException {
+    addExportItem(dashboardId, new DashboardResolver().readDocument(document).getName(), importConfig::addDashboard);
+  }
+
+  @Override
+  protected void handleDataView(String document, String dataViewId) throws JsonProcessingException {
+    addExportItem(dataViewId, new DataViewResolver().readDocument(document).getName(), importConfig::addDataView);
+  }
+
+  @Override
+  protected void handleDataSource(String document, String dataSourceId) throws JsonProcessingException {
+    addExportItem(dataSourceId, new DataSourceResolver().readDocument(document).getName(), importConfig::addDataSource);
+  }
+
+  @Override
+  protected void handlePipeline(String document, String pipelineId) throws JsonProcessingException {
+    addExportItem(pipelineId, new PipelineResolver().readDocument(document).getName(), importConfig::addPipeline);
+  }
+
+  @Override
+  protected void handleDataLakeMeasure(String document, String measurementId) throws JsonProcessingException {
+    addExportItem(measurementId, new MeasurementResolver().readDocument(document).getMeasureName(), importConfig::addDataLakeMeasure);
+  }
+
+  @Override
+  protected void handleDashboardWidget(String document, String dashboardWidgetId) {
+
+  }
+
+  @Override
+  protected void handleDataViewWidget(String document, String dataViewWidget) {
+
+  }
+
+  @Override
+  protected void handleFile(String document,
+                            String fileMetadataId,
+                            Map<String, byte[]> zipContent) throws JsonProcessingException {
+    addExportItem(fileMetadataId, new FileResolver().readDocument(document).getOriginalFilename(), importConfig::addFile);
+  }
+
+  @Override
+  protected AssetExportConfiguration getReturnObject() {
+    return this.importConfig;
+  }
+
+  @Override
+  protected void afterResourcesCreated() {
+  }
+}
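
For orientation, a hedged sketch of how the preview produced by this class might be inspected before the real import runs. Only the AssetExportConfiguration getters and ExportItem accessors that appear elsewhere in this patch are used; the helper method itself is illustrative.

// Hedged sketch: prints the export items collected by PreviewImportGenerator so a
// user can deselect entries before the actual import is triggered.
static void printPreview(AssetExportConfiguration preview) {
  preview.getAdapters().forEach(item ->
      System.out.println("adapter " + item.getResourceId() + " selected=" + item.isSelected()));
  preview.getPipelines().forEach(item ->
      System.out.println("pipeline " + item.getResourceId() + " selected=" + item.isSelected()));
}
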
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/generator/ExportPackageGenerator.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/generator/ExportPackageGenerator.java
new file mode 100644
index 0000000..8073dd5
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/generator/ExportPackageGenerator.java
@@ -0,0 +1,177 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.generator;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.streampipes.commons.exceptions.ElementNotFoundException;
+import org.apache.streampipes.export.resolver.*;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.manager.file.FileManager;
+import org.apache.streampipes.model.export.AssetExportConfiguration;
+import org.apache.streampipes.model.export.ExportConfiguration;
+import org.apache.streampipes.model.export.ExportItem;
+import org.apache.streampipes.model.export.StreamPipesApplicationPackage;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
+
+public class ExportPackageGenerator {
+
+  private static final Logger LOG = LoggerFactory.getLogger(ExportPackageGenerator.class);
+
+  private final ExportConfiguration exportConfiguration;
+  private ObjectMapper defaultMapper;
+  private ObjectMapper spMapper;
+
+  public ExportPackageGenerator(ExportConfiguration exportConfiguration) {
+    this.exportConfiguration = exportConfiguration;
+    this.defaultMapper = SerializationUtils.getDefaultObjectMapper();
+    this.spMapper = SerializationUtils.getSpObjectMapper();
+  }
+
+  public byte[] generateExportPackage() throws IOException {
+    ZipFileBuilder builder = ZipFileBuilder.create();
+    var manifest = new StreamPipesApplicationPackage();
+
+    addAssets(builder, exportConfiguration
+      .getAssetExportConfiguration()
+      .stream()
+      .map(AssetExportConfiguration::getAssetId)
+      .collect(Collectors.toList()), manifest);
+
+    this.exportConfiguration.getAssetExportConfiguration().forEach(config -> {
+
+      config.getAdapters().forEach(item -> addDoc(builder,
+        item,
+        new AdapterResolver(),
+        manifest::addAdapter));
+
+      config.getDashboards().forEach(item -> {
+        var resolver = new DashboardResolver();
+        addDoc(builder,
+          item,
+          resolver,
+          manifest::addDashboard);
+
+        var widgets = resolver.getWidgets(item.getResourceId());
+        var widgetResolver = new DashboardWidgetResolver();
+        widgets.forEach(widgetId -> addDoc(builder, widgetId, widgetResolver, manifest::addDashboardWidget));
+      });
+
+      config.getDataSources().forEach(item -> addDoc(builder,
+        item,
+        new DataSourceResolver(),
+        manifest::addDataSource));
+
+      config.getDataLakeMeasures().forEach(item -> addDoc(builder,
+        item,
+        new MeasurementResolver(),
+        manifest::addDataLakeMeasure));
+
+      config.getPipelines().forEach(item -> addDoc(builder,
+        item,
+        new PipelineResolver(),
+        manifest::addPipeline));
+
+      config.getDataViews().forEach(item -> {
+        var resolver = new DataViewResolver();
+        addDoc(builder,
+          item,
+          resolver,
+          manifest::addDataView);
+
+        var widgets = resolver.getWidgets(item.getResourceId());
+        var widgetResolver = new DataViewWidgetResolver();
+        widgets.forEach(widgetId -> addDoc(builder, widgetId, widgetResolver, manifest::addDataViewWidget));
+      });
+
+      config.getFiles().forEach(item -> {
+        var fileResolver = new FileResolver();
+        String filename = fileResolver.findDocument(item.getResourceId()).getInternalFilename();
+        addDoc(builder, item, new FileResolver(), manifest::addFile);
+        try {
+          builder.addBinary(filename, Files.readAllBytes(FileManager.getFile(filename).toPath()));
+        } catch (IOException e) {
+          LOG.error("Could not read file {} for export", filename, e);
+        }
+      });
+    });
+
+    builder.addManifest(defaultMapper.writeValueAsString(manifest));
+
+
+    return builder.buildZip();
+  }
+
+  private void addDoc(ZipFileBuilder builder,
+                      String resourceId,
+                      AbstractResolver<?> resolver,
+                      Consumer<String> function) {
+    addDoc(builder, new ExportItem(resourceId, "", true), resolver, function);
+  }
+
+  private void addDoc(ZipFileBuilder builder,
+                      ExportItem exportItem,
+                      AbstractResolver<?> resolver,
+                      Consumer<String> function) {
+    try {
+      var resourceId = exportItem.getResourceId();
+      var sanitizedResourceId = sanitize(resourceId);
+      builder.addText(sanitizedResourceId, resolver.getSerializedDocument(resourceId));
+      function.accept(sanitizedResourceId);
+    } catch (JsonProcessingException | ElementNotFoundException e) {
+      LOG.warn(
+        "Could not find document with resource id {} with resolver {}",
+        exportItem.getResourceId(),
+        resolver.getClass().getCanonicalName(),
+        e);
+    }
+  }
+
+  private String sanitize(String resourceId) {
+    return resourceId.replaceAll(":", "").replaceAll("\\.", "");
+  }
+
+  private void addAssets(ZipFileBuilder builder,
+                         List<String> assetIds,
+                         StreamPipesApplicationPackage manifest) {
+    assetIds.forEach(assetId -> {
+      try {
+        var asset = getAsset(assetId);
+        asset.remove("_rev");
+        builder.addText(String.valueOf(asset.get("_id")), this.defaultMapper.writeValueAsString(asset));
+        manifest.addAsset(String.valueOf(asset.get("_id")));
+      } catch (IOException e) {
+        LOG.error("Could not add asset {} to export package", assetId, e);
+      }
+    });
+  }
+
+  private Map<String, Object> getAsset(String assetId) throws IOException {
+    return StorageDispatcher.INSTANCE.getNoSqlStore().getGenericStorage().findOne(assetId);
+  }
+}
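
A short usage sketch for the generator above, assuming the ExportConfiguration is assembled elsewhere (e.g. from the UI selection) and the usual java.nio imports are present; the target path is illustrative.

// Hedged sketch: writes the generated application package to disk.
static void exportToFile(ExportConfiguration config, Path target) throws IOException {
  byte[] zip = new ExportPackageGenerator(config).generateExportPackage();
  Files.write(target, zip);
}
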
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/generator/ZipFileBuilder.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/generator/ZipFileBuilder.java
new file mode 100644
index 0000000..59edaa8
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/generator/ZipFileBuilder.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.generator;
+
+import org.apache.streampipes.export.constants.ExportConstants;
+
+import java.io.*;
+import java.nio.charset.StandardCharsets;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.zip.ZipEntry;
+import java.util.zip.ZipOutputStream;
+
+public class ZipFileBuilder {
+
+  private final Map<String, byte[]> binaryEntries;
+  private final Map<String, String> textEntries;
+  private final Map<String, File> fileEntries;
+  private String manifest;
+
+  public static ZipFileBuilder create() {
+    return new ZipFileBuilder();
+  }
+
+  private ZipFileBuilder() {
+    this.binaryEntries = new HashMap<>();
+    this.fileEntries = new HashMap<>();
+    this.textEntries = new HashMap<>();
+  }
+
+  public ZipFileBuilder addText(String filename,
+                                String content) {
+    this.textEntries.put(filename, content);
+
+    return this;
+  }
+
+  public ZipFileBuilder addBinary(String filename,
+                                  byte[] content) {
+    this.binaryEntries.put(filename, content);
+
+    return this;
+  }
+
+  public ZipFileBuilder addFile(String filename,
+                                File file) {
+    this.fileEntries.put(filename, file);
+
+    return this;
+  }
+
+  public ZipFileBuilder addManifest(String manifest) {
+    this.manifest = manifest;
+
+    return this;
+  }
+
+  public byte[] buildZip() throws IOException {
+    return makeZip();
+  }
+
+  private byte[] makeZip() throws IOException {
+    byte[] buffer = new byte[1024];
+
+    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
+    ZipOutputStream out = new ZipOutputStream(outputStream);
+
+    for (String documentKey : this.textEntries.keySet()) {
+      byte[] document = asBytes(this.textEntries.get(documentKey));
+      addZipEntry(documentKey + ".json", document, out, buffer);
+    }
+
+    for (String binary : this.binaryEntries.keySet()) {
+      addZipEntry(binary, this.binaryEntries.get(binary), out, buffer);
+    }
+
+    addZipEntry(ExportConstants.MANIFEST + ".json", asBytes(manifest), out, buffer);
+    out.closeEntry();
+    out.close();
+    return outputStream.toByteArray();
+  }
+
+  private byte[] asBytes(String document) {
+    return document.getBytes(StandardCharsets.UTF_8);
+  }
+
+  private void addZipEntry(String filename,
+                           byte[] document,
+                           ZipOutputStream out,
+                           byte[] buffer) throws IOException {
+    ZipEntry ze = new ZipEntry(filename);
+    out.putNextEntry(ze);
+
+    try (InputStream in = new ByteArrayInputStream(document)) {
+      int len;
+      while ((len = in.read(buffer)) > 0) {
+        out.write(buffer, 0, len);
+      }
+    } catch (Exception e) {
+      throw new IOException("Could not write zip entry " + filename, e);
+    }
+  }
+}
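
The builder can be exercised stand-alone; a minimal sketch, with made-up entry names and JSON literals, follows. Every builder method is defined in the class above.

// Hedged sketch: assembles a small in-memory archive.
static byte[] buildDemoArchive() throws IOException {
  return ZipFileBuilder.create()
      .addText("adapter-one", "{\"name\":\"demo adapter\"}")   // written as adapter-one.json
      .addBinary("random.bin", new byte[]{0x1, 0x2, 0x3})      // written verbatim
      .addManifest("{\"adapters\":[\"adapter-one\"]}")         // written under ExportConstants.MANIFEST + ".json"
      .buildZip();
}

Note that entries registered via addFile(...) are kept in fileEntries but are not consumed by makeZip(); only text entries, binary entries and the manifest end up in the archive.
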
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/model/PermissionInfo.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/model/PermissionInfo.java
new file mode 100644
index 0000000..5bf2c8b
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/model/PermissionInfo.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.model;
+
+public class PermissionInfo {
+
+  private String instanceId;
+  private Class<?> instanceClass;
+
+  public PermissionInfo(String instanceId, Class<?> className) {
+    this.instanceId = instanceId;
+    this.instanceClass = className;
+  }
+
+  public String getInstanceId() {
+    return instanceId;
+  }
+
+  public void setInstanceId(String instanceId) {
+    this.instanceId = instanceId;
+  }
+
+  public Class<?> getInstanceClass() {
+    return instanceClass;
+  }
+
+  public void setInstanceClass(Class<?> instanceClass) {
+    this.instanceClass = instanceClass;
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/AbstractResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/AbstractResolver.java
new file mode 100644
index 0000000..d4e0ed2
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/AbstractResolver.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.streampipes.commons.exceptions.ElementNotFoundException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.assets.AssetLink;
+import org.apache.streampipes.model.export.ExportItem;
+import org.apache.streampipes.storage.api.INoSqlStorage;
+import org.apache.streampipes.storage.management.StorageDispatcher;
+import org.lightcouch.DocumentConflictException;
+
+import java.util.Objects;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+public abstract class AbstractResolver<T> {
+
+  protected ObjectMapper spMapper;
+  protected ObjectMapper defaultMapper;
+
+  public AbstractResolver() {
+    this.spMapper = SerializationUtils.getSpObjectMapper();
+    this.defaultMapper = SerializationUtils.getDefaultObjectMapper();
+  }
+
+  public Set<ExportItem> resolve(Set<AssetLink> assetLinks) {
+    return assetLinks
+      .stream()
+      .map(link -> findDocument(link.getResourceId()))
+      .filter(this::existsDoc)
+      .map(this::convert)
+      .collect(Collectors.toSet());
+  }
+
+  protected boolean existsDoc(T doc) {
+    return Objects.nonNull(doc);
+  }
+
+  public String getSerializedDocument(String resourceId) throws JsonProcessingException, ElementNotFoundException {
+    var document = findDocument(resourceId);
+    if (document != null) {
+      return SerializationUtils.getSpObjectMapper().writeValueAsString(modifyDocumentForExport(document));
+    } else {
+      throw new ElementNotFoundException("Could not find element with resource id " + resourceId);
+    }
+  }
+
+  protected INoSqlStorage getNoSqlStore() {
+    return StorageDispatcher.INSTANCE.getNoSqlStore();
+  }
+
+  public abstract T findDocument(String resourceId);
+
+  public abstract T modifyDocumentForExport(T doc);
+
+  public abstract T readDocument(String serializedDoc) throws JsonProcessingException;
+
+  public abstract ExportItem convert(T document);
+
+  public abstract void writeDocument(String document) throws JsonProcessingException, DocumentConflictException;
+
+  protected abstract T deserializeDocument(String document) throws JsonProcessingException;
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/AdapterResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/AdapterResolver.java
new file mode 100644
index 0000000..d3b7b2c
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/AdapterResolver.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.EventGroundingProcessor;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.connect.adapter.AdapterDescription;
+import org.apache.streampipes.model.connect.adapter.AdapterStreamDescription;
+import org.apache.streampipes.model.export.ExportItem;
+
+public class AdapterResolver extends AbstractResolver<AdapterDescription> {
+
+  @Override
+  public AdapterDescription findDocument(String resourceId) {
+    return getNoSqlStore().getAdapterInstanceStorage().getAdapter(resourceId);
+  }
+
+  @Override
+  public AdapterDescription modifyDocumentForExport(AdapterDescription doc) {
+    doc.setRev(null);
+    doc.setSelectedEndpointUrl(null);
+    if (doc instanceof AdapterStreamDescription) {
+      ((AdapterStreamDescription) doc).setRunning(false);
+    }
+
+    return doc;
+  }
+
+  @Override
+  public AdapterDescription readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, AdapterDescription.class);
+  }
+
+  @Override
+  public ExportItem convert(AdapterDescription document) {
+    return new ExportItem(document.getElementId(), document.getName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getAdapterInstanceStorage().storeAdapter(deserializeDocument(document));
+  }
+
+  public void writeDocument(String document,
+                            boolean overrideDocument) throws JsonProcessingException {
+    var adapterDescription = deserializeDocument(document);
+    if (overrideDocument) {
+      EventGroundingProcessor.applyOverride(adapterDescription.getEventGrounding().getTransportProtocol());
+    }
+    getNoSqlStore().getAdapterInstanceStorage().storeAdapter(adapterDescription);
+  }
+
+  @Override
+  protected AdapterDescription deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, AdapterDescription.class);
+  }
+
+}
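
A hedged round-trip sketch of the resolver contract as seen from AdapterResolver: a document is serialized on the export side, then written back (optionally with the broker override) and re-read on the import side. The resource id and helper method are illustrative only.

// Hedged sketch: uses only methods declared in AbstractResolver/AdapterResolver above.
static AdapterDescription roundTrip(AdapterResolver resolver, String resourceId)
    throws JsonProcessingException, ElementNotFoundException {
  String serialized = resolver.getSerializedDocument(resourceId); // export side
  resolver.writeDocument(serialized, true);                       // import side, with broker override
  return resolver.readDocument(serialized);
}
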
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DashboardResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DashboardResolver.java
new file mode 100644
index 0000000..a756c03
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DashboardResolver.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.dashboard.DashboardItem;
+import org.apache.streampipes.model.dashboard.DashboardModel;
+import org.apache.streampipes.model.export.ExportItem;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+public class DashboardResolver extends AbstractResolver<DashboardModel> {
+
+  @Override
+  public DashboardModel findDocument(String resourceId) {
+    return getNoSqlStore().getDashboardStorage().getDashboard(resourceId);
+  }
+
+  @Override
+  public DashboardModel modifyDocumentForExport(DashboardModel doc) {
+    doc.setCouchDbRev(null);
+    return doc;
+  }
+
+  @Override
+  protected boolean existsDoc(DashboardModel doc) {
+    return Objects.nonNull(doc) && doc.getCouchDbId() != null;
+  }
+
+  @Override
+  public DashboardModel readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, DashboardModel.class);
+  }
+
+  @Override
+  public ExportItem convert(DashboardModel document) {
+    return new ExportItem(document.getCouchDbId(), document.getName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getDashboardStorage().storeDashboard(deserializeDocument(document));
+  }
+
+  @Override
+  protected DashboardModel deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, DashboardModel.class);
+  }
+
+  public List<String> getWidgets(String resourceId) {
+    var document = findDocument(resourceId);
+    return document.getWidgets().stream().map(DashboardItem::getId).collect(Collectors.toList());
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DashboardWidgetResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DashboardWidgetResolver.java
new file mode 100644
index 0000000..91d4876
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DashboardWidgetResolver.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.dashboard.DashboardWidgetModel;
+import org.apache.streampipes.model.export.ExportItem;
+
+public class DashboardWidgetResolver extends AbstractResolver<DashboardWidgetModel> {
+
+  @Override
+  public DashboardWidgetModel findDocument(String resourceId) {
+    return getNoSqlStore().getDashboardWidgetStorage().getDashboardWidget(resourceId);
+  }
+
+  @Override
+  public DashboardWidgetModel modifyDocumentForExport(DashboardWidgetModel doc) {
+    doc.setRev(null);
+    return doc;
+  }
+
+  @Override
+  public DashboardWidgetModel readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, DashboardWidgetModel.class);
+  }
+
+  @Override
+  public ExportItem convert(DashboardWidgetModel document) {
+    return new ExportItem(document.getId(), document.getVisualizationName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getDashboardWidgetStorage().storeDashboardWidget(deserializeDocument(document));
+  }
+
+  @Override
+  protected DashboardWidgetModel deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, DashboardWidgetModel.class);
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataSourceResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataSourceResolver.java
new file mode 100644
index 0000000..14effbb
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataSourceResolver.java
@@ -0,0 +1,70 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.EventGroundingProcessor;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.SpDataStream;
+import org.apache.streampipes.model.export.ExportItem;
+
+public class DataSourceResolver extends AbstractResolver<SpDataStream> {
+
+  @Override
+  public SpDataStream findDocument(String resourceId) {
+    return getNoSqlStore().getDataStreamStorage().getElementById(resourceId);
+  }
+
+  @Override
+  public SpDataStream modifyDocumentForExport(SpDataStream doc) {
+    doc.setRev(null);
+    return doc;
+  }
+
+  @Override
+  public SpDataStream readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, SpDataStream.class);
+  }
+
+  @Override
+  public ExportItem convert(SpDataStream document) {
+    return new ExportItem(document.getElementId(), document.getName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getDataStreamStorage().createElement(deserializeDocument(document));
+  }
+
+  public void writeDocument(String document,
+                            boolean overrideDocument) throws JsonProcessingException {
+    var dataStream = deserializeDocument(document);
+    if (overrideDocument) {
+      if (dataStream.getEventGrounding() != null) {
+        EventGroundingProcessor.applyOverride(dataStream.getEventGrounding().getTransportProtocol());
+      }
+    }
+    getNoSqlStore().getDataStreamStorage().createElement(dataStream);
+  }
+
+  @Override
+  protected SpDataStream deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, SpDataStream.class);
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataViewResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataViewResolver.java
new file mode 100644
index 0000000..4c81c42
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataViewResolver.java
@@ -0,0 +1,74 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.dashboard.DashboardItem;
+import org.apache.streampipes.model.dashboard.DashboardModel;
+import org.apache.streampipes.model.export.ExportItem;
+
+import java.util.List;
+import java.util.Objects;
+import java.util.stream.Collectors;
+
+public class DataViewResolver extends AbstractResolver<DashboardModel> {
+
+  @Override
+  public DashboardModel findDocument(String resourceId) {
+    return getNoSqlStore().getDataExplorerDashboardStorage().getDashboard(resourceId);
+  }
+
+  @Override
+  public DashboardModel modifyDocumentForExport(DashboardModel doc) {
+    doc.setCouchDbRev(null);
+    return doc;
+  }
+
+  @Override
+  protected boolean existsDoc(DashboardModel doc) {
+    return Objects.nonNull(doc) && doc.getCouchDbId() != null;
+  }
+
+  @Override
+  public DashboardModel readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, DashboardModel.class);
+  }
+
+  @Override
+  public ExportItem convert(DashboardModel document) {
+    return new ExportItem(document.getCouchDbId(), document.getName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getDataExplorerDashboardStorage().storeDashboard(deserializeDocument(document));
+  }
+
+  @Override
+  protected DashboardModel deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, DashboardModel.class);
+  }
+
+  public List<String> getWidgets(String resourceId) {
+    var document = findDocument(resourceId);
+    return document.getWidgets().stream().map(DashboardItem::getId).collect(Collectors.toList());
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataViewWidgetResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataViewWidgetResolver.java
new file mode 100644
index 0000000..0414b37
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/DataViewWidgetResolver.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.datalake.DataExplorerWidgetModel;
+import org.apache.streampipes.model.export.ExportItem;
+
+public class DataViewWidgetResolver extends AbstractResolver<DataExplorerWidgetModel> {
+
+  @Override
+  public DataExplorerWidgetModel findDocument(String resourceId) {
+    return getNoSqlStore().getDataExplorerWidgetStorage().getDataExplorerWidget(resourceId);
+  }
+
+  @Override
+  public DataExplorerWidgetModel modifyDocumentForExport(DataExplorerWidgetModel doc) {
+    doc.setRev(null);
+    return doc;
+  }
+
+  @Override
+  public DataExplorerWidgetModel readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, DataExplorerWidgetModel.class);
+  }
+
+  @Override
+  public ExportItem convert(DataExplorerWidgetModel document) {
+    return new ExportItem(document.getId(), document.getWidgetId(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getDataExplorerWidgetStorage().storeDataExplorerWidget(deserializeDocument(document));
+  }
+
+  @Override
+  protected DataExplorerWidgetModel deserializeDocument(String document) throws JsonProcessingException {
+    return this.defaultMapper.readValue(document, DataExplorerWidgetModel.class);
+  }
+
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/FileResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/FileResolver.java
new file mode 100644
index 0000000..e0e2ae8
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/FileResolver.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.export.ExportItem;
+import org.apache.streampipes.model.file.FileMetadata;
+
+public class FileResolver extends AbstractResolver<FileMetadata> {
+
+  @Override
+  public FileMetadata findDocument(String resourceId) {
+    return getNoSqlStore().getFileMetadataStorage().getMetadataById(resourceId);
+  }
+
+  @Override
+  public FileMetadata modifyDocumentForExport(FileMetadata doc) {
+    doc.setRev(null);
+    return doc;
+  }
+
+  @Override
+  public FileMetadata readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, FileMetadata.class);
+  }
+
+  @Override
+  public ExportItem convert(FileMetadata document) {
+    return new ExportItem(document.getFileId(), document.getOriginalFilename(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getFileMetadataStorage().addFileMetadata(deserializeDocument(document));
+  }
+
+  @Override
+  protected FileMetadata deserializeDocument(String document) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(document, FileMetadata.class);
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/MeasurementResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/MeasurementResolver.java
new file mode 100644
index 0000000..c5a86df
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/MeasurementResolver.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.datalake.DataLakeMeasure;
+import org.apache.streampipes.model.export.ExportItem;
+
+public class MeasurementResolver extends AbstractResolver<DataLakeMeasure> {
+
+  @Override
+  public DataLakeMeasure findDocument(String resourceId) {
+    return getNoSqlStore().getDataLakeStorage().findOne(resourceId);
+  }
+
+  @Override
+  public DataLakeMeasure modifyDocumentForExport(DataLakeMeasure doc) {
+    doc.setRev(null);
+    return doc;
+  }
+
+  @Override
+  public DataLakeMeasure readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, DataLakeMeasure.class);
+  }
+
+  @Override
+  public ExportItem convert(DataLakeMeasure document) {
+    return new ExportItem(document.getElementId(), document.getMeasureName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getDataLakeStorage().storeDataLakeMeasure(deserializeDocument(document));
+  }
+
+  @Override
+  protected DataLakeMeasure deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, DataLakeMeasure.class);
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/PipelineResolver.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/PipelineResolver.java
new file mode 100644
index 0000000..2117f54
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/resolver/PipelineResolver.java
@@ -0,0 +1,91 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.resolver;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import org.apache.streampipes.export.utils.EventGroundingProcessor;
+import org.apache.streampipes.export.utils.SerializationUtils;
+import org.apache.streampipes.model.SpDataSet;
+import org.apache.streampipes.model.export.ExportItem;
+import org.apache.streampipes.model.pipeline.Pipeline;
+
+import java.util.stream.Collectors;
+
+public class PipelineResolver extends AbstractResolver<Pipeline> {
+
+  @Override
+  public Pipeline findDocument(String resourceId) {
+    return getNoSqlStore().getPipelineStorageAPI().getPipeline(resourceId);
+  }
+
+  @Override
+  public Pipeline modifyDocumentForExport(Pipeline doc) {
+    doc.setRev(null);
+    doc.setRestartOnSystemReboot(false);
+    doc.setRunning(false);
+    doc.setSepas(doc.getSepas().stream().peek(s -> s.setSelectedEndpointUrl(null)).collect(Collectors.toList()));
+    doc.setActions(doc.getActions().stream().peek(s -> s.setSelectedEndpointUrl(null)).collect(Collectors.toList()));
+    doc.setStreams(doc.getStreams()
+      .stream()
+      .filter(s -> s instanceof SpDataSet).peek(s -> ((SpDataSet) s).setSelectedEndpointUrl(null))
+      .collect(Collectors.toList()));
+    return doc;
+  }
+
+  @Override
+  public Pipeline readDocument(String serializedDoc) throws JsonProcessingException {
+    return SerializationUtils.getSpObjectMapper().readValue(serializedDoc, Pipeline.class);
+  }
+
+  @Override
+  public ExportItem convert(Pipeline document) {
+    return new ExportItem(document.getPipelineId(), document.getName(), true);
+  }
+
+  @Override
+  public void writeDocument(String document) throws JsonProcessingException {
+    getNoSqlStore().getPipelineStorageAPI().storePipeline(deserializeDocument(document));
+  }
+
+  public void writeDocument(String document,
+                            boolean overrideDocument) throws JsonProcessingException {
+    var pipeline = deserializeDocument(document);
+    if (overrideDocument) {
+      pipeline.setSepas(pipeline.getSepas().stream().peek(processor -> {
+        processor.getInputStreams().forEach(is -> EventGroundingProcessor.applyOverride(is.getEventGrounding().getTransportProtocol()));
+        EventGroundingProcessor.applyOverride(processor.getOutputStream().getEventGrounding().getTransportProtocol());
+      }).collect(Collectors.toList()));
+
+      pipeline.setStreams(pipeline.getStreams().stream().peek(stream -> {
+        EventGroundingProcessor.applyOverride(stream.getEventGrounding().getTransportProtocol());
+      }).collect(Collectors.toList()));
+
+      pipeline.setActions(pipeline.getActions().stream().peek(sink -> {
+        sink.getInputStreams().forEach(is -> EventGroundingProcessor.applyOverride(is.getEventGrounding().getTransportProtocol()));
+      }).collect(Collectors.toList()));
+
+    }
+    getNoSqlStore().getPipelineStorageAPI().storePipeline(pipeline);
+  }
+
+  @Override
+  protected Pipeline deserializeDocument(String document) throws JsonProcessingException {
+    return this.spMapper.readValue(document, Pipeline.class);
+  }
+}
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/utils/EventGroundingProcessor.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/utils/EventGroundingProcessor.java
new file mode 100644
index 0000000..15578de
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/utils/EventGroundingProcessor.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.utils;
+
+import org.apache.streampipes.config.backend.BackendConfig;
+import org.apache.streampipes.model.grounding.KafkaTransportProtocol;
+import org.apache.streampipes.model.grounding.MqttTransportProtocol;
+import org.apache.streampipes.model.grounding.TransportProtocol;
+
+public class EventGroundingProcessor {
+
+  public static void applyOverride(TransportProtocol protocol) {
+    if (protocol instanceof KafkaTransportProtocol) {
+      protocol.setBrokerHostname(BackendConfig.INSTANCE.getKafkaHost());
+      ((KafkaTransportProtocol) protocol).setKafkaPort(BackendConfig.INSTANCE.getKafkaPort());
+    } else if (protocol instanceof MqttTransportProtocol) {
+      protocol.setBrokerHostname(BackendConfig.INSTANCE.getMqttHost());
+      ((MqttTransportProtocol) protocol).setPort(BackendConfig.INSTANCE.getMqttPort());
+    }
+  }
+}
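
For illustration, a small sketch of the override, assuming KafkaTransportProtocol offers a no-argument constructor like the other model beans in this patch; the initial host and port are made up.

// Hedged sketch: rewrites an exported Kafka grounding so it points at the broker
// configured in the target installation.
KafkaTransportProtocol protocol = new KafkaTransportProtocol();
protocol.setBrokerHostname("old-installation-kafka");
protocol.setKafkaPort(9092);
EventGroundingProcessor.applyOverride(protocol);
// protocol now carries BackendConfig.INSTANCE.getKafkaHost() and getKafkaPort()
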
diff --git a/streampipes-data-export/src/main/java/org/apache/streampipes/export/utils/SerializationUtils.java b/streampipes-data-export/src/main/java/org/apache/streampipes/export/utils/SerializationUtils.java
new file mode 100644
index 0000000..44eae52
--- /dev/null
+++ b/streampipes-data-export/src/main/java/org/apache/streampipes/export/utils/SerializationUtils.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+package org.apache.streampipes.export.utils;
+
+import com.fasterxml.jackson.databind.DeserializationFeature;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.streampipes.serializers.json.JacksonSerializer;
+
+public class SerializationUtils {
+
+  public static ObjectMapper getSpObjectMapper() {
+    return JacksonSerializer.getObjectMapper();
+  }
+
+  public static ObjectMapper getDefaultObjectMapper() {
+    var mapper = new ObjectMapper();
+    mapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
+
+    return mapper;
+  }
+}
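
The two mappers serve different purposes in the exporter: the spMapper understands the StreamPipes type hierarchy, while the default mapper handles schemaless documents such as assets. A hedged sketch of the latter, mirroring how PreviewImportGenerator reads an asset document; the JSON literal is illustrative and the usual Jackson imports are assumed.

// Hedged sketch: reads a generic asset document, ignoring unknown properties.
static String readAssetName(String assetJson) throws JsonProcessingException {
  Map<String, Object> asset = SerializationUtils.getDefaultObjectMapper()
      .readValue(assetJson, new TypeReference<Map<String, Object>>() {});
  return String.valueOf(asset.get("assetName"));
}
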
diff --git a/streampipes-dataformat-cbor/pom.xml b/streampipes-dataformat-cbor/pom.xml
index 604a3af..3c2658b 100644
--- a/streampipes-dataformat-cbor/pom.xml
+++ b/streampipes-dataformat-cbor/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,12 +32,12 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-vocabulary</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-dataformat-fst/pom.xml b/streampipes-dataformat-fst/pom.xml
index e6f74b0..519c4bf 100644
--- a/streampipes-dataformat-fst/pom.xml
+++ b/streampipes-dataformat-fst/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,12 +32,12 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-vocabulary</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-dataformat-json/pom.xml b/streampipes-dataformat-json/pom.xml
index 88357bd..3150e08 100644
--- a/streampipes-dataformat-json/pom.xml
+++ b/streampipes-dataformat-json/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,12 +32,12 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-vocabulary</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-dataformat-smile/pom.xml b/streampipes-dataformat-smile/pom.xml
index ac774c8..0fe2690 100644
--- a/streampipes-dataformat-smile/pom.xml
+++ b/streampipes-dataformat-smile/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,12 +32,12 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-dataformat</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-vocabulary</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
diff --git a/streampipes-dataformat/pom.xml b/streampipes-dataformat/pom.xml
index b573e54..80d1057 100644
--- a/streampipes-dataformat/pom.xml
+++ b/streampipes-dataformat/pom.xml
@@ -21,7 +21,7 @@
     <parent>
         <artifactId>streampipes-parent</artifactId>
         <groupId>org.apache.streampipes</groupId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <modelVersion>4.0.0</modelVersion>
 
@@ -32,7 +32,7 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-model</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
     </dependencies>
 
diff --git a/streampipes-extensions/pom.xml b/streampipes-extensions/pom.xml
index cfa8acf..33709e5 100644
--- a/streampipes-extensions/pom.xml
+++ b/streampipes-extensions/pom.xml
@@ -17,49 +17,54 @@
   ~
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
     <modelVersion>4.0.0</modelVersion>
 
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-parent</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>streampipes-extensions</artifactId>
     <packaging>pom</packaging>
 
     <modules>
-        <module>streampipes-sinks-databases-jvm</module>
-        <module>streampipes-sinks-internal-jvm</module>
-        <module>streampipes-sinks-brokers-jvm</module>
-        <module>streampipes-processors-filters-jvm</module>
-        <module>streampipes-sinks-databases-flink</module>
+        <module>streampipes-connect-adapters</module>
+        <module>streampipes-connect-adapters-iiot</module>
+
+        <module>streampipes-extensions-all-iiot</module>
+        <module>streampipes-extensions-all-jvm</module>
+
+        <module>streampipes-pipeline-elements-all-jvm</module>
+        <module>streampipes-pipeline-elements-all-flink</module>
+        <module>streampipes-pipeline-elements-data-simulator</module>
+        <module>streampipes-pipeline-elements-shared</module>
+
         <module>streampipes-processors-aggregation-flink</module>
-        <module>streampipes-processors-pattern-detection-flink</module>
+        <module>streampipes-processors-change-detection-jvm</module>
         <module>streampipes-processors-enricher-flink</module>
         <module>streampipes-processors-enricher-jvm</module>
-        <module>streampipes-sources-watertank-simulator</module>
-        <module>streampipes-sources-vehicle-simulator</module>
-        <module>streampipes-processors-transformation-flink</module>
-        <module>streampipes-processors-geo-jvm</module>
-        <module>streampipes-processors-statistics-flink</module>
+        <module>streampipes-processors-filters-jvm</module>
         <module>streampipes-processors-filters-siddhi</module>
+        <module>streampipes-processors-geo-flink</module>
+        <module>streampipes-processors-geo-jvm</module>
+        <module>streampipes-processors-image-processing-jvm</module>
+        <module>streampipes-processors-pattern-detection-flink</module>
+        <module>streampipes-processors-statistics-flink</module>
+        <module>streampipes-processors-transformation-flink</module>
         <module>streampipes-processors-text-mining-flink</module>
         <module>streampipes-processors-text-mining-jvm</module>
-        <module>streampipes-sinks-notifications-jvm</module>
-        <module>streampipes-pipeline-elements-shared</module>
-        <module>streampipes-processors-geo-flink</module>
-        <module>streampipes-processors-image-processing-jvm</module>
         <module>streampipes-processors-transformation-jvm</module>
-        <module>streampipes-pipeline-elements-all-jvm</module>
-        <module>streampipes-pipeline-elements-data-simulator</module>
-        <module>streampipes-connect-adapters</module>
-        <module>streampipes-pipeline-elements-all-flink</module>
-        <module>streampipes-processors-change-detection-jvm</module>
-        <module>streampipes-extensions-all-jvm</module>
-        <module>streampipes-connect-adapters-iiot</module>
+
+        <module>streampipes-sinks-brokers-jvm</module>
+        <module>streampipes-sinks-databases-jvm</module>
+        <module>streampipes-sinks-internal-jvm</module>
+        <module>streampipes-sinks-databases-flink</module>
+        <module>streampipes-sinks-notifications-jvm</module>
+
+        <module>streampipes-sources-watertank-simulator</module>
+        <module>streampipes-sources-vehicle-simulator</module>
     </modules>
 
     <properties>
@@ -84,7 +89,7 @@
         <google-maps-services.version>0.10.0</google-maps-services.version>
         <graalvm.js.version>21.3.0</graalvm.js.version>
         <iotdb.version>0.12.0</iotdb.version>
-        <java-websocket.version>1.4.0</java-websocket.version>
+        <java-websocket.version>1.5.0</java-websocket.version>
         <javax-websocket-client-api.version>1.1</javax-websocket-client-api.version>
         <jsrosbridge.version>0.2.0</jsrosbridge.version>
         <jedis.version>3.3.0</jedis.version>
@@ -98,8 +103,8 @@
         <netty-resolver.version>4.1.72.Final</netty-resolver.version>
         <okhttp.version>3.13.1</okhttp.version>
         <opennlp.version>1.9.0</opennlp.version>
-        <postgresql.version>42.3.3</postgresql.version>
-        <pulsar.version>2.9.1</pulsar.version>
+        <postgresql.version>42.4.1</postgresql.version>
+        <pulsar.version>2.10.1</pulsar.version>
         <quartz.version>2.3.2</quartz.version>
         <scala-lang.version>2.11.12</scala-lang.version>
         <scala-parser-combinators.version>1.1.1</scala-parser-combinators.version>
@@ -125,12 +130,12 @@
         <influxdb.java.version>2.14</influxdb.java.version>
         <eclipse.milo.version>0.6.3</eclipse.milo.version>
         <mysql-binlog-connector.version>0.18.1</mysql-binlog-connector.version>
-        <mysql-connector-java.version>8.0.16</mysql-connector-java.version>
+        <mysql-connector-java.version>8.0.28</mysql-connector-java.version>
         <netty.version>4.1.72.Final</netty.version>
         <nimbus-jose-jwt.version>7.9</nimbus-jose-jwt.version>
         <opencsv.version>5.5.2</opencsv.version>
-        <plc4x.version>0.8.0</plc4x.version>
-        <protobuf.version>3.16.1</protobuf.version>
+        <plc4x.version>0.9.1</plc4x.version>
+        <protobuf.version>3.16.3</protobuf.version>
         <nats.version>2.11.0</nats.version>
 
     </properties>
@@ -616,7 +621,7 @@
             <plugin>
                 <groupId>org.apache.streampipes</groupId>
                 <artifactId>streampipes-maven-plugin</artifactId>
-                <version>0.70.0-SNAPSHOT</version>
+                <version>0.71.0-SNAPSHOT</version>
             </plugin>
         </plugins>
     </build>
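
Note on the property bumps above: the upgraded versions (java-websocket 1.5.0, postgresql 42.4.1, pulsar 2.10.1, mysql-connector-java 8.0.28, plc4x 0.9.1, protobuf 3.16.3) only take effect where the modules reference the corresponding Maven property. As a minimal illustration (not part of this patch; the dependencyManagement placement is an assumption for the sketch), a dependency consuming one of these properties might look like:

    <!-- illustrative sketch, not part of this diff: consumes the postgresql.version
         property bumped above via standard Maven property interpolation -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.postgresql</groupId>
                <artifactId>postgresql</artifactId>
                <version>${postgresql.version}</version> <!-- resolves to 42.4.1 after this change -->
            </dependency>
        </dependencies>
    </dependencyManagement>

Child modules that declare the dependency without a version then inherit the managed version, so a single property change in this parent POM updates every extension module consistently.
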
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/.idea/workspace.xml b/streampipes-extensions/streampipes-connect-adapters-iiot/.idea/workspace.xml
deleted file mode 100644
index 5cfa370..0000000
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/.idea/workspace.xml
+++ /dev/null
@@ -1,65 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project version="4">
-  <component name="AutoImportSettings">
-    <option name="autoReloadType" value="SELECTIVE" />
-  </component>
-  <component name="ChangeListManager">
-    <list default="true" id="d3e2227a-dba3-47e7-b123-6c9059538b23" name="Changes" comment="">
-      <change afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/PullAdapter.java" afterDir="false" />
-      <change afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/PullRestAdapter.java" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/ConnectAdapterIiotInit.java" beforeDir="false" afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/ConnectAdapterIiotInit.java" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioRestAdapter.java" beforeDir="false" afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioRestAdapter.java" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaAdapter.java" beforeDir="false" afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaAdapter.java" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/modbus/Plc4xModbusAdapter.java" beforeDir="false" afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/modbus/Plc4xModbusAdapter.java" afterDir="false" />
-      <change beforePath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/Plc4xS7Adapter.java" beforeDir="false" afterPath="$PROJECT_DIR$/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/Plc4xS7Adapter.java" afterDir="false" />
-    </list>
-    <option name="SHOW_DIALOG" value="false" />
-    <option name="HIGHLIGHT_CONFLICTS" value="true" />
-    <option name="HIGHLIGHT_NON_ACTIVE_CHANGELIST" value="false" />
-    <option name="LAST_RESOLUTION" value="IGNORE" />
-  </component>
-  <component name="Git.Settings">
-    <option name="RECENT_GIT_ROOT_PATH" value="$PROJECT_DIR$/.." />
-  </component>
-  <component name="ProjectId" id="1zENWGAsJxaiZwkmabyYBU9b1cM" />
-  <component name="ProjectViewState">
-    <option name="hideEmptyMiddlePackages" value="true" />
-    <option name="showLibraryContents" value="true" />
-  </component>
-  <component name="PropertiesComponent">
-    <property name="RunOnceActivity.OpenProjectViewOnStart" value="true" />
-    <property name="RunOnceActivity.ShowReadmeOnStart" value="true" />
-    <property name="last_opened_file_path" value="$PROJECT_DIR$/.." />
-  </component>
-  <component name="RunManager">
-    <configuration name="ConnectAdapterIiotInit" type="Application" factoryName="Application" temporary="true" nameIsGenerated="true">
-      <option name="MAIN_CLASS_NAME" value="org.apache.streampipes.connect.iiot.ConnectAdapterIiotInit" />
-      <module name="streampipes-connect-adapters-iiot" />
-      <extension name="coverage">
-        <pattern>
-          <option name="PATTERN" value="org.apache.streampipes.connect.iiot.*" />
-          <option name="ENABLED" value="true" />
-        </pattern>
-      </extension>
-      <method v="2">
-        <option name="Make" enabled="true" />
-      </method>
-    </configuration>
-    <recent_temporary>
-      <list>
-        <item itemvalue="Application.ConnectAdapterIiotInit" />
-      </list>
-    </recent_temporary>
-  </component>
-  <component name="SpellCheckerSettings" RuntimeDictionaries="0" Folders="0" CustomDictionaries="0" DefaultDictionary="application-level" UseSingleDictionary="true" transferred="true" />
-  <component name="TaskManager">
-    <task active="true" id="Default" summary="Default task">
-      <changelist id="d3e2227a-dba3-47e7-b123-6c9059538b23" name="Changes" comment="" />
-      <created>1633709316229</created>
-      <option name="number" value="Default" />
-      <option name="presentableId" value="Default" />
-      <updated>1633709316229</updated>
-    </task>
-    <servers />
-  </component>
-</project>
\ No newline at end of file
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/pom.xml b/streampipes-extensions/streampipes-connect-adapters-iiot/pom.xml
index 5ed13be..71a318a 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/pom.xml
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/pom.xml
@@ -17,16 +17,14 @@
   ~
   -->
 
-<project xmlns="http://maven.apache.org/POM/4.0.0"
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
 
     <modelVersion>4.0.0</modelVersion>
 
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-extensions</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
     <artifactId>streampipes-connect-adapters-iiot</artifactId>
 
@@ -39,7 +37,7 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect-container-worker</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
             <exclusions>
                 <exclusion>
                     <groupId>org.graalvm.nativeimage</groupId>
@@ -50,17 +48,17 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-pipeline-elements-shared</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-client</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-container-extensions</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
@@ -69,10 +67,6 @@
             <artifactId>jackson-module-jaxb-annotations</artifactId>
         </dependency>
         <dependency>
-            <groupId>com.github.shyiko</groupId>
-            <artifactId>mysql-binlog-connector-java</artifactId>
-        </dependency>
-        <dependency>
             <groupId>org.influxdb</groupId>
             <artifactId>influxdb-java</artifactId>
         </dependency>
@@ -268,5 +262,4 @@
         </plugins>
         <finalName>streampipes-connect-adapters-iiot</finalName>
     </build>
-
 </project>
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/ConnectAdapterIiotInit.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/ConnectAdapterIiotInit.java
index 7298fab..97f0df0 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/ConnectAdapterIiotInit.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/ConnectAdapterIiotInit.java
@@ -20,20 +20,16 @@
 
 import org.apache.streampipes.connect.iiot.adapters.influxdb.InfluxDbSetAdapter;
 import org.apache.streampipes.connect.iiot.adapters.influxdb.InfluxDbStreamAdapter;
-import org.apache.streampipes.connect.iiot.adapters.mysql.MySqlSetAdapter;
-import org.apache.streampipes.connect.iiot.adapters.mysql.MySqlStreamAdapter;
-import org.apache.streampipes.connect.iiot.adapters.netio.NetioMQTTAdapter;
-import org.apache.streampipes.connect.iiot.adapters.netio.NetioRestAdapter;
 import org.apache.streampipes.connect.iiot.adapters.opcua.OpcUaAdapter;
 import org.apache.streampipes.connect.iiot.adapters.plc4x.modbus.Plc4xModbusAdapter;
 import org.apache.streampipes.connect.iiot.adapters.plc4x.s7.Plc4xS7Adapter;
 import org.apache.streampipes.connect.iiot.adapters.ros.RosBridgeAdapter;
 import org.apache.streampipes.connect.iiot.adapters.simulator.machine.MachineDataStreamAdapter;
-import org.apache.streampipes.container.extensions.ExtensionsModelSubmitter;
 import org.apache.streampipes.connect.iiot.protocol.set.FileProtocol;
 import org.apache.streampipes.connect.iiot.protocol.set.HttpProtocol;
 import org.apache.streampipes.connect.iiot.protocol.stream.*;
 import org.apache.streampipes.connect.iiot.protocol.stream.pulsar.PulsarProtocol;
+import org.apache.streampipes.container.extensions.ExtensionsModelSubmitter;
 import org.apache.streampipes.container.model.SpServiceDefinition;
 import org.apache.streampipes.container.model.SpServiceDefinitionBuilder;
 
@@ -48,15 +44,11 @@
 						"StreamPipes connect worker containing adapters relevant for the IIoT",
 						"",
 						8001)
-				.registerAdapter(new MySqlStreamAdapter())
-				.registerAdapter(new MySqlSetAdapter())
 				.registerAdapter(new MachineDataStreamAdapter())
 				.registerAdapter(new RosBridgeAdapter())
 				.registerAdapter(new OpcUaAdapter())
 				.registerAdapter(new InfluxDbStreamAdapter())
 				.registerAdapter(new InfluxDbSetAdapter())
-				.registerAdapter(new NetioRestAdapter())
-				.registerAdapter(new NetioMQTTAdapter())
 				.registerAdapter(new Plc4xS7Adapter())
 				.registerAdapter(new Plc4xModbusAdapter())
 				.registerAdapter(new FileProtocol())
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/PullAdapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/PullAdapter.java
index 2e92cfb..02f1b58 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/PullAdapter.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/PullAdapter.java
@@ -51,9 +51,7 @@
     public void startAdapter() throws AdapterException {
         before();
 
-        final Runnable errorThread = () -> {
-            executeAdpaterLogic();
-        };
+        final Runnable errorThread = this::executeAdpaterLogic;
 
         scheduler = Executors.newScheduledThreadPool(1);
         scheduler.schedule(errorThread, 0, TimeUnit.MILLISECONDS);
@@ -61,11 +59,7 @@
     }
 
     private void executeAdpaterLogic() {
-        final Runnable task = () -> {
-
-            pullData();
-
-        };
+        final Runnable task = this::pullData;
 
         scheduler = Executors.newScheduledThreadPool(1);
         ScheduledFuture<?> handle = scheduler.scheduleAtFixedRate(task, 1,
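
The two PullAdapter hunks above replace single-statement lambdas with method references, the idiomatic form when a Runnable only delegates to an existing method. Below is a minimal, self-contained sketch of the same pattern; the class and method names are hypothetical stand-ins, not the actual PullAdapter API, and the scheduling interval is illustrative only.

    // Illustrative sketch only; mirrors the "this::method" pattern introduced above.
    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    public class PollingSketch {

        private ScheduledExecutorService scheduler;

        // stand-in for the adapter's actual polling logic
        private void pullData() {
            System.out.println("polling...");
        }

        public void start() {
            // equivalent to "() -> pullData()", without the wrapping lambda
            final Runnable task = this::pullData;
            scheduler = Executors.newScheduledThreadPool(1);
            // initial delay 1s, then poll every 5s (values are illustrative)
            scheduler.scheduleAtFixedRate(task, 1, 5, TimeUnit.SECONDS);
        }

        public void stop() {
            scheduler.shutdownNow();
        }

        public static void main(String[] args) throws InterruptedException {
            PollingSketch sketch = new PollingSketch();
            sketch.start();
            Thread.sleep(12_000); // let a couple of polls run
            sketch.stop();
        }
    }

The behavior is unchanged; the method reference simply avoids a lambda that only forwards the call and reads closer to the intent.
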
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/Column.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/Column.java
deleted file mode 100644
index 08ee0c5..0000000
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/Column.java
+++ /dev/null
@@ -1,90 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.streampipes.connect.iiot.adapters.mysql;
-
-import org.apache.streampipes.sdk.utils.Datatypes;
-import org.apache.streampipes.vocabulary.SO;
-
-class Column {
-  private String name;
-  private Datatypes type;
-  private Object def;
-  private String domainProperty;
-
-  Column(String name, String dataType, String columnType) {
-    this.name = name;
-    switch (dataType) {
-      case "tinyint":
-      case "smallint":
-      case "mediumint":
-      case "int":
-      case "bit":
-        this.type = Datatypes.Integer;
-        def = 0;
-        break;
-      case "bigint":
-        this.type = Datatypes.Long;
-        def = 0L;
-        break;
-      case "float":
-      case "decimal":   // Watch out for loss of precision
-      case "double":
-        this.type = Datatypes.Float;
-        def = 0.0f;
-        break;
-      case "text":
-      case "varchar":
-      case "char":
-        this.type = Datatypes.String;
-        def = "";
-        break;
-      case "date":
-      case "datetime":
-      case "time":
-      case "timestamp":
-      case "year":
-        this.type = Datatypes.Float;
-        def = System.currentTimeMillis();
-        this.domainProperty = SO.DateTime;
-        break;
-      default:
-        throw new IllegalArgumentException("Type " + type + " not supported.");
-    }
-    if (columnType.equals("tinyint(1)") || columnType.equals("bit(1)")) {
-      this.type = Datatypes.Boolean;
-      def = Boolean.FALSE;
-    }
-    System.out.println("Found column: " + name + ", type: " + this.type + " (sql-type: "
-        + dataType + ", column-tpye: " + columnType + ")");
-  }
-
-  public String getName() {
-    return name;
-  }
-  public Datatypes getType() {
-    return type;
-  }
-  public Object getDefault() {
-    return def;
-  }
-
-  public String getDomainProperty() {
-    return domainProperty;
-  }
-}
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlAdapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlAdapter.java
deleted file mode 100644
index 59fecb9..0000000
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlAdapter.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.streampipes.connect.iiot.adapters.mysql;
-
-import com.github.shyiko.mysql.binlog.BinaryLogClient;
-import com.github.shyiko.mysql.binlog.event.*;
-import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer;
-import org.apache.streampipes.connect.adapter.Adapter;
-import org.apache.streampipes.connect.api.exception.AdapterException;
-import org.apache.streampipes.connect.api.exception.ParseException;
-import org.apache.streampipes.connect.adapter.model.specific.SpecificDataStreamAdapter;
-import org.apache.streampipes.connect.adapter.sdk.ParameterExtractor;
-import org.apache.streampipes.model.connect.adapter.SpecificAdapterStreamDescription;
-import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.model.schema.EventProperty;
-import org.apache.streampipes.model.schema.EventSchema;
-import org.apache.streampipes.sdk.builder.adapter.SpecificDataStreamAdapterBuilder;
-import org.apache.streampipes.sdk.builder.PrimitivePropertyBuilder;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.vocabulary.SO;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.sql.*;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-
-public class MySqlAdapter extends SpecificDataStreamAdapter {
-
-  public static final String ID = "http://streampipes.org/adapter/specific/mysql";
-
-  private static String MYSQL_HOST = "MYSQL_HOST";
-  private static String MYSQL_USER = "MYSQL_USER";
-  private static String MYSQL_PASS = "MYSQL_PASS";
-  private static String MYSQL_DB = "MYSQL_DB";
-  private static String MYSQL_TABLE = "MYSQL_TABLE";
-  private static String MYSQL_PORT = "MYSQL_PORT";
-
-  private String host;
-  private String user;
-  private String pass;
-  private String database;
-  private String table;
-  private String port;
-
-  private boolean dataComing = false;
-  private List<Column> tableSchema;
-  private BinaryLogClient client;
-
-  public MySqlAdapter() {
-  }
-
-  public MySqlAdapter(SpecificAdapterStreamDescription adapterDescription) {
-    super(adapterDescription);
-
-    getConfigurations(adapterDescription);
-  }
-
-  @Override
-  public SpecificAdapterStreamDescription declareModel() {
-    //TODO: Add Icon
-    SpecificAdapterStreamDescription description = SpecificDataStreamAdapterBuilder.create(ID,
-            "MySql Adapter",
-            "Creates a data stream for a SQL table")
-            .iconUrl("sql.png")
-            .requiredTextParameter(Labels.from(MYSQL_HOST, "Hostname", "Hostname of the MySql Server"))
-            .requiredTextParameter(Labels.from(MYSQL_USER, "Username", "Username of the user"))
-            .requiredTextParameter(Labels.from(MYSQL_PASS, "Password", "Password of the user"))
-            .requiredTextParameter(Labels.from(MYSQL_DB, "Database", "Database in which the table is located"))
-            .requiredTextParameter(Labels.from(MYSQL_TABLE, "Table", "Table which should be watched"))
-            .requiredIntegerParameter(Labels.from(MYSQL_PORT, "Port", "Port of the MySql Server. Default: 3306"), 3306)
-            .build();
-
-    description.setAppId(ID);
-    return  description;
-  }
-
-  @Override
-  public void startAdapter() throws AdapterException {
-    checkJdbcDriver();
-    extractTableInformation();
-
-    // Connect BinaryLogClient
-    client = new BinaryLogClient(host, Integer.parseInt(port), user, pass);
-    EventDeserializer eventDeserializer = new EventDeserializer();
-    eventDeserializer.setCompatibilityMode(
-            EventDeserializer.CompatibilityMode.DATE_AND_TIME_AS_LONG,
-            EventDeserializer.CompatibilityMode.CHAR_AND_BINARY_AS_BYTE_ARRAY
-    );
-    client.setEventDeserializer(eventDeserializer);
-    client.registerEventListener(event -> sendEvent(event));
-    try {
-      client.connect();
-    } catch (IOException e) {
-      throw new AdapterException(e.getMessage());
-    }
-  }
-
-  private void sendEvent(Event event) {
-    // An event can contain multiple insertions/updates
-    if (event.getHeader().getEventType() == EventType.TABLE_MAP) {
-      // Check table and database, if the next event should be streamed
-      if (((TableMapEventData) event.getData()).getDatabase().equals(database)
-              && ((TableMapEventData) event.getData()).getTable().equals((table))) {
-        dataComing = true;
-      }
-    }
-    if (dataComing) {
-      if (EventType.isUpdate(event.getHeader().getEventType())) {
-        for (Entry<Serializable[], Serializable[]> en : ((UpdateRowsEventData) event.getData()).getRows()) {
-          sendChange(en.getValue());
-        }
-        dataComing = false;
-      } else if (EventType.isWrite(event.getHeader().getEventType())) {
-        for (Serializable[] s : ((WriteRowsEventData) event.getData()).getRows()) {
-          sendChange(s);
-        }
-        dataComing = false;
-      }
-    }
-  }
-
-  private void sendChange(Serializable[] rows) {
-    Map<String, Object> out = new HashMap<>();
-    for (int i = 0; i < rows.length; i++) {
-      if (rows[i] != null) {
-        if (rows[i] instanceof byte[]) {
-          // Strings are sent in byte arrays and have to be converted. TODO: Check that encoding is correct
-          out.put(tableSchema.get(i).getName(), new String((byte[])rows[i]));
-        } else {
-          out.put(tableSchema.get(i).getName(), rows[i]);
-        }
-      } else {
-        out.put(tableSchema.get(i).getName(), tableSchema.get(i).getDefault());
-      }
-    }
-    adapterPipeline.process(out);
-  }
-
-  @Override
-  public void stopAdapter() throws AdapterException {
-    try {
-      client.disconnect();
-    } catch (IOException e) {
-      throw new AdapterException("Thrown exception: " + e.getMessage());
-    }
-  }
-
-  @Override
-  public Adapter getInstance(SpecificAdapterStreamDescription adapterDescription) {
-    return new MySqlAdapter(adapterDescription);
-  }
-
-  @Override
-  public GuessSchema getSchema(SpecificAdapterStreamDescription adapterDescription)
-          throws AdapterException, ParseException {
-    // Load JDBC Driver, connect JDBC Driver, Extract information, disconnect JDBC Driver
-    EventSchema eventSchema = new EventSchema();
-    GuessSchema guessSchema = new GuessSchema();
-    List<EventProperty> allProperties = new ArrayList<>();
-
-    getConfigurations(adapterDescription);
-
-    checkJdbcDriver();
-    extractTableInformation();
-
-    for (Column column : tableSchema) {
-      if (SO.DateTime.equals(column.getDomainProperty())) {
-        allProperties.add(PrimitivePropertyBuilder
-                .create(column.getType(), column.getName())
-                .label(column.getName())
-                .domainProperty(SO.DateTime)
-                .build());
-      } else {
-        allProperties.add(PrimitivePropertyBuilder
-                .create(column.getType(), column.getName())
-                .label(column.getName())
-                .build());
-      }
-
-    }
-
-    eventSchema.setEventProperties(allProperties);
-    guessSchema.setEventSchema(eventSchema);
-
-    return guessSchema;
-  }
-
-  @Override
-  public String getId() {
-    return ID;
-  }
-
-  private void getConfigurations(SpecificAdapterStreamDescription adapterDescription) {
-    ParameterExtractor extractor = new ParameterExtractor(adapterDescription.getConfig());
-
-    this.host = extractor.singleValue(MYSQL_HOST, String.class);
-    this.user = extractor.singleValue(MYSQL_USER, String.class);
-    this.pass = extractor.singleValue(MYSQL_PASS, String.class);
-    this.database = extractor.singleValue(MYSQL_DB, String.class);
-    this.table = extractor.singleValue(MYSQL_TABLE, String.class);
-    this.port = extractor.singleValue(MYSQL_PORT, String.class);
-  }
-
-  private void checkJdbcDriver() throws AdapterException {
-    try {
-      Class.forName("com.mysql.cj.jdbc.Driver");
-    } catch (ClassNotFoundException e) {
-      throw new AdapterException("MySql Driver not found.");
-    }
-  }
-
-  private void extractTableInformation() throws AdapterException {
-    String server = "jdbc:mysql://" + host + ":" + port + "/" + "?sslMode=DISABLED&allowPublicKeyRetrieval=true";
-    ResultSet resultSet = null;
-    tableSchema = new ArrayList<>();
-
-    String query = "SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE FROM "
-            + "INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = ? AND TABLE_SCHEMA = ? ORDER BY "
-            + "ORDINAL_POSITION ASC;";
-
-    try (Connection con = DriverManager.getConnection(server, user, pass);
-         PreparedStatement statement = con.prepareStatement(query)) {
-
-      statement.setString(1, table);
-      statement.setString(2, database);
-      resultSet = statement.executeQuery();
-
-      if (resultSet.next()) {
-        do {
-          String name = resultSet.getString("COLUMN_NAME");
-          String dataType = resultSet.getString("DATA_TYPE");
-          String columnType = resultSet.getString("COLUMN_TYPE");
-          tableSchema.add(new Column(name, dataType, columnType));
-        } while(resultSet.next());
-      } else {
-        // No columns found -> Table/Database does not exist
-        throw new IllegalArgumentException("Database/table not found");
-      }
-    } catch (SQLException e) {
-      throw new AdapterException("SqlException: " + e.getMessage()
-              + ", Error code: " + e.getErrorCode()
-              + ", SqlState: " + e.getSQLState());
-    } finally {
-      try {
-        resultSet.close();
-      } catch (Exception e) {}
-    }
-  }
-}
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlClient.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlClient.java
deleted file mode 100644
index 251f16a..0000000
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlClient.java
+++ /dev/null
@@ -1,215 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.streampipes.connect.iiot.adapters.mysql;
-
-import org.apache.streampipes.connect.api.exception.AdapterException;
-import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.model.schema.EventProperty;
-import org.apache.streampipes.model.schema.EventSchema;
-import org.apache.streampipes.sdk.builder.PrimitivePropertyBuilder;
-import org.apache.streampipes.vocabulary.SO;
-
-import java.sql.*;
-import java.util.ArrayList;
-import java.util.List;
-
-public class MySqlClient {
-
-  public static final String ID = "http://streampipes.org/adapter/specific/mysql";
-
-  static final String HOST = "mysqlHost";
-  static final String PORT = "mysqlPort";
-  static final String DATABASE = "mysqlDatabase";
-  static final String TABLE = "mysqlTable";
-  static final String USER = "mysqlUser";
-  static final String PASSWORD = "mysqlPassword";
-
-  static final String REPLACE_NULL_VALUES = "replaceNullValues";
-  static final String DO_REPLACE_NULL_VALUES = "doReplaceNullValues";
-  static final String DO_NOT_REPLACE_NULL_VALUES = "doNotReplaceNullValues";
-
-  private String host;
-  private Integer port;
-  private String database;
-  private String table;
-
-  private String username;
-  private String password;
-
-
-  private List<Column> columns;
-
-  Connection connection;
-
-  MySqlClient(String host,
-              int port,
-              String database,
-              String table,
-              String username,
-              String password) {
-    this.host = host;
-    this.port = port;
-    this.database = database;
-    this.table = table;
-    this.username = username;
-    this.password = password;
-
-    connection = null;
-  }
-
-  public void connect() throws AdapterException {
-    checkJdbcDriver();
-    String server = "jdbc:mysql://" + host + ":" + port + "/" + "?sslMode=DISABLED&allowPublicKeyRetrieval=true";
-    try {
-      connection = DriverManager.getConnection(server, username, password);
-    } catch (SQLException e) {
-      throw new AdapterException("Could not connect to server: " + e.getMessage());
-    }
-  }
-
-  public void disconnect() throws AdapterException {
-    if (connection != null) {
-      try {
-        connection.close();
-      } catch (SQLException e) {
-        throw new AdapterException("Error while disconnecting: " + e.getMessage());
-      }
-      connection = null;
-    }
-  }
-
-  public GuessSchema getSchema() throws AdapterException {
-    connect();
-    loadColumns();
-
-    EventSchema eventSchema = new EventSchema();
-    GuessSchema guessSchema = new GuessSchema();
-    List<EventProperty> allProperties = new ArrayList<>();
-
-    for (Column column : columns) {
-      if (SO.DateTime.equals(column.getDomainProperty())) {
-        allProperties.add(PrimitivePropertyBuilder
-                .create(column.getType(), column.getName())
-                .label(column.getName())
-                .domainProperty(SO.DateTime)
-                .build());
-      } else {
-        allProperties.add(PrimitivePropertyBuilder
-                .create(column.getType(), column.getName())
-                .label(column.getName())
-                .build());
-      }
-    }
-
-    eventSchema.setEventProperties(allProperties);
-    guessSchema.setEventSchema(eventSchema);
-
-    disconnect();
-    return guessSchema;
-  }
-
-  /**
-   * Checks that the MySql-JDBC-Driver is "installed". Throws an AdapterException otherwise
-   */
-  private void checkJdbcDriver() throws AdapterException {
-    try {
-      Class.forName("com.mysql.cj.jdbc.Driver");
-    } catch (ClassNotFoundException e) {
-      throw new AdapterException("MySql Driver not found.");
-    }
-  }
-
-  /**
-   * Fills the columns with the columns from the SQL Table
-   */
-  public void loadColumns() throws AdapterException {
-    if (connection == null) {
-      throw new AdapterException("Client must be connected in order to load the columns");
-    }
-    ResultSet resultSet = null;
-    columns = new ArrayList<>();
-
-    String query = "SELECT COLUMN_NAME, DATA_TYPE, COLUMN_TYPE FROM "
-            + "INFORMATION_SCHEMA.COLUMNS WHERE TABLE_NAME = ? AND TABLE_SCHEMA = ? ORDER BY "
-            + "ORDINAL_POSITION ASC;";
-
-    try (PreparedStatement statement = connection.prepareStatement(query)) {
-
-      statement.setString(1, table);
-      statement.setString(2, database);
-      resultSet = statement.executeQuery();
-
-      if (resultSet.next()) {
-        do {
-          String name = resultSet.getString("COLUMN_NAME");
-          String dataType = resultSet.getString("DATA_TYPE");
-          String columnType = resultSet.getString("COLUMN_TYPE");
-          columns.add(new Column(name, dataType, columnType));
-        } while(resultSet.next());
-      } else {
-        // No columns found -> Table/Database does not exist
-        throw new IllegalArgumentException("Database/table not found");
-      }
-    } catch (SQLException e) {
-      throw new AdapterException("SqlException while loading columns: " + e.getMessage()
-              + ", Error code: " + e.getErrorCode()
-              + ", SqlState: " + e.getSQLState());
-    } finally {
-      try {
-        resultSet.close();
-      } catch (Exception e) {}
-    }
-  }
-
-  public String getHost() {
-    return host;
-  }
-
-  public Integer getPort() {
-    return port;
-  }
-
-  public String getDatabase() {
-    return database;
-  }
-
-  public String getTable() {
-    return table;
-  }
-
-  public String getUsername() {
-    return username;
-  }
-
-  public String getPassword() {
-    return password;
-  }
-
-  public List<Column> getColumns() {
-    return columns;
-  }
-
-  public boolean isConnected() {
-    return connection != null;
-  }
-
-  Connection getConnection() {
-    return connection;
-  }
-}
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlSetAdapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlSetAdapter.java
deleted file mode 100644
index 87769ee..0000000
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlSetAdapter.java
+++ /dev/null
@@ -1,203 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.streampipes.connect.iiot.adapters.mysql;
-
-import org.apache.streampipes.connect.adapter.Adapter;
-import org.apache.streampipes.connect.api.exception.AdapterException;
-import org.apache.streampipes.connect.api.exception.ParseException;
-import org.apache.streampipes.connect.adapter.model.specific.SpecificDataSetAdapter;
-import org.apache.streampipes.connect.adapter.sdk.ParameterExtractor;
-import org.apache.streampipes.model.connect.adapter.SpecificAdapterSetDescription;
-import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.sdk.builder.adapter.SpecificDataSetAdapterBuilder;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.helpers.Options;
-import org.apache.streampipes.sdk.helpers.Tuple2;
-import org.apache.streampipes.sdk.utils.Assets;
-
-import java.sql.ResultSet;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MySqlSetAdapter extends SpecificDataSetAdapter {
-
-    public static final String ID = "org.apache.streampipes.connect.iiot.adapters.mysql.set";
-
-    private MySqlClient mySqlClient;
-    private Thread fetchDataThread;
-
-    private boolean replaceNullValues;
-
-    public static class FetchDataThread implements Runnable {
-
-        MySqlSetAdapter mySqlSetAdapter;
-        MySqlClient mySqlClient;
-
-        public FetchDataThread(MySqlSetAdapter mySqlSetAdapter) throws AdapterException {
-            this.mySqlSetAdapter = mySqlSetAdapter;
-            this.mySqlClient = mySqlSetAdapter.getMySqlClient();
-
-            mySqlClient.connect();
-            mySqlClient.loadColumns();
-        }
-
-        @Override
-        public void run() {
-            if (!mySqlClient.isConnected()) {
-                System.out.println("Cannot start PollingThread, when the client is not connected");
-                return;
-            }
-            // No batch approach like in the influx adapter due to the lack of a unique key in the table
-            // Create the columnString:
-            StringBuilder sb = new StringBuilder();
-            for (Column column : mySqlClient.getColumns()) {
-                sb.append(column.getName()).append(", ");
-            }
-            sb.setLength(Math.max(0, sb.length() - 2));
-
-            String query = "SELECT " + sb.toString() + " FROM " + mySqlClient.getDatabase() + "." + mySqlClient.getTable();
-
-            try (Statement statement = mySqlClient.getConnection().createStatement()) {
-                boolean executed = statement.execute(query);
-                if (executed) {
-                    ResultSet resultSet = statement.getResultSet();
-                    while (resultSet.next()) {
-
-                        // Retrieve by column name
-                        Map<String, Object> event = new HashMap<>();
-                        for (Column column : mySqlClient.getColumns()) {
-                            Object in = resultSet.getObject(column.getName());
-                            if (in == null) {
-                                if (mySqlSetAdapter.replaceNullValues) {
-                                    in = column.getDefault();
-                                } else {
-                                    // We do not want to send this event (replaceNullValues == false)
-                                    event = null;
-                                    break;
-                                }
-                            }
-                            event.put(column.getName(), in);
-                        }
-                        if (event != null) {
-                            mySqlSetAdapter.send(event);
-                        }
-                    }
-                    resultSet.close();
-                }
-            } catch (SQLException e) {
-                System.out.println(e.getMessage());
-            }
-
-            try {
-                mySqlClient.disconnect();
-            } catch (AdapterException e) {
-                e.printStackTrace();
-            }
-        }
-    }
-
-    public MySqlSetAdapter() {
-    }
-
-    public MySqlSetAdapter(SpecificAdapterSetDescription adapterDescription) {
-        super(adapterDescription);
-
-        getConfigurations(adapterDescription);
-    }
-
-
-    @Override
-    public SpecificAdapterSetDescription declareModel() {
-        SpecificAdapterSetDescription description = SpecificDataSetAdapterBuilder.create(ID)
-                .withAssets(Assets.DOCUMENTATION, Assets.ICON)
-                .withLocales(Locales.EN)
-                .requiredTextParameter(Labels.withId(MySqlClient.HOST))
-                .requiredIntegerParameter(Labels.withId(MySqlClient.PORT), 3306)
-                .requiredTextParameter(Labels.withId(MySqlClient.DATABASE))
-                .requiredTextParameter(Labels.withId(MySqlClient.TABLE))
-                .requiredTextParameter(Labels.withId(MySqlClient.USER))
-                .requiredSecret(Labels.withId(MySqlClient.PASSWORD))
-                .requiredSingleValueSelection(Labels.withId(MySqlClient.REPLACE_NULL_VALUES),
-                        Options.from(
-                                new Tuple2<>("Yes", MySqlClient.DO_REPLACE_NULL_VALUES),
-                                new Tuple2<>("No", MySqlClient.DO_NOT_REPLACE_NULL_VALUES)))
-                .build();
-
-        description.setAppId(ID);
-        return description;
-    }
-
-    @Override
-    public void startAdapter() throws AdapterException {
-        fetchDataThread = new Thread(new FetchDataThread(this));
-        fetchDataThread.start();
-    }
-
-    @Override
-    public void stopAdapter() throws AdapterException {
-        fetchDataThread.interrupt();
-        try {
-            fetchDataThread.join();
-        } catch (InterruptedException e) {
-            throw new AdapterException("Unexpected Error while joining polling thread: " + e.getMessage());
-        }
-    }
-
-    @Override
-    public Adapter getInstance(SpecificAdapterSetDescription adapterDescription) {
-        return new MySqlSetAdapter(adapterDescription);
-    }
-
-    @Override
-    public GuessSchema getSchema(SpecificAdapterSetDescription adapterDescription) throws AdapterException, ParseException {
-        getConfigurations(adapterDescription);
-        return mySqlClient.getSchema();
-    }
-
-    @Override
-    public String getId() {
-        return ID;
-    }
-
-    private void send(Map<String, Object> map) {
-        adapterPipeline.process(map);
-    }
-
-    private void getConfigurations(SpecificAdapterSetDescription adapterDescription) {
-        ParameterExtractor extractor = new ParameterExtractor(adapterDescription.getConfig());
-
-        String replace = extractor.selectedSingleValueInternalName(MySqlClient.REPLACE_NULL_VALUES);
-        replaceNullValues = replace.equals(MySqlClient.DO_REPLACE_NULL_VALUES);
-
-        mySqlClient = new MySqlClient(
-                extractor.singleValue(MySqlClient.HOST, String.class),
-                extractor.singleValue(MySqlClient.PORT, Integer.class),
-                extractor.singleValue(MySqlClient.DATABASE, String.class),
-                extractor.singleValue(MySqlClient.TABLE, String.class),
-                extractor.singleValue(MySqlClient.USER, String.class),
-                extractor.secretValue(MySqlClient.PASSWORD));
-    }
-
-    public MySqlClient getMySqlClient() {
-        return mySqlClient;
-    }
-}
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlStreamAdapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlStreamAdapter.java
deleted file mode 100644
index c36e14a..0000000
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/mysql/MySqlStreamAdapter.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- *
- */
-
-package org.apache.streampipes.connect.iiot.adapters.mysql;
-
-import com.github.shyiko.mysql.binlog.BinaryLogClient;
-import com.github.shyiko.mysql.binlog.event.*;
-import com.github.shyiko.mysql.binlog.event.deserialization.EventDeserializer;
-import org.apache.streampipes.connect.adapter.Adapter;
-import org.apache.streampipes.connect.api.exception.AdapterException;
-import org.apache.streampipes.connect.api.exception.ParseException;
-import org.apache.streampipes.connect.adapter.model.specific.SpecificDataStreamAdapter;
-import org.apache.streampipes.connect.adapter.sdk.ParameterExtractor;
-import org.apache.streampipes.model.connect.adapter.SpecificAdapterStreamDescription;
-import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.sdk.builder.adapter.SpecificDataStreamAdapterBuilder;
-import org.apache.streampipes.sdk.helpers.Labels;
-import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.helpers.Options;
-import org.apache.streampipes.sdk.helpers.Tuple2;
-import org.apache.streampipes.sdk.utils.Assets;
-
-import java.io.IOException;
-import java.io.Serializable;
-import java.util.HashMap;
-import java.util.Map;
-
-public class MySqlStreamAdapter extends SpecificDataStreamAdapter {
-
-    public static final String ID = "org.apache.streampipes.connect.iiot.adapters.mysql.stream";
-
-    private MySqlClient mySqlClient;
-    private BinaryLogClient binaryLogClient;
-
-    private Thread subscriptionThread  = new Thread(()-> {
-        try {
-            binaryLogClient.connect();
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
-    });
-
-    private boolean replaceNullValues;
-    private boolean dataComing = false;
-
-    public MySqlStreamAdapter() {
-    }
-
-    public MySqlStreamAdapter(SpecificAdapterStreamDescription adapterDescription) {
-        super(adapterDescription);
-
-        getConfigurations(adapterDescription);
-    }
-
-    @Override
-    public SpecificAdapterStreamDescription declareModel() {
-        SpecificAdapterStreamDescription description = SpecificDataStreamAdapterBuilder.create(ID)
-                .withAssets(Assets.DOCUMENTATION, Assets.ICON)
-                .withLocales(Locales.EN)
-                .requiredTextParameter(Labels.withId(MySqlClient.HOST))
-                .requiredIntegerParameter(Labels.withId(MySqlClient.PORT), 3306)
-                .requiredTextParameter(Labels.withId(MySqlClient.DATABASE))
-                .requiredTextParameter(Labels.withId(MySqlClient.TABLE))
-                .requiredTextParameter(Labels.withId(MySqlClient.USER))
-                .requiredSecret(Labels.withId(MySqlClient.PASSWORD))
-                .requiredSingleValueSelection(Labels.withId(MySqlClient.REPLACE_NULL_VALUES),
-                        Options.from(
-                                new Tuple2<>("Yes", MySqlClient.DO_REPLACE_NULL_VALUES),
-                                new Tuple2<>("No", MySqlClient.DO_NOT_REPLACE_NULL_VALUES)))
-                .build();
-
-        description.setAppId(ID);
-        return description;
-    }
-
-    @Override
-    public void startAdapter() throws AdapterException {
-        // Making sure, that the columns are all loaded
-        mySqlClient.connect();
-        mySqlClient.loadColumns();
-        mySqlClient.disconnect();
-
-        // Connect BinaryLogClient
-        binaryLogClient = new BinaryLogClient(
-                mySqlClient.getHost(),
-                mySqlClient.getPort(),
-                mySqlClient.getUsername(),
-                mySqlClient.getPassword());
-
-        EventDeserializer eventDeserializer = new EventDeserializer();
-        eventDeserializer.setCompatibilityMode(
-                EventDeserializer.CompatibilityMode.DATE_AND_TIME_AS_LONG,
-                EventDeserializer.CompatibilityMode.CHAR_AND_BINARY_AS_BYTE_ARRAY
-        );
-        binaryLogClient.setEventDeserializer(eventDeserializer);
-        binaryLogClient.registerEventListener(event -> sendEvent(event));
-        subscriptionThread.start();
-    }
-
-
-    private void sendEvent(Event event) {
-        // An event can contain multiple insertions/updates
-        if (event.getHeader().getEventType() == EventType.TABLE_MAP) {
-            // Check table and database, if the next event should be streamed
-            if (((TableMapEventData) event.getData()).getDatabase().equals(mySqlClient.getDatabase())
-                    && ((TableMapEventData) event.getData()).getTable().equals((mySqlClient.getTable()))) {
-                dataComing = true;
-            }
-        }
-        if (dataComing) {
-            if (EventType.isUpdate(event.getHeader().getEventType())) {
-                for (Map.Entry<Serializable[], Serializable[]> en : ((UpdateRowsEventData) event.getData()).getRows()) {
-                    sendChange(en.getValue());
-                }
-                dataComing = false;
-            } else if (EventType.isWrite(event.getHeader().getEventType())) {
-                for (Serializable[] s : ((WriteRowsEventData) event.getData()).getRows()) {
-                    sendChange(s);
-                }
-                dataComing = false;
-            }
-        }
-    }
-
-    private void sendChange(Serializable[] rows) {
-        Map<String, Object> out = new HashMap<>();
-        for (int i = 0; i < rows.length; i++) {
-            if (rows[i] != null) {
-                if (rows[i] instanceof byte[]) {
-                    // Strings are sent in byte arrays and have to be converted.
-                    //TODO: Check that encoding is correct
-                    out.put(mySqlClient.getColumns().get(i).getName(), new String((byte[])rows[i]));
-                } else {
-                    out.put(mySqlClient.getColumns().get(i).getName(), rows[i]);
-                }
-            } else if (replaceNullValues) {
-                out.put(mySqlClient.getColumns().get(i).getName(), mySqlClient.getColumns().get(i).getDefault());
-            } else {
-                // We should skip events with null values
-                return;
-            }
-        }
-        adapterPipeline.process(out);
-    }
-
-    @Override
-    public void stopAdapter() throws AdapterException {
-        try {
-            binaryLogClient.disconnect();
-            subscriptionThread.join();
-        } catch (IOException | InterruptedException e) {
-            throw new AdapterException("Thrown exception: " + e.getMessage());
-        }
-    }
-
-    @Override
-    public Adapter getInstance(SpecificAdapterStreamDescription adapterDescription) {
-        return new MySqlStreamAdapter(adapterDescription);
-    }
-
-    @Override
-    public GuessSchema getSchema(SpecificAdapterStreamDescription adapterDescription) throws AdapterException, ParseException {
-        getConfigurations(adapterDescription);
-        return mySqlClient.getSchema();
-    }
-
-    @Override
-    public String getId() {
-        return ID;
-    }
-
-    private void getConfigurations(SpecificAdapterStreamDescription adapterDescription) {
-        ParameterExtractor extractor = new ParameterExtractor(adapterDescription.getConfig());
-
-        String replace = extractor.selectedSingleValueInternalName(MySqlClient.REPLACE_NULL_VALUES);
-        replaceNullValues = replace.equals(MySqlClient.DO_REPLACE_NULL_VALUES);
-
-        mySqlClient = new MySqlClient(
-                extractor.singleValue(MySqlClient.HOST, String.class),
-                extractor.singleValue(MySqlClient.PORT, Integer.class),
-                extractor.singleValue(MySqlClient.DATABASE, String.class),
-                extractor.singleValue(MySqlClient.TABLE, String.class),
-                extractor.singleValue(MySqlClient.USER, String.class),
-                extractor.secretValue(MySqlClient.PASSWORD));
-    }
-}
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/MiloOpcUaConfigurationProvider.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/MiloOpcUaConfigurationProvider.java
index e4336d8..7145858 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/MiloOpcUaConfigurationProvider.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/MiloOpcUaConfigurationProvider.java
@@ -18,6 +18,7 @@
 
 package org.apache.streampipes.connect.iiot.adapters.opcua;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.connect.iiot.adapters.opcua.configuration.SpOpcUaConfig;
 import org.eclipse.milo.opcua.sdk.client.api.config.OpcUaClientConfig;
 import org.eclipse.milo.opcua.sdk.client.api.config.OpcUaClientConfigBuilder;
@@ -31,10 +32,11 @@
 import java.net.URISyntaxException;
 import java.util.Collections;
 import java.util.List;
+import java.util.concurrent.ExecutionException;
 
 public class MiloOpcUaConfigurationProvider {
 
-  public OpcUaClientConfig makeClientConfig(SpOpcUaConfig spOpcConfig) throws Exception {
+  public OpcUaClientConfig makeClientConfig(SpOpcUaConfig spOpcConfig) throws ExecutionException, InterruptedException, SpConfigurationException, URISyntaxException {
     String opcServerUrl = spOpcConfig.getOpcServerURL();
     List<EndpointDescription> endpoints = DiscoveryClient.getEndpoints(opcServerUrl).get();
     String host = opcServerUrl.split("://")[1].split(":")[0];
@@ -43,7 +45,7 @@
             .stream()
             .filter(e -> e.getSecurityPolicyUri().equals(SecurityPolicy.None.getUri()))
             .findFirst()
-            .orElseThrow(() -> new Exception("No endpoint with security policy none"));
+            .orElseThrow(() -> new SpConfigurationException("No endpoint with security policy none"));
 
     tmpEndpoint = updateEndpointUrl(tmpEndpoint, host);
     endpoints = Collections.singletonList(tmpEndpoint);
@@ -51,7 +53,7 @@
     EndpointDescription endpoint = endpoints
             .stream()
             .filter(e -> e.getSecurityPolicyUri().equals(SecurityPolicy.None.getUri()))
-            .findFirst().orElseThrow(() -> new Exception("no desired endpoints returned"));
+            .findFirst().orElseThrow(() -> new SpConfigurationException("no desired endpoints returned"));
 
     return buildConfig(endpoint, spOpcConfig);
   }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaAdapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaAdapter.java
index 525ca38..34016cb 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaAdapter.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaAdapter.java
@@ -18,6 +18,7 @@
 
 package org.apache.streampipes.connect.iiot.adapters.opcua;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.connect.adapter.Adapter;
 import org.apache.streampipes.connect.adapter.util.PollingSettings;
 import org.apache.streampipes.connect.api.exception.AdapterException;
@@ -30,6 +31,7 @@
 import org.apache.streampipes.model.AdapterType;
 import org.apache.streampipes.model.connect.adapter.SpecificAdapterStreamDescription;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
+import org.apache.streampipes.model.connect.rules.schema.DeleteRuleDescription;
 import org.apache.streampipes.model.staticproperty.StaticProperty;
 import org.apache.streampipes.sdk.StaticProperties;
 import org.apache.streampipes.sdk.builder.adapter.SpecificDataStreamAdapterBuilder;
@@ -41,49 +43,69 @@
 import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaMonitoredItem;
 import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue;
 import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId;
+import org.eclipse.milo.opcua.stack.core.types.builtin.StatusCode;
 import org.eclipse.milo.opcua.stack.core.types.enumerated.TimestampsToReturn;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.*;
-import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.stream.Collectors;
 
 public class OpcUaAdapter extends PullAdapter implements SupportsRuntimeConfig {
 
     public static final String ID = "org.apache.streampipes.connect.iiot.adapters.opcua";
+    private static final Logger LOG = LoggerFactory.getLogger(OpcUaAdapter.class);
 
     private int pullingIntervalMilliSeconds;
     private SpOpcUaClient spOpcUaClient;
     private List<OpcNode> allNodes;
     private List<NodeId> allNodeIds;
     private int numberProperties;
-    private Map<String, Object> event;
+    private final Map<String, Object> event;
+
+    /**
+     * Maps the node ids received during a subscription to the corresponding node labels.
+     */
+    private final Map<String, String> nodeIdToLabelMapping;
 
     public OpcUaAdapter() {
         super();
         this.numberProperties = 0;
         this.event = new HashMap<>();
+        this.nodeIdToLabelMapping = new HashMap<>();
     }
 
     public OpcUaAdapter(SpecificAdapterStreamDescription adapterStreamDescription) {
         super(adapterStreamDescription);
         this.numberProperties = 0;
         this.event = new HashMap<>();
+        this.nodeIdToLabelMapping = new HashMap<>();
     }
 
     @Override
     protected void before() throws AdapterException {
 
         this.allNodeIds = new ArrayList<>();
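+        // collect the runtime keys removed via schema delete rules so they can be excluded from the node lookup below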
+        List<String> deleteKeys = this.adapterDescription
+          .getSchemaRules()
+          .stream()
+          .filter(rule -> rule instanceof DeleteRuleDescription)
+          .map(rule -> ((DeleteRuleDescription) rule).getRuntimeKey())
+          .collect(Collectors.toList());
+
         try {
             this.spOpcUaClient.connect();
-            OpcUaNodeBrowser browserClient = new OpcUaNodeBrowser(this.spOpcUaClient.getClient(), this.spOpcUaClient.getSpOpcConfig());
-            this.allNodes = browserClient.findNodes();
+            OpcUaNodeBrowser browserClient =
+                    new OpcUaNodeBrowser(this.spOpcUaClient.getClient(), this.spOpcUaClient.getSpOpcConfig());
+            this.allNodes = browserClient.findNodes(deleteKeys);
 
 
-                for (OpcNode node : this.allNodes) {
-                    this.allNodeIds.add(node.nodeId);
-                }
+            for (OpcNode node : this.allNodes) {
+                this.allNodeIds.add(node.nodeId);
+            }
 
             if (spOpcUaClient.inPullMode()) {
                 this.pullingIntervalMilliSeconds = spOpcUaClient.getPullIntervalMilliSeconds();
@@ -92,13 +114,15 @@
                 this.spOpcUaClient.createListSubscription(this.allNodeIds, this);
             }
 
+            this.allNodes.forEach(node -> this.nodeIdToLabelMapping.put(node.getNodeId().toString(), node.getLabel()));
+
 
         } catch (Exception e) {
-            throw new AdapterException("The Connection to the OPC UA server could not be established.");
+            throw new AdapterException("The Connection to the OPC UA server could not be established.", e.getCause());
         }
     }
 
-        @Override
+    @Override
     public void startAdapter() throws AdapterException {
 
         this.spOpcUaClient = new SpOpcUaClient(SpOpcUaConfigBuilder.from(this.adapterDescription));
@@ -115,49 +139,67 @@
         // close connection
         this.spOpcUaClient.disconnect();
 
-        if (this.spOpcUaClient.inPullMode()){
+        if (this.spOpcUaClient.inPullMode()) {
             super.stopAdapter();
         }
     }
 
     @Override
     protected void pullData() {
-        CompletableFuture<List<DataValue>> response = this.spOpcUaClient.getClient().readValues(0, TimestampsToReturn.Both, this.allNodeIds);
+        var response =
+          this.spOpcUaClient.getClient().readValues(0, TimestampsToReturn.Both, this.allNodeIds);
+        boolean badStatusCodeReceived = false;
+        boolean emptyValueReceived = false;
         try {
-        List<DataValue> returnValues = response.get();
-            for (int i = 0; i<returnValues.size(); i++) {
-
-                Object value = returnValues.get(i).getValue().getValue();
-                this.event.put(this.allNodes.get(i).getLabel(), value);
-
+            List<DataValue> returnValues = response.get(this.getPollingInterval().getValue(), this.getPollingInterval().getTimeUnit());
+            if (returnValues.size() == 0) {
+                emptyValueReceived = true;
+                LOG.warn("Empty value object returned - event will not be sent");
+            } else {
+                for (int i = 0; i < returnValues.size(); i++) {
+                    var status = returnValues.get(i).getStatusCode();
+                    if (StatusCode.GOOD.equals(status)) {
+                        Object value = returnValues.get(i).getValue().getValue();
+                        this.event.put(this.allNodes.get(i).getLabel(), value);
+                    } else {
+                        badStatusCodeReceived = true;
+                        LOG.warn("Received status code {} for node label: {} - event will not be sent",
+                                status,
+                                this.allNodes.get(i).getLabel());
+                    }
+                }
             }
-         } catch (InterruptedException | ExecutionException ie) {
-            ie.printStackTrace();
-         }
-
-        adapterPipeline.process(this.event);
-
+            if (!badStatusCodeReceived && !emptyValueReceived) {
+                adapterPipeline.process(this.event);
+            }
+        } catch (InterruptedException | ExecutionException | TimeoutException ie) {
+            LOG.error("Exception while reading data", ie);
+        }
     }
 
     public void onSubscriptionValue(UaMonitoredItem item, DataValue value) {
 
-        String key = OpcUaUtil.getRuntimeNameOfNode(item.getReadValueId().getNodeId());
+        String key = this.nodeIdToLabelMapping.get(item.getReadValueId().getNodeId().toString());
 
         OpcNode currNode = this.allNodes.stream()
-                .filter(node -> key.equals(node.getNodeId().getIdentifier().toString()))
+                .filter(node -> key.equals(node.getLabel()))
                 .findFirst()
                 .orElse(null);
 
-        event.put(currNode.getLabel(), value.getValue().getValue());
+        if (currNode != null) {
+            event.put(currNode.getLabel(), value.getValue().getValue());
 
-        // ensure that event is complete and all opc ua subscriptions transmitted at least one value
-        if (event.keySet().size() >= this.numberProperties) {
-            Map <String, Object> newEvent = new HashMap<>();
-            // deep copy of event to prevent preprocessor error
-            for (String k : event.keySet()) {
-                newEvent.put(k, event.get(k));
+            // ensure that event is complete and all opc ua subscriptions transmitted at least one value
+            if (event.keySet().size() >= this.numberProperties) {
+                Map<String, Object> newEvent = new HashMap<>();
+                // deep copy of event to prevent preprocessor error
+                for (String k : event.keySet()) {
+                    newEvent.put(k, event.get(k));
+                }
+                adapterPipeline.process(newEvent);
             }
-            adapterPipeline.process(newEvent);
+        } else {
+            LOG.error("No event is produced because subscription item {} could not be found among the configured nodes", item);
         }
     }
 
@@ -176,27 +218,32 @@
                 .category(AdapterType.Generic, AdapterType.Manufacturing)
                 .requiredAlternatives(Labels.withId(OpcUaLabels.ADAPTER_TYPE.name()),
                         Alternatives.from(Labels.withId(OpcUaLabels.PULL_MODE.name()),
-                                StaticProperties.integerFreeTextProperty(Labels.withId(OpcUaLabels.PULLING_INTERVAL.name()))),
+                                StaticProperties.integerFreeTextProperty(
+                                        Labels.withId(OpcUaLabels.PULLING_INTERVAL.name()))),
                         Alternatives.from(Labels.withId(OpcUaLabels.SUBSCRIPTION_MODE.name())))
                 .requiredAlternatives(Labels.withId(OpcUaLabels.ACCESS_MODE.name()),
                         Alternatives.from(Labels.withId(OpcUaLabels.UNAUTHENTICATED.name())),
                         Alternatives.from(Labels.withId(OpcUaLabels.USERNAME_GROUP.name()),
                                 StaticProperties.group(
                                         Labels.withId(OpcUaLabels.USERNAME_GROUP.name()),
-                                        StaticProperties.stringFreeTextProperty(Labels.withId(OpcUaLabels.USERNAME.name())),
+                                        StaticProperties.stringFreeTextProperty(
+                                                Labels.withId(OpcUaLabels.USERNAME.name())),
                                         StaticProperties.secretValue(Labels.withId(OpcUaLabels.PASSWORD.name()))
                                 ))
                 )
                 .requiredAlternatives(Labels.withId(OpcUaLabels.OPC_HOST_OR_URL.name()),
                         Alternatives.from(
                                 Labels.withId(OpcUaLabels.OPC_URL.name()),
-                                StaticProperties.stringFreeTextProperty(Labels.withId(OpcUaLabels.OPC_SERVER_URL.name())))
+                                StaticProperties.stringFreeTextProperty(
+                                        Labels.withId(OpcUaLabels.OPC_SERVER_URL.name()), "opc.tcp://localhost:4840"))
                         ,
                         Alternatives.from(Labels.withId(OpcUaLabels.OPC_HOST.name()),
                                 StaticProperties.group(
                                         Labels.withId("host-port"),
-                                        StaticProperties.stringFreeTextProperty(Labels.withId(OpcUaLabels.OPC_SERVER_HOST.name())),
-                                        StaticProperties.stringFreeTextProperty(Labels.withId(OpcUaLabels.OPC_SERVER_PORT.name()))
+                                        StaticProperties.stringFreeTextProperty(
+                                                Labels.withId(OpcUaLabels.OPC_SERVER_HOST.name())),
+                                        StaticProperties.stringFreeTextProperty(
+                                                Labels.withId(OpcUaLabels.OPC_SERVER_PORT.name()))
                                 ))
                 )
                 .requiredTextParameter(Labels.withId(OpcUaLabels.NAMESPACE_INDEX.name()))
@@ -218,7 +265,8 @@
     }
 
     @Override
-    public GuessSchema getSchema(SpecificAdapterStreamDescription adapterDescription) throws AdapterException, ParseException {
+    public GuessSchema getSchema(SpecificAdapterStreamDescription adapterDescription)
+            throws AdapterException, ParseException {
         return OpcUaUtil.getSchema(adapterDescription);
     }
 
@@ -228,7 +276,8 @@
     }
 
     @Override
-    public StaticProperty resolveConfiguration(String staticPropertyInternalName, StaticPropertyExtractor extractor) {
+    public StaticProperty resolveConfiguration(String staticPropertyInternalName,
+                                               StaticPropertyExtractor extractor) throws SpConfigurationException {
         return OpcUaUtil.resolveConfiguration(staticPropertyInternalName, extractor);
     }
 }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaNodeBrowser.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaNodeBrowser.java
index 5a9f2fb..c1cbc20 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaNodeBrowser.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/OpcUaNodeBrowser.java
@@ -23,13 +23,15 @@
 import org.apache.streampipes.model.staticproperty.TreeInputNode;
 import org.eclipse.milo.opcua.sdk.client.AddressSpace;
 import org.eclipse.milo.opcua.sdk.client.OpcUaClient;
-import org.eclipse.milo.opcua.sdk.client.model.nodes.variables.BaseDataVariableTypeNode;
 import org.eclipse.milo.opcua.sdk.client.nodes.UaNode;
+import org.eclipse.milo.opcua.sdk.client.nodes.UaVariableNode;
 import org.eclipse.milo.opcua.stack.core.UaException;
 import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId;
 import org.eclipse.milo.opcua.stack.core.types.builtin.StatusCode;
 import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger;
 import org.eclipse.milo.opcua.stack.core.types.enumerated.NodeClass;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -43,21 +45,30 @@
   private final OpcUaClient client;
   private final SpOpcUaConfig spOpcConfig;
 
+  private static final Logger LOG = LoggerFactory.getLogger(OpcUaNodeBrowser.class);
+
   public OpcUaNodeBrowser(OpcUaClient client,
                           SpOpcUaConfig spOpcUaClientConfig) {
     this.client = client;
     this.spOpcConfig = spOpcUaClientConfig;
   }
 
-  public List<OpcNode> findNodes() {
-    return this.spOpcConfig.getSelectedNodeNames().stream().map(n -> {
-      try {
-        return toOpcNode(n);
-      } catch (UaException e) {
-        e.printStackTrace();
-        return null;
-      }
-    }).collect(Collectors.toList());
+  public List<OpcNode> findNodes() throws UaException {
+    var opcNodes = new ArrayList<OpcNode>();
+    for (String selectedNodeName : this.spOpcConfig.getSelectedNodeNames()) {
+      opcNodes.add(toOpcNode(selectedNodeName));
+    }
+
+    return opcNodes;
+  }
+
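+  /**
+   * Returns all selected nodes whose label does not match any of the given runtime name filters
+   * (e.g. the runtime keys removed through schema delete rules).
+   */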
+  public List<OpcNode> findNodes(List<String> runtimeNameFilters) throws UaException {
+    return findNodes()
+      .stream()
+      .filter(node -> runtimeNameFilters
+        .stream()
+        .noneMatch(f -> f.equals(node.getLabel())))
+      .collect(Collectors.toList());
   }
 
   public List<TreeInputNode> buildNodeTreeFromOrigin() throws UaException, ExecutionException, InterruptedException {
@@ -80,11 +91,15 @@
     NodeId nodeId = NodeId.parse(nodeName);
     UaNode node = addressSpace.getNode(nodeId);
 
-    if (node instanceof BaseDataVariableTypeNode) {
-      UInteger value = (UInteger) ((BaseDataVariableTypeNode) node).getDataType().getIdentifier();
+    LOG.info("Using node of type {}", node.getNodeClass());
+
+    if (node instanceof UaVariableNode) {
+      UInteger value = (UInteger) ((UaVariableNode) node).getDataType().getIdentifier();
       return new OpcNode(node.getDisplayName().getText(), OpcUaTypes.getType(value), node.getNodeId());
     }
 
+    LOG.warn("Node {} not of type UaVariableNode", node.getDisplayName());
+
-    throw new UaException(StatusCode.BAD, "Node is not of type BaseDataVariableTypeNode");
+    throw new UaException(StatusCode.BAD, "Node is not of type UaVariableNode");
   }
 
@@ -112,6 +127,6 @@
   }
 
   private boolean isDataNode(UaNode node) {
-    return node.getNodeClass().equals(NodeClass.Variable) && node instanceof BaseDataVariableTypeNode;
+    return (node.getNodeClass().equals(NodeClass.Variable) || node.getNodeClass().equals(NodeClass.VariableType))
+        && node instanceof UaVariableNode;
   }
 }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/SpOpcUaClient.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/SpOpcUaClient.java
index da00cbf..299c4f0 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/SpOpcUaClient.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/SpOpcUaClient.java
@@ -19,15 +19,19 @@
 package org.apache.streampipes.connect.iiot.adapters.opcua;
 
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.connect.iiot.adapters.opcua.configuration.SpOpcUaConfig;
 import org.eclipse.milo.opcua.sdk.client.OpcUaClient;
 import org.eclipse.milo.opcua.sdk.client.api.config.OpcUaClientConfig;
 import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaMonitoredItem;
 import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaSubscription;
+import org.eclipse.milo.opcua.sdk.client.api.subscriptions.UaSubscriptionManager;
 import org.eclipse.milo.opcua.stack.core.AttributeId;
+import org.eclipse.milo.opcua.stack.core.UaException;
 import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue;
 import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId;
 import org.eclipse.milo.opcua.stack.core.types.builtin.QualifiedName;
+import org.eclipse.milo.opcua.stack.core.types.builtin.StatusCode;
 import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger;
 import org.eclipse.milo.opcua.stack.core.types.enumerated.MonitoringMode;
 import org.eclipse.milo.opcua.stack.core.types.enumerated.TimestampsToReturn;
@@ -37,9 +41,11 @@
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import java.net.URISyntaxException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.concurrent.CompletableFuture;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.atomic.AtomicLong;
 
 import static org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.Unsigned.uint;
@@ -71,9 +77,9 @@
     /***
      * Establishes appropriate connection to OPC UA endpoint depending on the {@link SpOpcUaClient} instance
      *
-     * @throws Exception An exception occurring during OPC connection
+     * @throws UaException An exception occurring during OPC connection
      */
-    public void connect() throws Exception {
+    public void connect() throws UaException, ExecutionException, InterruptedException, SpConfigurationException, URISyntaxException {
         OpcUaClientConfig clientConfig = new MiloOpcUaConfigurationProvider().makeClientConfig(spOpcConfig);
         this.client = OpcUaClient.create(clientConfig);
         client.connect().get();
@@ -89,7 +95,25 @@
      * @param opcUaAdapter current instance of {@link OpcUaAdapter}
      * @throws Exception
      */
-    public void createListSubscription(List<NodeId> nodes, OpcUaAdapter opcUaAdapter) throws Exception {
+    public void createListSubscription(List<NodeId> nodes,
+                                       OpcUaAdapter opcUaAdapter) throws Exception {
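+        // re-initialize the subscription whenever the server reports a failed subscription transfer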
+        client.getSubscriptionManager().addSubscriptionListener(new UaSubscriptionManager.SubscriptionListener() {
+            @Override
+            public void onSubscriptionTransferFailed(UaSubscription subscription, StatusCode statusCode) {
+                LOG.warn("Transfer for subscriptionId={} failed: {}", subscription.getSubscriptionId(), statusCode);
+                try {
+                    initSubscription(nodes, opcUaAdapter);
+                } catch (Exception e) {
+                    LOG.error("Re-creating the subscription failed", e);
+                }
+            }
+        });
+
+        initSubscription(nodes, opcUaAdapter);
+    }
+
+
+    public void initSubscription(List<NodeId> nodes, OpcUaAdapter opcUaAdapter) throws Exception {
         /*
          * create a subscription @ 1000ms
          */
diff --git a/ui/src/app/connect/components/format-item-json/format-item-json.component.scss b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/ExceptionMessageExtractor.java
similarity index 62%
copy from ui/src/app/connect/components/format-item-json/format-item-json.component.scss
copy to streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/ExceptionMessageExtractor.java
index 30123c0..7c1b453 100644
--- a/ui/src/app/connect/components/format-item-json/format-item-json.component.scss
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/ExceptionMessageExtractor.java
@@ -16,32 +16,23 @@
  *
  */
 
+package org.apache.streampipes.connect.iiot.adapters.opcua.utils;
 
-.format-label {
-  line-height:50px;
-  margin: auto;
-  text-align: center;
-  font-weight: bold;
-  font-size: 1.3em;
+import org.eclipse.milo.opcua.stack.core.UaException;
+
+public class ExceptionMessageExtractor {
+
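+  /**
+   * Attempts to extract a human-readable description from a {@link UaException} message, assuming the message
+   * contains comma-separated {@code key=value} segments; falls back to the full message if it cannot be parsed.
+   */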
+  public static String getDescription(UaException e) {
+    String[] parts = e.getMessage().split(", ");
+    if (parts.length > 1) {
+      String[] kv = parts[1].split("=");
+      if (kv.length > 1) {
+        return kv[1];
+      } else {
+        return parts[1];
+      }
+    } else {
+      return e.getMessage();
+    }
+  }
 }
-
-.format-box {
-  min-height: 50px;
-  box-shadow: 1px 1px 2px #555;
-  border: 1px solid gray;
-  cursor: pointer;
-  padding: 10px;
-  opacity: 0.7;
-  margin: 10px;
-  background: #ffffff;
-}
-
-.format-box:hover {
-  opacity: 1;
-}
-
-.selectedItem {
-  opacity: 1;
-  background-color: grey;
-}
-
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/OpcUaUtil.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/OpcUaUtil.java
index 38d4d4c..739dc19 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/OpcUaUtil.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/opcua/utils/OpcUaUtil.java
@@ -18,6 +18,7 @@
 
 package org.apache.streampipes.connect.iiot.adapters.opcua.utils;
 
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.connect.iiot.adapters.opcua.OpcNode;
@@ -25,7 +26,9 @@
 import org.apache.streampipes.connect.iiot.adapters.opcua.SpOpcUaClient;
 import org.apache.streampipes.connect.iiot.adapters.opcua.configuration.SpOpcUaConfigBuilder;
 import org.apache.streampipes.model.connect.adapter.SpecificAdapterStreamDescription;
+import org.apache.streampipes.model.connect.guess.FieldStatusInfo;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
+import org.apache.streampipes.model.connect.guess.GuessTypeInfo;
 import org.apache.streampipes.model.schema.EventProperty;
 import org.apache.streampipes.model.schema.EventSchema;
 import org.apache.streampipes.model.staticproperty.RuntimeResolvableTreeInputStaticProperty;
@@ -33,165 +36,220 @@
 import org.apache.streampipes.sdk.extractor.StaticPropertyExtractor;
 import org.eclipse.milo.opcua.sdk.client.OpcUaClient;
 import org.eclipse.milo.opcua.stack.core.UaException;
+import org.eclipse.milo.opcua.stack.core.types.builtin.DataValue;
 import org.eclipse.milo.opcua.stack.core.types.builtin.NodeId;
+import org.eclipse.milo.opcua.stack.core.types.builtin.StatusCode;
 import org.eclipse.milo.opcua.stack.core.types.builtin.unsigned.UInteger;
+import org.eclipse.milo.opcua.stack.core.types.enumerated.TimestampsToReturn;
 
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
+import java.util.concurrent.ExecutionException;
+import java.util.stream.Collectors;
 
 /***
  * Collection of several utility functions in context of OPC UA
  */
 public class OpcUaUtil {
 
-    /***
-     * Ensures server address starts with {@code opc.tcp://}
-     * @param serverAddress server address as given by user
-     * @return correctly formated server address
-     */
-    public static String formatServerAddress(String serverAddress) {
+  /***
+   * Ensures server address starts with {@code opc.tcp://}
+   * @param serverAddress server address as given by user
+   * @return correctly formatted server address
+   */
+  public static String formatServerAddress(String serverAddress) {
 
-        if (!serverAddress.startsWith("opc.tcp://")) {
-            serverAddress = "opc.tcp://" + serverAddress;
-        }
-
-        return serverAddress;
+    if (!serverAddress.startsWith("opc.tcp://")) {
+      serverAddress = "opc.tcp://" + serverAddress;
     }
 
-    /***
-     * OPC UA specific implementation of {@link org.apache.streampipes.connect.adapter.Adapter}
-     * @param adapterStreamDescription
-     * @return guess schema
-     * @throws AdapterException
-     * @throws ParseException
-     */
-    public static GuessSchema getSchema(SpecificAdapterStreamDescription adapterStreamDescription) throws AdapterException, ParseException {
-        GuessSchema guessSchema = new GuessSchema();
-        EventSchema eventSchema = new EventSchema();
-        List<EventProperty> allProperties = new ArrayList<>();
+    return serverAddress;
+  }
 
-        SpOpcUaClient spOpcUaClient = new SpOpcUaClient(SpOpcUaConfigBuilder.from(adapterStreamDescription));
+  /***
+   * OPC UA specific implementation of {@link org.apache.streampipes.connect.adapter.Adapter}
+   * @param adapterStreamDescription
+   * @return guess schema
+   * @throws AdapterException
+   * @throws ParseException
+   */
+  public static GuessSchema getSchema(SpecificAdapterStreamDescription adapterStreamDescription)
+    throws AdapterException, ParseException {
+    GuessSchema guessSchema = new GuessSchema();
+    EventSchema eventSchema = new EventSchema();
+    List<Map<String, GuessTypeInfo>> eventPreview = new ArrayList<>();
+    Map<String, FieldStatusInfo> fieldStatusInfos = new HashMap<>();
+    List<EventProperty> allProperties = new ArrayList<>();
 
-        try {
-            spOpcUaClient.connect();
-            OpcUaNodeBrowser nodeBrowser = new OpcUaNodeBrowser(spOpcUaClient.getClient(), spOpcUaClient.getSpOpcConfig());
-            List<OpcNode> selectedNodes = nodeBrowser.findNodes();
+    SpOpcUaClient spOpcUaClient = new SpOpcUaClient(SpOpcUaConfigBuilder.from(adapterStreamDescription));
 
-            if (!selectedNodes.isEmpty()) {
-                for (OpcNode opcNode : selectedNodes) {
-                    if (opcNode.hasUnitId()) {
-                        allProperties.add(PrimitivePropertyBuilder
-                            .create(opcNode.getType(), opcNode.getLabel())
-                            .label(opcNode.getLabel())
-                            .measurementUnit(new URI(opcNode.getQudtURI()))
-                            .build());
-                    } else {
-                        allProperties.add(PrimitivePropertyBuilder
-                            .create(opcNode.getType(), opcNode.getLabel())
-                            .label(opcNode.getLabel())
-                            .build());
-                    }
+    try {
+      spOpcUaClient.connect();
+      OpcUaNodeBrowser nodeBrowser =
+        new OpcUaNodeBrowser(spOpcUaClient.getClient(), spOpcUaClient.getSpOpcConfig());
+      List<OpcNode> selectedNodes = nodeBrowser.findNodes();
 
-                }
-            }
-
-            spOpcUaClient.disconnect();
-
-        } catch (Exception e) {
-            throw new AdapterException("Could not guess schema for opc node! " + e.getMessage());
+      if (!selectedNodes.isEmpty()) {
+        for (OpcNode opcNode : selectedNodes) {
+          if (opcNode.hasUnitId()) {
+            allProperties.add(PrimitivePropertyBuilder
+              .create(opcNode.getType(), opcNode.getLabel())
+              .label(opcNode.getLabel())
+              .measurementUnit(new URI(opcNode.getQudtURI()))
+              .build());
+          } else {
+            allProperties.add(PrimitivePropertyBuilder
+              .create(opcNode.getType(), opcNode.getLabel())
+              .label(opcNode.getLabel())
+              .build());
+          }
         }
+      }
 
-        eventSchema.setEventProperties(allProperties);
-        guessSchema.setEventSchema(eventSchema);
+      var nodeIds = selectedNodes.stream().map(OpcNode::getNodeId).collect(Collectors.toList());
+      var response = spOpcUaClient.getClient().readValues(0, TimestampsToReturn.Both, nodeIds);
 
-        return guessSchema;
+      var returnValues = response.get();
+
+      makeEventPreview(selectedNodes, eventPreview, fieldStatusInfos, returnValues);
+
+
+    } catch (Exception e) {
+      throw new AdapterException("Could not guess schema for OPC UA node: " + e.getMessage(), e);
+    } finally {
+      spOpcUaClient.disconnect();
     }
 
+    eventSchema.setEventProperties(allProperties);
+    guessSchema.setEventSchema(eventSchema);
+    guessSchema.setEventPreview(eventPreview);
+    guessSchema.setFieldStatusInfo(fieldStatusInfos);
 
-    /***
-     * OPC UA specific implementation of {@link org.apache.streampipes.container.api.ResolvesContainerProvidedOptions#resolveOptions(String, StaticPropertyExtractor)}.  }
-     * @param internalName The internal name of the Static Property
-     * @param parameterExtractor
-     * @return {@code List<Option>} with available node names for the given OPC UA configuration
-     */
-    public static RuntimeResolvableTreeInputStaticProperty resolveConfiguration (String internalName,
-                                                                                 StaticPropertyExtractor parameterExtractor) {
+    return guessSchema;
+  }
 
-        RuntimeResolvableTreeInputStaticProperty config = parameterExtractor
-                .getStaticPropertyByName(internalName, RuntimeResolvableTreeInputStaticProperty.class);
-        // access mode and host/url have to be selected
-        try {
-            parameterExtractor.selectedAlternativeInternalId(OpcUaLabels.OPC_HOST_OR_URL.name());
-            parameterExtractor.selectedAlternativeInternalId(OpcUaLabels.ACCESS_MODE.name());
-        } catch (NullPointerException nullPointerException) {
-            return config;
-        }
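+  /**
+   * Builds a single preview event and per-field status information from the values read while guessing the schema.
+   */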
+  private static void makeEventPreview(List<OpcNode> selectedNodes,
+                                       List<Map<String, GuessTypeInfo>> eventPreview,
+                                       Map<String, FieldStatusInfo> fieldStatusInfos,
+                                       List<DataValue> dataValues) {
+    var singlePreview = new HashMap<String, GuessTypeInfo>();
 
-        SpOpcUaClient spOpcUaClient = new SpOpcUaClient(SpOpcUaConfigBuilder.from(parameterExtractor));
-        try{
-            spOpcUaClient.connect();
-            OpcUaNodeBrowser nodeBrowser = new OpcUaNodeBrowser(spOpcUaClient.getClient(), spOpcUaClient.getSpOpcConfig());
-            config.setNodes(nodeBrowser.buildNodeTreeFromOrigin());
-            spOpcUaClient.disconnect();
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-        return config;
+    for (int i = 0; i < dataValues.size(); i++) {
+      var dv = dataValues.get(i);
+      String label = selectedNodes.get(i).getLabel();
+      if (StatusCode.GOOD.equals(dv.getStatusCode())) {
+        var value = dv.getValue().getValue();
+        singlePreview.put(label, new GuessTypeInfo(value.getClass().getCanonicalName(), value));
+        fieldStatusInfos.put(label, FieldStatusInfo.good());
+      } else {
+        String additionalInfo = dv.getStatusCode() != null ? dv.getStatusCode().toString() : "Status code is null";
+        fieldStatusInfos.put(label, FieldStatusInfo.bad(additionalInfo, false));
+      }
     }
 
-    public static String getRuntimeNameOfNode(NodeId nodeId) {
-        String[] keys = nodeId.getIdentifier().toString().split("\\.");
-        String key;
+    eventPreview.add(singlePreview);
+  }
 
-        if (keys.length > 0) {
-            key = keys[keys.length - 1];
-        } else {
-            key = nodeId.getIdentifier().toString();
-        }
 
-        return key;
+  /***
+   * OPC UA specific implementation of
+   * {@link org.apache.streampipes.container.api.ResolvesContainerProvidedOptions#resolveOptions(String, StaticPropertyExtractor)}.
+   * @param internalName The internal name of the Static Property
+   * @param parameterExtractor to extract parameters from the OPC UA config
+   * @return {@code List<Option>} with available node names for the given OPC UA configuration
+   */
+  public static RuntimeResolvableTreeInputStaticProperty resolveConfiguration(String internalName,
+                                                                              StaticPropertyExtractor parameterExtractor) throws SpConfigurationException {
+
+    RuntimeResolvableTreeInputStaticProperty config = parameterExtractor
+      .getStaticPropertyByName(internalName, RuntimeResolvableTreeInputStaticProperty.class);
+    // access mode and host/url have to be selected
+    try {
+      parameterExtractor.selectedAlternativeInternalId(OpcUaLabels.OPC_HOST_OR_URL.name());
+      parameterExtractor.selectedAlternativeInternalId(OpcUaLabels.ACCESS_MODE.name());
+    } catch (NullPointerException nullPointerException) {
+      return config;
     }
 
-    /**
-     * connects to each node individually and updates the data type in accordance to the data from the server.
-     * @param opcNodes List of opcNodes where the data type is not determined appropriately
-     */
-    public static void retrieveDataTypesFromServer(OpcUaClient client, List<OpcNode> opcNodes) throws AdapterException {
+    SpOpcUaClient spOpcUaClient = new SpOpcUaClient(SpOpcUaConfigBuilder.from(parameterExtractor));
 
-        for (OpcNode opcNode : opcNodes) {
-            try {
-                UInteger dataTypeId = (UInteger) client.getAddressSpace().getVariableNode(opcNode.getNodeId()).getDataType().getIdentifier();
-                OpcUaTypes.getType(dataTypeId);
-                opcNode.setType(OpcUaTypes.getType(dataTypeId));
-            } catch (UaException e) {
-               throw new AdapterException("Could not guess schema for opc node! " + e.getMessage());
-            }
-        }
+    try {
+      spOpcUaClient.connect();
+      OpcUaNodeBrowser nodeBrowser =
+        new OpcUaNodeBrowser(spOpcUaClient.getClient(), spOpcUaClient.getSpOpcConfig());
+      config.setNodes(nodeBrowser.buildNodeTreeFromOrigin());
+
+      return config;
+    } catch (UaException e) {
+      throw new SpConfigurationException(ExceptionMessageExtractor.getDescription(e), e);
+    } catch (ExecutionException | InterruptedException | URISyntaxException e) {
+      throw new SpConfigurationException("Could not connect to the OPC UA server with the provided settings", e);
+    } finally {
+      if (spOpcUaClient.getClient() != null) {
+        spOpcUaClient.disconnect();
+      }
+    }
+  }
+
+  public static String getRuntimeNameOfNode(NodeId nodeId) {
+    String[] keys = nodeId.getIdentifier().toString().split("\\.");
+    String key;
+
+    if (keys.length > 0) {
+      key = keys[keys.length - 1];
+    } else {
+      key = nodeId.getIdentifier().toString();
     }
 
-    /***
-     * Enum for all possible labels in the context of OPC UA adapters
-     */
-    public enum OpcUaLabels {
-        OPC_HOST_OR_URL,
-        OPC_URL,
-        OPC_HOST,
-        OPC_SERVER_URL,
-        OPC_SERVER_HOST,
-        OPC_SERVER_PORT,
-        NAMESPACE_INDEX,
-        NODE_ID,
-        ACCESS_MODE,
-        USERNAME_GROUP,
-        USERNAME,
-        PASSWORD,
-        UNAUTHENTICATED,
-        AVAILABLE_NODES,
-        PULLING_INTERVAL,
-        ADAPTER_TYPE,
-        PULL_MODE,
-        SUBSCRIPTION_MODE;
+    return key;
+  }
+
+  /**
+   * connects to each node individually and updates the data type in accordance to the data from the server.
+   *
+   * @param opcNodes List of opcNodes where the data type is not determined appropriately
+   */
+  public static void retrieveDataTypesFromServer(OpcUaClient client, List<OpcNode> opcNodes) throws AdapterException {
+
+    for (OpcNode opcNode : opcNodes) {
+      try {
+        UInteger dataTypeId =
+          (UInteger) client.getAddressSpace().getVariableNode(opcNode.getNodeId()).getDataType()
+            .getIdentifier();
+        OpcUaTypes.getType(dataTypeId);
+        opcNode.setType(OpcUaTypes.getType(dataTypeId));
+      } catch (UaException e) {
+        throw new AdapterException("Could not guess schema for opc node! " + e.getMessage());
+      }
     }
+  }
+
+  /***
+   * Enum for all possible labels in the context of OPC UA adapters
+   */
+  public enum OpcUaLabels {
+    OPC_HOST_OR_URL,
+    OPC_URL,
+    OPC_HOST,
+    OPC_SERVER_URL,
+    OPC_SERVER_HOST,
+    OPC_SERVER_PORT,
+    NAMESPACE_INDEX,
+    NODE_ID,
+    ACCESS_MODE,
+    USERNAME_GROUP,
+    USERNAME,
+    PASSWORD,
+    UNAUTHENTICATED,
+    AVAILABLE_NODES,
+    PULLING_INTERVAL,
+    ADAPTER_TYPE,
+    PULL_MODE,
+    SUBSCRIPTION_MODE;
+  }
 }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/Plc4xS7Adapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/Plc4xS7Adapter.java
index 04ece8a..227054b 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/Plc4xS7Adapter.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/Plc4xS7Adapter.java
@@ -27,11 +27,12 @@
 import org.apache.plc4x.java.utils.connectionpool.PooledPlcDriverManager;
 import org.apache.streampipes.connect.adapter.Adapter;
 import org.apache.streampipes.connect.adapter.util.PollingSettings;
-import org.apache.streampipes.connect.iiot.adapters.PullAdapter;
 import org.apache.streampipes.connect.api.exception.AdapterException;
+import org.apache.streampipes.connect.iiot.adapters.PullAdapter;
 import org.apache.streampipes.model.AdapterType;
 import org.apache.streampipes.model.connect.adapter.SpecificAdapterStreamDescription;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
+import org.apache.streampipes.model.connect.guess.GuessTypeInfo;
 import org.apache.streampipes.model.schema.EventProperty;
 import org.apache.streampipes.model.schema.EventSchema;
 import org.apache.streampipes.model.staticproperty.CollectionStaticProperty;
@@ -54,10 +55,10 @@
 import java.util.List;
 import java.util.Map;
 import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.ExecutionException;
 import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
 
-public class Plc4xS7Adapter extends PullAdapter {
+public class Plc4xS7Adapter extends PullAdapter implements PlcReadResponseHandler {
 
     /**
      * A unique id to identify the Plc4xS7Adapter
@@ -133,31 +134,45 @@
     public GuessSchema getSchema(SpecificAdapterStreamDescription adapterDescription) throws AdapterException {
 
         // Extract user input
-        getConfigurations(adapterDescription);
+        try {
+            getConfigurations(adapterDescription);
 
-        if (this.pollingInterval < 10) {
-            throw new AdapterException("Polling interval must be higher then 10. Current value: " + this.pollingInterval);
+            if (this.pollingInterval < 10) {
+                throw new AdapterException("Polling interval must be higher than 10. Current value: " + this.pollingInterval);
+            }
+
+            GuessSchema guessSchema = new GuessSchema();
+
+            EventSchema eventSchema = new EventSchema();
+            List<EventProperty> allProperties = new ArrayList<>();
+
+            for (Map<String, String> node : this.nodes) {
+                Datatypes datatype = getStreamPipesDataType(node.get(PLC_NODE_TYPE).toUpperCase().replaceAll(" ", "_"));
+
+                allProperties.add(
+                  PrimitivePropertyBuilder
+                    .create(datatype, node.get(PLC_NODE_RUNTIME_NAME))
+                    .label(node.get(PLC_NODE_RUNTIME_NAME))
+                    .description("")
+                    .build());
+            }
+
+            this.before();
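+            // read one sample from the PLC to populate the event preview of the guessed schema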
+            var event = readPlcDataSynchronized();
+            var preview = event
+              .entrySet()
+              .stream()
+              .collect(Collectors.toMap(Map.Entry::getKey, e ->
+                new GuessTypeInfo(e.getValue().getClass().getCanonicalName(), e.getValue())));
+
+            eventSchema.setEventProperties(allProperties);
+            guessSchema.setEventSchema(eventSchema);
+            guessSchema.setEventPreview(List.of(preview));
+
+            return guessSchema;
+        } catch (Exception e) {
+            throw new AdapterException(e.getMessage(), e);
         }
-
-        GuessSchema guessSchema = new GuessSchema();
-
-        EventSchema eventSchema = new EventSchema();
-        List<EventProperty> allProperties = new ArrayList<>();
-
-        for (Map<String, String> node : this.nodes) {
-            Datatypes datatype = getStreamPipesDataType(node.get(PLC_NODE_TYPE).toUpperCase().replaceAll(" ", "_"));
-
-            allProperties.add(
-                    PrimitivePropertyBuilder
-                            .create(datatype, node.get(PLC_NODE_RUNTIME_NAME))
-                            .label(node.get(PLC_NODE_RUNTIME_NAME))
-                            .description("")
-                            .build());
-        }
-
-        eventSchema.setEventProperties(allProperties);
-        guessSchema.setEventSchema(eventSchema);
-        return guessSchema;
     }
 
     /**
@@ -171,7 +186,6 @@
 
         this.driverManager = new PooledPlcDriverManager();
         try (PlcConnection plcConnection = this.driverManager.getConnection("s7://" + this.ip)) {
-
             if (!plcConnection.getMetadata().canRead()) {
                 this.LOG.error("The S7 on IP: " + this.ip + " does not support reading data");
             }
@@ -188,47 +202,36 @@
      */
     @Override
     protected void pullData() {
-
         // Create PLC read request
-        try (PlcConnection plcConnection = this.driverManager.getConnection("s7://" + this.ip)) {
-            PlcReadRequest.Builder builder = plcConnection.readRequestBuilder();
-            for (Map<String, String> node : this.nodes) {
-                builder.addItem(node.get(PLC_NODE_NAME), node.get(PLC_NODE_NAME) + ":" + node.get(PLC_NODE_TYPE).toUpperCase().replaceAll(" ", "_"));
-            }
-            PlcReadRequest readRequest = builder.build();
-
-            // Execute the request
-            CompletableFuture<? extends PlcReadResponse> asyncResponse = readRequest.execute();
-
-            asyncResponse.whenComplete((response, throwable) -> {
-                // Create an event containing the value of the PLC
-                if (throwable != null) {
-                    throwable.printStackTrace();
-                    this.LOG.error(throwable.getMessage());
-                } else {
-                    Map<String, Object> event = new HashMap<>();
-                    for (Map<String, String> node : this.nodes) {
-                        if (response.getResponseCode(node.get(PLC_NODE_NAME)) == PlcResponseCode.OK) {
-                            event.put(node.get(PLC_NODE_RUNTIME_NAME), response.getObject(node.get(PLC_NODE_NAME)));
-                        } else {
-                            this.LOG.error("Error[" + node.get(PLC_NODE_NAME) + "]: " +
-                                    response.getResponseCode(node.get(PLC_NODE_NAME)).name());
-                        }
-                    }
-
-                    // publish the final event
-                    adapterPipeline.process(event);
-                }
-            });
-
-        } catch (InterruptedException | ExecutionException e) {
-            this.LOG.error(e.getMessage());
-            e.printStackTrace();
+        try (PlcConnection plcConnection = this.driverManager.getConnection("s7://" + this.ip)) {
+            readPlcData(plcConnection, this);
         } catch (Exception e) {
-            this.LOG.error("Could not establish connection to S7 with ip " + this.ip, e);
-            e.printStackTrace();
+            LOG.error("Error while reading from PLC with IP {} ", this.ip, e);
         }
+    }
 
+    private PlcReadRequest makeReadRequest(PlcConnection plcConnection) throws PlcConnectionException {
+        PlcReadRequest.Builder builder = plcConnection.readRequestBuilder();
+        for (Map<String, String> node : this.nodes) {
+            builder.addItem(node.get(PLC_NODE_NAME), node.get(PLC_NODE_NAME) + ":" + node.get(PLC_NODE_TYPE).toUpperCase().replaceAll(" ", "_"));
+        }
+        return builder.build();
+    }
+
+    private void readPlcData(PlcConnection plcConnection, PlcReadResponseHandler handler) throws PlcConnectionException {
+        var readRequest = makeReadRequest(plcConnection);
+        // Execute the request
+        CompletableFuture<? extends PlcReadResponse> asyncResponse = readRequest.execute();
+        asyncResponse.whenComplete(handler::onReadResult);
+    }
+
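+    /**
+     * Reads all configured nodes once and blocks for up to five seconds until the result is available
+     * (used to build the schema preview).
+     */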
+    private Map<String, Object> readPlcDataSynchronized() throws Exception {
+        try (PlcConnection plcConnection = this.driverManager.getConnection("s7://" + this.ip)) {
+            var readRequest = makeReadRequest(plcConnection);
+            // Execute the request
+            var readResponse = readRequest.execute().get(5000, TimeUnit.MILLISECONDS);
+            return makeEvent(readResponse);
+        }
     }
 
     /**
@@ -315,4 +318,28 @@
         }
     }
 
+    @Override
+    public void onReadResult(PlcReadResponse response, Throwable throwable) {
+        if (throwable != null) {
+            this.LOG.error("Error while reading the PLC response", throwable);
+        } else {
+            var event = makeEvent(response);
+            // publish the final event
+            adapterPipeline.process(event);
+        }
+    }
+
+    private Map<String, Object> makeEvent(PlcReadResponse response) {
+        Map<String, Object> event = new HashMap<>();
+        for (Map<String, String> node : this.nodes) {
+            if (response.getResponseCode(node.get(PLC_NODE_NAME)) == PlcResponseCode.OK) {
+                event.put(node.get(PLC_NODE_RUNTIME_NAME), response.getObject(node.get(PLC_NODE_NAME)));
+            } else {
+                this.LOG.error("Error[" + node.get(PLC_NODE_NAME) + "]: " +
+                  response.getResponseCode(node.get(PLC_NODE_NAME)).name());
+            }
+        }
+        return event;
+    }
 }
diff --git a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/PlcReadResponseHandler.java
similarity index 75%
copy from ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
copy to streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/PlcReadResponseHandler.java
index 58ba04b..b9b8f1b 100644
--- a/ui/src/app/connect/components/schema-editor/error-message/error-message.component.scss
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/plc4x/s7/PlcReadResponseHandler.java
@@ -16,3 +16,12 @@
  *
  */
 
+package org.apache.streampipes.connect.iiot.adapters.plc4x.s7;
+
+import org.apache.plc4x.java.api.messages.PlcReadResponse;
+
+public interface PlcReadResponseHandler {
+
+  void onReadResult(PlcReadResponse response,
+                    Throwable throwable);
+}
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataSimulatorUtils.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataSimulatorUtils.java
index 0d7b2af..f157bab 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataSimulatorUtils.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataSimulatorUtils.java
@@ -30,123 +30,137 @@
 
 public class MachineDataSimulatorUtils {
 
-    // Vocabulary
-    public static final String NS = "https://streampipes.org/vocabulary/examples/watertank/v1/";
-    public static final String HAS_SENSOR_ID = NS + "hasSensorId";
+  // Vocabulary
+  public static final String NS = "https://streampipes.org/vocabulary/examples/watertank/v1/";
+  public static final String HAS_SENSOR_ID = NS + "hasSensorId";
 
-    private static final String TIMESTAMP = "timestamp";
-    private static final String SENSOR_ID = "sensorId";
-    private static final String MASS_FLOW = "mass_flow";
-    private static final String TEMPERATURE = "temperature";
+  private static final String TIMESTAMP = "timestamp";
+  private static final String SENSOR_ID = "sensorId";
+  private static final String MASS_FLOW = "mass_flow";
+  private static final String TEMPERATURE = "temperature";
 
-    public static GuessSchema getSchema(String selectedSimulatorOption) throws AdapterException {
-        switch(selectedSimulatorOption) {
-            case "flowrate":
-                return getFlowrateSchema();
-            case "pressure":
-                return getPressureSchema();
-            case "waterlevel":
-                return getWaterlevelSchema();
-            default:
-                throw new AdapterException("resource not found");
-        }
+  public static GuessSchema getSchema(String selectedSimulatorOption) throws AdapterException {
+    switch (selectedSimulatorOption) {
+      case "flowrate":
+        return getFlowrateSchema();
+      case "pressure":
+        return getPressureSchema();
+      case "waterlevel":
+        return getWaterlevelSchema();
+      default:
+        throw new AdapterException("resource not found");
     }
+  }
 
-    private static GuessSchema getWaterlevelSchema() {
-        return GuessSchemaBuilder.create()
-                .property(timestampProperty(TIMESTAMP))
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.String, "sensorId")
-                        .label("Sensor ID")
-                        .description("The ID of the sensor")
-                        .domainProperty(HAS_SENSOR_ID)
-                        .scope(PropertyScope.DIMENSION_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Float, "level")
-                        .label("Water Level")
-                        .description("Denotes the current water level in the container")
-                        .domainProperty(SO.Number)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Boolean, "overflow")
-                        .label("Overflow")
-                        .description("Indicates whether the tank overflows")
-                        .domainProperty(SO.Number)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .build();
-    }
+  private static GuessSchema getWaterlevelSchema() {
+    return GuessSchemaBuilder.create()
+      .property(timestampProperty(TIMESTAMP))
+      .sample(TIMESTAMP, System.currentTimeMillis())
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.String, "sensorId")
+        .label("Sensor ID")
+        .description("The ID of the sensor")
+        .domainProperty(HAS_SENSOR_ID)
+        .scope(PropertyScope.DIMENSION_PROPERTY)
+        .build())
+      .sample("sensorId", "sensor01")
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Float, "level")
+        .label("Water Level")
+        .description("Denotes the current water level in the container")
+        .domainProperty(SO.Number)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample("level", 5.25f)
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Boolean, "overflow")
+        .label("Overflow")
+        .description("Indicates whether the tank overflows")
+        .domainProperty(SO.Number)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample("overflow", true)
+      .build();
+  }
 
-    private static GuessSchema getPressureSchema() {
-        return GuessSchemaBuilder.create()
-                .property(timestampProperty(TIMESTAMP))
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.String, "sensorId")
-                        .label("Sensor ID")
-                        .description("The ID of the sensor")
-                        .domainProperty(HAS_SENSOR_ID)
-                        .scope(PropertyScope.DIMENSION_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Float, "pressure")
-                        .label("Pressure")
-                        .description("Denotes the current pressure in the pressure tank")
-                        .domainProperty(SO.Number)
-                        .valueSpecification(0.0f, 100.0f, 0.5f)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .build();
-    }
+  private static GuessSchema getPressureSchema() {
+    return GuessSchemaBuilder.create()
+      .property(timestampProperty(TIMESTAMP))
+      .sample(TIMESTAMP, System.currentTimeMillis())
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.String, "sensorId")
+        .label("Sensor ID")
+        .description("The ID of the sensor")
+        .domainProperty(HAS_SENSOR_ID)
+        .scope(PropertyScope.DIMENSION_PROPERTY)
+        .build())
+      .sample("sensorId", "sensor01")
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Float, "pressure")
+        .label("Pressure")
+        .description("Denotes the current pressure in the pressure tank")
+        .domainProperty(SO.Number)
+        .valueSpecification(0.0f, 100.0f, 0.5f)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample("pressure", 85.22f)
+      .build();
+  }
 
-    public static GuessSchema getFlowrateSchema() {
-        return GuessSchemaBuilder.create()
-                .property(timestampProperty(TIMESTAMP))
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.String, SENSOR_ID)
-                        .label("Sensor ID")
-                        .description("The ID of the sensor")
-                        .domainProperty(HAS_SENSOR_ID)
-                        .scope(PropertyScope.DIMENSION_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Float, MASS_FLOW)
-                        .label("Mass Flow")
-                        .description("Denotes the current mass flow in the sensor")
-                        .domainProperty(SO.Number)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Float, "volume_flow")
-                        .label("Volume Flow")
-                        .description("Denotes the current volume flow")
-                        .domainProperty(SO.Number)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Float, TEMPERATURE)
-                        .label("Temperature")
-                        .description("Denotes the current temperature in degrees celsius")
-                        .domainProperty(SO.Number)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .measurementUnit(URI.create("http://codes.wmo.int/common/unit/degC"))
-                        .valueSpecification(0.0f, 100.0f, 0.1f)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Float, "density")
-                        .label("Density")
-                        .description("Denotes the current density of the fluid")
-                        .domainProperty(SO.Number)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .property(PrimitivePropertyBuilder
-                        .create(Datatypes.Boolean, "sensor_fault_flags")
-                        .label("Sensor Fault Flags")
-                        .description("Any fault flags of the sensors")
-                        .domainProperty(SO.Boolean)
-                        .scope(PropertyScope.MEASUREMENT_PROPERTY)
-                        .build())
-                .build();
-    }
+  public static GuessSchema getFlowrateSchema() {
+    return GuessSchemaBuilder.create()
+      .property(timestampProperty(TIMESTAMP))
+      .sample(TIMESTAMP, System.currentTimeMillis())
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.String, SENSOR_ID)
+        .label("Sensor ID")
+        .description("The ID of the sensor")
+        .domainProperty(HAS_SENSOR_ID)
+        .scope(PropertyScope.DIMENSION_PROPERTY)
+        .build())
+      .sample(SENSOR_ID, "sensor01")
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Float, MASS_FLOW)
+        .label("Mass Flow")
+        .description("Denotes the current mass flow in the sensor")
+        .domainProperty(SO.Number)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample(MASS_FLOW, 5.76f)
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Float, "volume_flow")
+        .label("Volume Flow")
+        .description("Denotes the current volume flow")
+        .domainProperty(SO.Number)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample("volume_flow", 3.34f)
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Float, TEMPERATURE)
+        .label("Temperature")
+        .description("Denotes the current temperature in degrees celsius")
+        .domainProperty(SO.Number)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .measurementUnit(URI.create("http://codes.wmo.int/common/unit/degC"))
+        .valueSpecification(0.0f, 100.0f, 0.1f)
+        .build())
+      .sample(TEMPERATURE, 33.221f)
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Float, "density")
+        .label("Density")
+        .description("Denotes the current density of the fluid")
+        .domainProperty(SO.Number)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample("density", 5.0f)
+      .property(PrimitivePropertyBuilder
+        .create(Datatypes.Boolean, "sensor_fault_flags")
+        .label("Sensor Fault Flags")
+        .description("Any fault flags of the sensors")
+        .domainProperty(SO.Boolean)
+        .scope(PropertyScope.MEASUREMENT_PROPERTY)
+        .build())
+      .sample("sensor_fault_flags", true)
+      .build();
+  }
 }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataStreamAdapter.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataStreamAdapter.java
index e661a7c..aaf7a90 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataStreamAdapter.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/simulator/machine/MachineDataStreamAdapter.java
@@ -59,7 +59,7 @@
                 .withAssets(Assets.DOCUMENTATION, Assets.ICON)
                 .withLocales(Locales.EN)
                 .category(AdapterType.Debugging)
-                .requiredIntegerParameter(Labels.withId(WAIT_TIME_MS))
+                .requiredIntegerParameter(Labels.withId(WAIT_TIME_MS), 1000)
                 .requiredSingleValueSelection(Labels.withId(SELECTED_SIMULATOR_OPTION), Options.from(
                         "flowrate", "pressure", "waterlevel"))
                 .build();
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/FileProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/FileProtocol.java
index 25976de..8dcb342 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/FileProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/FileProtocol.java
@@ -121,11 +121,12 @@
             InputStream targetStream = FileProtocolUtils.getFileInputStream(this.selectedFilename);
             List<byte[]> dataByte = parser.parseNEvents(targetStream, 20);
 
-            EventSchema eventSchema = parser.getEventSchema(dataByte);
-
-            GuessSchema result = SchemaGuesser.guessSchma(eventSchema);
-
-            return result;
+            if (parser.supportsPreview()) {
+                return SchemaGuesser.guessSchema(parser.getSchemaAndSample(dataByte));
+            } else {
+                EventSchema eventSchema = parser.getEventSchema(dataByte);
+                return SchemaGuesser.guessSchema(eventSchema);
+            }
         } catch (FileNotFoundException e) {
             throw new ParseException("Could not read local file");
         }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/HttpProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/HttpProtocol.java
index 1feb4cc..eca0f25 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/HttpProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/set/HttpProtocol.java
@@ -116,7 +116,7 @@
 
         EventSchema eventSchema= parser.getEventSchema(dataByte);
 
-        GuessSchema result = SchemaGuesser.guessSchma(eventSchema);
+        GuessSchema result = SchemaGuesser.guessSchema(eventSchema);
 
         return result;
     }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/BrokerProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/BrokerProtocol.java
index 835fffc..e34d7f9 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/BrokerProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/BrokerProtocol.java
@@ -23,7 +23,6 @@
 import org.apache.streampipes.connect.api.IParser;
 import org.apache.streampipes.connect.api.exception.ParseException;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
-import org.apache.streampipes.model.schema.EventSchema;
 
 import java.util.ArrayList;
 import java.util.List;
@@ -48,9 +47,12 @@
   public GuessSchema getGuessSchema() throws ParseException {
 
     List<byte[]> eventByte = getNByteElements(1);
-    EventSchema eventSchema = parser.getEventSchema(eventByte);
 
-    return SchemaGuesser.guessSchma(eventSchema);
+    if (parser.supportsPreview()) {
+      return SchemaGuesser.guessSchema(parser.getSchemaAndSample(eventByte));
+    } else {
+      return SchemaGuesser.guessSchema(parser.getEventSchema(eventByte));
+    }
   }
 
   @Override
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/FileStreamProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/FileStreamProtocol.java
index 4b9368b..f88f5c5 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/FileStreamProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/FileStreamProtocol.java
@@ -211,11 +211,12 @@
 
     List<byte[]> dataByte = parser.parseNEvents(dataInputStream, 2);
 
-    EventSchema eventSchema = parser.getEventSchema(dataByte);
-
-    GuessSchema result = SchemaGuesser.guessSchma(eventSchema);
-
-    return result;
+    if (parser.supportsPreview()) {
+      return SchemaGuesser.guessSchema(parser.getSchemaAndSample(dataByte));
+    } else {
+      EventSchema eventSchema = parser.getEventSchema(dataByte);
+      return SchemaGuesser.guessSchema(eventSchema);
+    }
   }
 
   @Override
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/HttpStreamProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/HttpStreamProtocol.java
index 39a5dad..be68829 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/HttpStreamProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/HttpStreamProtocol.java
@@ -112,7 +112,7 @@
             dataByte.addAll(dataByte);
         }
         EventSchema eventSchema= parser.getEventSchema(dataByte);
-        GuessSchema result = SchemaGuesser.guessSchma(eventSchema);
+        GuessSchema result = SchemaGuesser.guessSchema(eventSchema);
 
         return result;
     }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/KafkaProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/KafkaProtocol.java
index 502aa7c..cdc8cb4 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/KafkaProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/KafkaProtocol.java
@@ -20,9 +20,11 @@
 
 import org.apache.commons.io.IOUtils;
 import org.apache.kafka.clients.consumer.*;
+import org.apache.kafka.common.KafkaException;
 import org.apache.kafka.common.TopicPartition;
 import org.apache.kafka.common.serialization.ByteArrayDeserializer;
 import org.apache.streampipes.commons.constants.GlobalStreamPipesConstants;
+import org.apache.streampipes.commons.exceptions.SpConfigurationException;
 import org.apache.streampipes.commons.exceptions.SpRuntimeException;
 import org.apache.streampipes.connect.SendToPipeline;
 import org.apache.streampipes.connect.adapter.model.generic.Protocol;
@@ -30,7 +32,7 @@
 import org.apache.streampipes.connect.api.IFormat;
 import org.apache.streampipes.connect.api.IParser;
 import org.apache.streampipes.connect.api.exception.ParseException;
-import org.apache.streampipes.container.api.ResolvesContainerProvidedOptions;
+import org.apache.streampipes.container.api.SupportsRuntimeConfig;
 import org.apache.streampipes.messaging.InternalEventProcessor;
 import org.apache.streampipes.messaging.kafka.SpKafkaConsumer;
 import org.apache.streampipes.model.AdapterType;
@@ -38,6 +40,8 @@
 import org.apache.streampipes.model.grounding.KafkaTransportProtocol;
 import org.apache.streampipes.model.grounding.SimpleTopicDefinition;
 import org.apache.streampipes.model.staticproperty.Option;
+import org.apache.streampipes.model.staticproperty.RuntimeResolvableOneOfStaticProperty;
+import org.apache.streampipes.model.staticproperty.StaticProperty;
 import org.apache.streampipes.pe.shared.config.kafka.KafkaConfig;
 import org.apache.streampipes.pe.shared.config.kafka.KafkaConnectUtils;
 import org.apache.streampipes.sdk.builder.adapter.ProtocolDescriptionBuilder;
@@ -45,7 +49,6 @@
 import org.apache.streampipes.sdk.helpers.AdapterSourceType;
 import org.apache.streampipes.sdk.helpers.Labels;
 import org.apache.streampipes.sdk.helpers.Locales;
-import org.apache.streampipes.sdk.helpers.Options;
 import org.apache.streampipes.sdk.utils.Assets;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -55,7 +58,7 @@
 import java.util.*;
 import java.util.stream.Collectors;
 
-public class KafkaProtocol extends BrokerProtocol implements ResolvesContainerProvidedOptions {
+public class KafkaProtocol extends BrokerProtocol implements SupportsRuntimeConfig {
 
     Logger logger = LoggerFactory.getLogger(KafkaProtocol.class);
     KafkaConfig config;
@@ -154,7 +157,7 @@
         return resultEventsByte;
     }
 
-    private Consumer<byte[], byte[]> createConsumer(KafkaConfig kafkaConfig) {
+    private Consumer<byte[], byte[]> createConsumer(KafkaConfig kafkaConfig) throws KafkaException {
         final Properties props = new Properties();
 
         kafkaConfig.getSecurityConfig().appendConfig(props);
@@ -165,6 +168,8 @@
         props.put(ConsumerConfig.GROUP_ID_CONFIG,
                 "KafkaExampleConsumer" + System.currentTimeMillis());
 
+        props.put(ConsumerConfig.DEFAULT_API_TIMEOUT_MS_CONFIG, 6000);
+
         props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                 ByteArrayDeserializer.class.getName());
         props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
@@ -214,22 +219,30 @@
     }
 
     @Override
-    public List<Option> resolveOptions(String requestId, StaticPropertyExtractor extractor) {
+    public StaticProperty resolveConfiguration(String staticPropertyInternalName, StaticPropertyExtractor extractor) throws SpConfigurationException {
+        RuntimeResolvableOneOfStaticProperty config = extractor
+          .getStaticPropertyByName(KafkaConnectUtils.TOPIC_KEY, RuntimeResolvableOneOfStaticProperty.class);
         KafkaConfig kafkaConfig = KafkaConnectUtils.getConfig(extractor, false);
         boolean hideInternalTopics = extractor.slideToggleValue(KafkaConnectUtils.getHideInternalTopicsKey());
 
-        Consumer<byte[], byte[]> consumer = createConsumer(kafkaConfig);
+        try {
+            Consumer<byte[], byte[]> consumer = createConsumer(kafkaConfig);
+            Set<String> topics = consumer.listTopics().keySet();
+            consumer.close();
 
-        Set<String> topics = consumer.listTopics().keySet();
-        consumer.close();
+            if (hideInternalTopics) {
+                topics = topics
+                  .stream()
+                  .filter(t -> !t.startsWith(GlobalStreamPipesConstants.INTERNAL_TOPIC_PREFIX))
+                  .collect(Collectors.toSet());
+            }
 
-        if (hideInternalTopics) {
-            topics = topics
-                    .stream()
-                    .filter(t -> !t.startsWith(GlobalStreamPipesConstants.INTERNAL_TOPIC_PREFIX))
-                    .collect(Collectors.toSet());
+            config.setOptions(topics.stream().map(Option::new).collect(Collectors.toList()));
+
+            return config;
+        } catch (KafkaException e) {
+            throw new SpConfigurationException(e.getMessage(), e);
         }
-        return topics.stream().map(Option::new).collect(Collectors.toList());
     }
 
 
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/pulsar/PulsarProtocol.java b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/pulsar/PulsarProtocol.java
index 197456a..4f431c5 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/pulsar/PulsarProtocol.java
+++ b/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/protocol/stream/pulsar/PulsarProtocol.java
@@ -17,8 +17,10 @@
  */
 package org.apache.streampipes.connect.iiot.protocol.stream.pulsar;
 
+import java.io.IOException;
 import org.apache.pulsar.client.api.PulsarClient;
 import org.apache.pulsar.client.api.PulsarClientException;
+import org.apache.pulsar.client.api.Reader;
 import org.apache.streampipes.connect.SendToPipeline;
 import org.apache.streampipes.connect.adapter.model.generic.Protocol;
 import org.apache.streampipes.connect.adapter.sdk.ParameterExtractor;
@@ -68,18 +70,17 @@
   @Override
   protected List<byte[]> getNByteElements(int n) throws ParseException {
     List<byte[]> elements = new ArrayList<>();
-    InternalEventProcessor<byte[]> eventProcessor = elements::add;
-    PulsarConsumer consumer = new PulsarConsumer(this.brokerUrl, this.topic, eventProcessor, n);
-
-    Thread thread = new Thread(consumer);
-    thread.start();
-
-    while (consumer.getMessageCount() < n) {
-      try {
-        Thread.sleep(100);
-      } catch (InterruptedException e) {
-        e.printStackTrace();
+    try (PulsarClient pulsarClient = PulsarUtils.makePulsarClient(brokerUrl);
+         Reader<byte[]> reader = pulsarClient.newReader()
+                 .topic(topic)
+                 .create()) {
+      for (int i = 0; i < n; i++) {
+        if (reader.hasMessageAvailable()) {
+          elements.add(reader.readNext().getValue());
+        }
       }
+    } catch (IOException e) {
+      throw new ParseException("Failed to fetch messages.", e);
     }
     return elements;
   }
diff --git a/streampipes-extensions/streampipes-connect-adapters/pom.xml b/streampipes-extensions/streampipes-connect-adapters/pom.xml
index 713a33c..40c2f3f 100644
--- a/streampipes-extensions/streampipes-connect-adapters/pom.xml
+++ b/streampipes-extensions/streampipes-connect-adapters/pom.xml
@@ -23,7 +23,7 @@
     <parent>
         <groupId>org.apache.streampipes</groupId>
         <artifactId>streampipes-extensions</artifactId>
-        <version>0.70.0-SNAPSHOT</version>
+        <version>0.71.0-SNAPSHOT</version>
     </parent>
 
     <artifactId>streampipes-connect-adapters</artifactId>
@@ -37,7 +37,7 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-connect-container-worker</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
             <exclusions>
                 <exclusion>
                     <groupId>org.graalvm.nativeimage</groupId>
@@ -48,12 +48,12 @@
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-pipeline-elements-shared</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
         <dependency>
             <groupId>org.apache.streampipes</groupId>
             <artifactId>streampipes-container-extensions</artifactId>
-            <version>0.70.0-SNAPSHOT</version>
+            <version>0.71.0-SNAPSHOT</version>
         </dependency>
 
         <!-- External dependencies -->
@@ -215,5 +215,4 @@
         </plugins>
         <finalName>streampipes-connect-adapters</finalName>
     </build>
-
 </project>
diff --git a/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/ConnectAdapterInit.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/ConnectAdapterInit.java
index 78423b8..624b9e2 100644
--- a/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/ConnectAdapterInit.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/ConnectAdapterInit.java
@@ -26,6 +26,8 @@
 import org.apache.streampipes.connect.adapters.image.set.ImageSetAdapter;
 import org.apache.streampipes.connect.adapters.image.stream.ImageStreamAdapter;
 import org.apache.streampipes.connect.adapters.iss.IssAdapter;
+import org.apache.streampipes.connect.adapters.netio.NetioMQTTAdapter;
+import org.apache.streampipes.connect.adapters.netio.NetioRestAdapter;
 import org.apache.streampipes.connect.adapters.simulator.random.RandomDataSetAdapter;
 import org.apache.streampipes.connect.adapters.simulator.random.RandomDataStreamAdapter;
 import org.apache.streampipes.connect.adapters.slack.SlackAdapter;
@@ -45,23 +47,25 @@
   @Override
   public SpServiceDefinition provideServiceDefinition() {
     return SpServiceDefinitionBuilder.create("connect-adapter",
-            "StreamPipes Connect Worker Main",
-            "",8001)
-            .registerAdapter(new GdeltAdapter())
-            .registerAdapter(new CoindeskBitcoinAdapter())
-            .registerAdapter(new IexCloudNewsAdapter())
-            .registerAdapter(new IexCloudStockAdapter())
-            .registerAdapter(new RandomDataSetAdapter())
-            .registerAdapter(new RandomDataStreamAdapter())
-            .registerAdapter(new SlackAdapter())
-            .registerAdapter(new WikipediaEditedArticlesAdapter())
-            .registerAdapter(new WikipediaNewArticlesAdapter())
-            .registerAdapter(new ImageStreamAdapter())
-            .registerAdapter(new ImageSetAdapter())
-            .registerAdapter(new IssAdapter())
-            .registerAdapter(new FlicMQTTAdapter())
-            .registerAdapter(new TISensorTag())
-            .build();
+        "StreamPipes Connect Worker Main",
+        "", 8001)
+      .registerAdapter(new GdeltAdapter())
+      .registerAdapter(new CoindeskBitcoinAdapter())
+      .registerAdapter(new NetioRestAdapter())
+      .registerAdapter(new NetioMQTTAdapter())
+      .registerAdapter(new IexCloudNewsAdapter())
+      .registerAdapter(new IexCloudStockAdapter())
+      .registerAdapter(new RandomDataSetAdapter())
+      .registerAdapter(new RandomDataStreamAdapter())
+      .registerAdapter(new SlackAdapter())
+      .registerAdapter(new WikipediaEditedArticlesAdapter())
+      .registerAdapter(new WikipediaNewArticlesAdapter())
+      .registerAdapter(new ImageStreamAdapter())
+      .registerAdapter(new ImageSetAdapter())
+      .registerAdapter(new IssAdapter())
+      .registerAdapter(new FlicMQTTAdapter())
+      .registerAdapter(new TISensorTag())
+      .build();
   }
 
 }
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioMQTTAdapter.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioMQTTAdapter.java
similarity index 95%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioMQTTAdapter.java
rename to streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioMQTTAdapter.java
index d692cd7..f2686c8 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioMQTTAdapter.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioMQTTAdapter.java
@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.streampipes.connect.iiot.adapters.netio;
+package org.apache.streampipes.connect.adapters.netio;
 
 import com.google.gson.Gson;
 
@@ -24,8 +24,8 @@
 import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.connect.adapter.model.pipeline.AdapterPipeline;
 import org.apache.streampipes.connect.adapter.model.specific.SpecificDataStreamAdapter;
-import org.apache.streampipes.connect.iiot.adapters.netio.model.NetioAllPowerOutputs;
-import org.apache.streampipes.connect.iiot.adapters.netio.model.NetioPowerOutput;
+import org.apache.streampipes.connect.adapters.netio.model.NetioAllPowerOutputs;
+import org.apache.streampipes.connect.adapters.netio.model.NetioPowerOutput;
 import org.apache.streampipes.pe.shared.config.mqtt.*;
 import org.apache.streampipes.messaging.InternalEventProcessor;
 import org.apache.streampipes.model.AdapterType;
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioRestAdapter.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioRestAdapter.java
similarity index 95%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioRestAdapter.java
rename to streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioRestAdapter.java
index 7d7a5d3..4b4c9f4 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioRestAdapter.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioRestAdapter.java
@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.streampipes.connect.iiot.adapters.netio;
+package org.apache.streampipes.connect.adapters.netio;
 
 import com.google.gson.Gson;
 import org.apache.http.HttpHost;
@@ -24,12 +24,12 @@
 import org.apache.http.client.fluent.Executor;
 import org.apache.http.client.fluent.Request;
 import org.apache.streampipes.connect.adapter.Adapter;
-import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.connect.adapter.sdk.ParameterExtractor;
 import org.apache.streampipes.connect.adapter.util.PollingSettings;
-import org.apache.streampipes.connect.iiot.adapters.PullAdapter;
-import org.apache.streampipes.connect.iiot.adapters.netio.model.NetioAllPowerOutputs;
-import org.apache.streampipes.connect.iiot.adapters.netio.model.NetioPowerOutput;
+import org.apache.streampipes.connect.adapters.PullAdapter;
+import org.apache.streampipes.connect.adapters.netio.model.NetioAllPowerOutputs;
+import org.apache.streampipes.connect.adapters.netio.model.NetioPowerOutput;
+import org.apache.streampipes.connect.api.exception.AdapterException;
 import org.apache.streampipes.model.AdapterType;
 import org.apache.streampipes.model.connect.adapter.SpecificAdapterStreamDescription;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioUtils.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioUtils.java
similarity index 96%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioUtils.java
rename to streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioUtils.java
index b7da893..2a47cc1 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/NetioUtils.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/NetioUtils.java
@@ -16,10 +16,10 @@
  *
  */
 
-package org.apache.streampipes.connect.iiot.adapters.netio;
+package org.apache.streampipes.connect.adapters.netio;
 
-import org.apache.streampipes.connect.iiot.adapters.netio.model.NetioGlobalMeasure;
-import org.apache.streampipes.connect.iiot.adapters.netio.model.NetioPowerOutput;
+import org.apache.streampipes.connect.adapters.netio.model.NetioGlobalMeasure;
+import org.apache.streampipes.connect.adapters.netio.model.NetioPowerOutput;
 import org.apache.streampipes.model.connect.guess.GuessSchema;
 import org.apache.streampipes.model.schema.EventProperty;
 import org.apache.streampipes.model.schema.EventSchema;
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioAllPowerOutputs.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioAllPowerOutputs.java
similarity index 95%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioAllPowerOutputs.java
rename to streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioAllPowerOutputs.java
index c816ca4..1b4e5ac 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioAllPowerOutputs.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioAllPowerOutputs.java
@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.streampipes.connect.iiot.adapters.netio.model;
+package org.apache.streampipes.connect.adapters.netio.model;
 
 import com.google.gson.annotations.SerializedName;
 
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioGlobalMeasure.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioGlobalMeasure.java
similarity index 96%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioGlobalMeasure.java
rename to streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioGlobalMeasure.java
index 0a6c9a6..34c0d22 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioGlobalMeasure.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioGlobalMeasure.java
@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.streampipes.connect.iiot.adapters.netio.model;
+package org.apache.streampipes.connect.adapters.netio.model;
 
 
 import com.google.gson.annotations.SerializedName;
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioPowerOutput.java b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioPowerOutput.java
similarity index 97%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioPowerOutput.java
rename to streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioPowerOutput.java
index f2b75eb..2b2c6ae 100644
--- a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/java/org/apache/streampipes/connect/iiot/adapters/netio/model/NetioPowerOutput.java
+++ b/streampipes-extensions/streampipes-connect-adapters/src/main/java/org/apache/streampipes/connect/adapters/netio/model/NetioPowerOutput.java
@@ -16,7 +16,7 @@
  *
  */
 
-package org.apache.streampipes.connect.iiot.adapters.netio.model;
+package org.apache.streampipes.connect.adapters.netio.model;
 
 import com.google.gson.annotations.SerializedName;
 
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/documentation.md b/streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/documentation.md
similarity index 100%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/documentation.md
rename to streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/documentation.md
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/icon.png b/streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/icon.png
similarity index 100%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/icon.png
rename to streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/icon.png
Binary files differ
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/strings.en b/streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/strings.en
similarity index 100%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/strings.en
rename to streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.mqtt/strings.en
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/documentation.md b/streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/documentation.md
similarity index 100%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/documentation.md
rename to streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/documentation.md
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/icon.png b/streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/icon.png
similarity index 100%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/icon.png
rename to streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/icon.png
Binary files differ
diff --git a/streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/strings.en b/streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/strings.en
similarity index 100%
rename from streampipes-extensions/streampipes-connect-adapters-iiot/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/strings.en
rename to streampipes-extensions/streampipes-connect-adapters/src/main/resources/org.apache.streampipes.connect.iiot.adapters.netio.rest/strings.en
diff --git a/streampipes-extensions/streampipes-processors-image-processing-jvm/Dockerfile b/streampipes-extensions/streampipes-extensions-all-iiot/Dockerfile
similarity index 73%
rename from streampipes-extensions/streampipes-processors-image-processing-jvm/Dockerfile
rename to streampipes-extensions/streampipes-extensions-all-iiot/Dockerfile
index e64e6d1..99ba527 100644
--- a/streampipes-extensions/streampipes-processors-image-processing-jvm/Dockerfile
+++ b/streampipes-extensions/streampipes-extensions-all-iiot/Dockerfile
@@ -13,16 +13,15 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-ARG BASE_IMAGE=adoptopenjdk/openjdk8-openj9:alpine
-FROM $BASE_IMAGE
+FROM eclipse-temurin:11-jre-focal
 
 ENV CONSUL_LOCATION consul
 
 EXPOSE 8090
-# needed pkgs processors-image-processing-jvm
-RUN apk --update add --no-cache \
-    fontconfig \
-    ttf-dejavu
 
-COPY target/streampipes-processors-image-processing-jvm.jar  /streampipes-processing-element-container.jar
-ENTRYPOINT ["java", "-jar", "/streampipes-processing-element-container.jar"]
+# needed pkgs processors-image-processing-jvm
+RUN apt-get update && apt-get install -y fonts-dejavu-core fontconfig
+
+COPY target/streampipes-extensions-all-iiot.jar  /streampipes-extensions-all-iiot.jar
+
+ENTRYPOINT ["java", "-