Remove old or unfinished files
diff --git a/Dockerfile b/Dockerfile
deleted file mode 100644
index 0897f83..0000000
--- a/Dockerfile
+++ /dev/null
@@ -1,92 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-FROM openjdk:8-alpine
-
-ENV spark_uid=185
-
-ENV SCALA_MAJOR 2.12
-ENV HADOOP_MAJOR 2.7
-ENV SPARK_MAJOR_MINOR 2.4.4
-
-# Before building the Mahout docker image, we must build a Spark distribution following
-# the instructions in http://spark.apache.org/docs/latest/building-spark.html.
-# This Dockerfile builds Spark 2.4.4 against Scala 2.12 by default.
-# docker build -t mahout:latest -f resource_managers/docker/kubernetes/src/main/dockerfiles/Dockerfile .
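-#
-# A sketch of that pre-build step, run from a Spark 2.4.4 source checkout
-# (the distribution name and profile set here are assumptions):
-#   ./dev/change-scala-version.sh 2.12
-#   ./dev/make-distribution.sh --name mahout-k8s --tgz \
-#       -Phadoop-2.7 -Pkubernetes -Phive -Phive-thriftserver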
-
-
-RUN set -ex && \
-    apk upgrade --no-cache && \
-    ln -s /lib /lib64 && \
-    apk add --no-cache bash tini libc6-compat linux-pam krb5 krb5-libs nss curl openssl && \
-    mkdir -p /opt/mahout && \
-    mkdir -p /opt/mahout/examples && \
-    mkdir -p /opt/mahout/work-dir && \
-    mkdir -p /opt/spark && \
-    export MAHOUT_DOCKER_HOME=/opt/mahout && \
-    export SPARK_VERSION=spark-${SPARK_MAJOR_MINOR} && \
-    export SPARK_BASE=/opt/spark && \
-    export SPARK_HOME=${SPARK_BASE}/${SPARK_VERSION} && \
-    export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=512m" && \
-    export SPARK_SRC_URL="https://archive.apache.org/dist/spark/${SPARK_VERSION}/${SPARK_VERSION}.tgz" && \
-    export SPARK_SRC_SHA512_URL="https://archive.apache.org/dist/spark/${SPARK_VERSION}/${SPARK_VERSION}.tgz.sha512" && \
-    export SPARK_SRC_SHA512="D33096E7EFBC4B131004C85FB5833AC3BAB8F097644CBE68D89ADC81F5144B5535337FD0082FA04A19C2870BD7D84758E8AE9C6EC1C7F3DF9FED35325EEA8928" && \
-    curl -LfsS $SPARK_SRC_URL -o ${SPARK_BASE}/${SPARK_VERSION}.tgz && \
-    curl -LfsS $SPARK_SRC_SHA512_URL -o ${SPARK_BASE}/${SPARK_VERSION}.tgz.sha512 && \
-    # TODO: verify the download, e.g. echo "${SPARK_SRC_SHA512}  ${SPARK_BASE}/${SPARK_VERSION}.tgz" | sha512sum -c -
-    tar -xzvf ${SPARK_BASE}/${SPARK_VERSION}.tgz -C ${SPARK_BASE}/ && \
-    echo ${SPARK_BASE}/${SPARK_VERSION} && \
-    sh ${SPARK_HOME}/dev/change-scala-version.sh ${SCALA_MAJOR} && \
-    # DATE, REVISION, and ZINC_PORT are expected to be supplied by the build environment; they are not defined in this Dockerfile.
-    sh ${SPARK_HOME}/dev/make-distribution.sh --name ${DATE}-${REVISION} --pip --tgz -DzincPort=${ZINC_PORT} \
-         -Phadoop-${HADOOP_MAJOR} -Pkubernetes -Pkinesis-asl -Phive -Phive-thriftserver && \
-    #sh ${SPARK_HOME}/build/mvn -Pkubernetes -Pscala-${SCALA_MAJOR} -DskipTests clean package
-    touch /opt/mahout/RELEASE && \
-    # below is for nodes.  for the moment lets get a master up
-    # rm /bin/sh && \
-    # ln -sv /bin/bash /bin/sh && \
-    # echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
-    chgrp root /etc/passwd && chmod ug+rw /etc/passwd
-
-# build mahout (assumes Maven and the Mahout sources are available in the build context)
-RUN mvn clean install
-
-ENV MAHOUT_HOME /opt/mahout
-COPY lib ${MAHOUT_HOME}/lib
-COPY bin ${MAHOUT_HOME}/bin
-COPY entrypoint.sh /opt/
-COPY Dockerfile ${MAHOUT_HOME}
-COPY examples ${MAHOUT_HOME}/examples
-
-ENV SPARK_HOME /opt/spark
-COPY spark-build/jars ${SPARK_HOME}/jars
-COPY spark-build/bin ${SPARK_HOME}/bin
-COPY spark-build/sbin ${SPARK_HOME}/sbin
-COPY spark-build/kubernetes/tests ${SPARK_HOME}/tests
-COPY spark-build/data ${SPARK_HOME}/data
-
-ENV MAHOUT_CLASSPATH ${MAHOUT_HOME}/lib
-ENV SPARK_CLASSPATH ${SPARK_HOME}/jars
-
-WORKDIR /opt/mahout/work-dir
-RUN chmod g+w /opt/mahout/work-dir
-
-ENTRYPOINT [ "/opt/entrypoint.sh" ]
-
-# Specify the User that the actual main process will run as
-USER ${spark_uid}
\ No newline at end of file
diff --git a/entrypoint.sh b/entrypoint.sh
deleted file mode 100755
index bb40767..0000000
--- a/entrypoint.sh
+++ /dev/null
@@ -1,94 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# echo commands to the terminal output
-set -ex
-
-# Check whether there is a passwd entry for the container UID
-myuid=$(id -u)
-mygid=$(id -g)
-# turn off -e for getent because it will return error code in anonymous uid case
-set +e
-uidentry=$(getent passwd $myuid)
-set -e
-
-# If there is no passwd entry for the container UID, attempt to create one
-if [ -z "$uidentry" ] ; then
-    if [ -w /etc/passwd ] ; then
-        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:${SPARK_HOME}:/bin/false" >> /etc/passwd
-    else
-        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
-    fi
-fi
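-
-# For example, running as uid 185 with no passwd entry would append a line like
-#   "185:x:185:0:anonymous uid:/opt/spark:/bin/false"   (values illustrative)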
-
-### TODO: ensure that MAHOUT_HOME is properly established as an env var.
-SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*:${MAHOUT_HOME}/lib/*"
-env | grep SPARK_JAVA_OPT_ | sort -t_ -k4 -n | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt
-readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt
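-# For example (values hypothetical), SPARK_JAVA_OPT_0=-Dspark.driver.port=7078 and
-# SPARK_JAVA_OPT_1=-Dspark.ui.port=4041 become the array
-# (-Dspark.driver.port=7078 -Dspark.ui.port=4041), ordered by numeric suffix.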
-
-if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
-  SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
-fi
-
-
-
-if ! [ -z ${HADOOP_CONF_DIR+x} ]; then
-  SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH:$MAHOUT_CLASSPATH";
-fi
-
-case "$1" in
-  driver)
-    shift 1
-    CMD=(
-      "$SPARK_HOME/bin/spark-submit"
-      --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS"
-      --conf "spark.executor.extraLibraryPath=${MAHOUT_HOME}/lib/mahout-spark_*-dependency-reduced.jar"
-      --deploy-mode client
-      "$@"
-    )
-    ;;
-  executor)
-    shift 1
-    CMD=(
-      ${JAVA_HOME}/bin/java
-      "${SPARK_EXECUTOR_JAVA_OPTS[@]}"
-      -Xms$SPARK_EXECUTOR_MEMORY
-      -Xmx$SPARK_EXECUTOR_MEMORY
-      -cp "$SPARK_CLASSPATH:$MAHOUT_CLASSPATH"
-      org.apache.spark.executor.CoarseGrainedExecutorBackend
-      --driver-url $SPARK_DRIVER_URL
-      --executor-id $SPARK_EXECUTOR_ID
-      --conf "spark.executor.extraLibraryPath=$MAHOUT_HOME/lib/mahout-spark_*-dependency-reduced.jar"
-      --cores $SPARK_EXECUTOR_CORES
-      --app-id $SPARK_APPLICATION_ID
-      --hostname $SPARK_EXECUTOR_POD_IP
-    )
-    ;;
-
-  *)
-    echo "Non-spark-on-k8s command provided, proceeding in pass-through mode..."
-    CMD=("$@")
-    ;;
-esac
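-
-# Illustrative invocations (arguments hypothetical):
-#   /opt/entrypoint.sh driver --class org.example.App local:///opt/mahout/examples/app.jar
-#   /opt/entrypoint.sh executor
-#   /opt/entrypoint.sh bash          # anything else runs in pass-through mode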
-
-# Execute the container CMD under tini for better hygiene
-exec /sbin/tini -s -- "${CMD[@]}"
\ No newline at end of file
diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/.gitignore b/resource-managers/kubernetes/docker/src/main/dockerfiles/.gitignore
deleted file mode 100644
index e69de29..0000000
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/.gitignore
+++ /dev/null
diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/Dockerfile b/resource-managers/kubernetes/docker/src/main/dockerfiles/Dockerfile
deleted file mode 100644
index 6f588ab..0000000
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/Dockerfile
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-FROM openjdk:8-alpine
-
-ARG spark_uid=185
-
-# Before building the Mahout docker image, we must build a Spark distribution following
-# the instructions in http://spark.apache.org/docs/latest/building-spark.html.
-# This Dockerfile builds Spark 2.4.3 against Scala 2.12.
-# docker build -t mahout:latest -f resource_managers/docker/kubernetes/src/main/dockerfiles/Dockerfile .
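-#
-# A sketch of fetching and verifying the source tarball by hand (mirror URL
-# assumed; sha512sum -c expects "<hash>  <file>" with two spaces):
-#   curl -LfsS https://archive.apache.org/dist/spark/spark-2.4.3/spark-2.4.3.tgz -o spark-2.4.3.tgz
-#   echo "<expected-sha512>  spark-2.4.3.tgz" | sha512sum -c -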
-
-
-RUN set -ex && \
-    apk upgrade --no-cache && \
-    ln -s /lib /lib64 && \
-    apk add --no-cache bash tini libc6-compat linux-pam krb5 krb5-libs nss curl openssl && \
-    mkdir -p /opt/mahout && \
-    mkdir -p /opt/mahout/examples && \
-    mkdir -p /opt/mahout/work-dir && \
-    mkdir -p /opt/spark && \
-    export MAHOUT_HOME=/opt/mahout && \
-    mkdir -p $MAHOUT_HOME/spark-build && \
-    export MAVEN_OPTS="-Xmx2g -XX:ReservedCodeCacheSize=512m" && \
-    export SPARK_HOME=$MAHOUT_HOME/spark-build && \
-    export SPARK_SRC_URL="https://archive.apache.org/dist/spark/spark-2.4.3/spark-2.4.3.tgz" && \
-    export SPARK_SRC_SHA512="3EAEA3B0A81A717BB43CE6EE0BB2C3B8351EF080DB9499AF66F9F22C8A18D38C5E1426CBFEF04AFD2A4002ACE5B28A6BEACBCE4E5E42506F4FD270B05D0DB379" && \
-    curl -LfsS $SPARK_SRC_URL -o $SPARK_HOME/spark-2.4.3.tgz && \
-    echo "${SPARK_SRC_SHA512}  ${SPARK_HOME}/spark-2.4.3.tgz" | sha512sum -c - && \
-    # extract the source tree so the build scripts below exist
-    tar -xzf $SPARK_HOME/spark-2.4.3.tgz -C $SPARK_HOME --strip-components=1 && \
-    $SPARK_HOME/dev/change-scala-version.sh 2.12 && \
-    $SPARK_HOME/build/mvn -Pkubernetes -Pscala-2.12 -DskipTests clean package && \
-    touch /opt/mahout/RELEASE && \
-    rm /bin/sh && \
-    ln -sv /bin/bash /bin/sh && \
-    echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
-    chgrp root /etc/passwd && chmod ug+rw /etc/passwd
-
-
-COPY lib /opt/mahout/lib
-COPY bin /opt/mahout/bin
-COPY resource-managers/kubernetes/docker/src/main/dockerfiles/entrypoint.sh /opt/
-COPY examples /opt/mahout/examples
-
-COPY spark-build/jars /opt/spark/jars
-COPY spark-build/bin /opt/spark/bin
-COPY spark-build/sbin /opt/spark/sbin
-COPY spark-build/kubernetes/tests /opt/spark/tests
-COPY spark-build/data /opt/spark/data
-
-ENV MAHOUT_HOME /opt/mahout
-ENV SPARK_HOME /opt/spark
-
-
-
-WORKDIR /opt/mahout/work-dir
-RUN chmod g+w /opt/mahout/work-dir
-
-ENTRYPOINT [ "/opt/entrypoint.sh" ]
-
-# Specify the User that the actual main process will run as
-USER ${spark_uid}
\ No newline at end of file
diff --git a/resource-managers/kubernetes/docker/src/main/dockerfiles/entrypoint.sh b/resource-managers/kubernetes/docker/src/main/dockerfiles/entrypoint.sh
deleted file mode 100755
index 726b613..0000000
--- a/resource-managers/kubernetes/docker/src/main/dockerfiles/entrypoint.sh
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env bash
-
-#
-# Licensed to the Apache Software Foundation (ASF) under one or more
-# contributor license agreements.  See the NOTICE file distributed with
-# this work for additional information regarding copyright ownership.
-# The ASF licenses this file to You under the Apache License, Version 2.0
-# (the "License"); you may not use this file except in compliance with
-# the License.  You may obtain a copy of the License at
-#
-#    http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# echo commands to the terminal output
-set -ex
-
-# Check whether there is a passwd entry for the container UID
-myuid=$(id -u)
-mygid=$(id -g)
-# turn off -e for getent because it will return error code in anonymous uid case
-set +e
-uidentry=$(getent passwd $myuid)
-set -e
-
-# If there is no passwd entry for the container UID, attempt to create one
-if [ -z "$uidentry" ] ; then
-    if [ -w /etc/passwd ] ; then
-        echo "$myuid:x:$myuid:$mygid:${SPARK_USER_NAME:-anonymous uid}:$SPARK_HOME:/bin/false" >> /etc/passwd
-    else
-        echo "Container ENTRYPOINT failed to add passwd entry for anonymous UID"
-    fi
-fi
-
-SPARK_CLASSPATH="$SPARK_CLASSPATH:${SPARK_HOME}/jars/*"
-env | grep SPARK_JAVA_OPT_ | sort -t_ -k4 -n | sed 's/[^=]*=\(.*\)/\1/g' > /tmp/java_opts.txt
-readarray -t SPARK_EXECUTOR_JAVA_OPTS < /tmp/java_opts.txt
-
-if [ -n "$SPARK_EXTRA_CLASSPATH" ]; then
-  SPARK_CLASSPATH="$SPARK_CLASSPATH:$SPARK_EXTRA_CLASSPATH"
-fi
-
-
-
-if ! [ -z ${HADOOP_CONF_DIR+x} ]; then
-  SPARK_CLASSPATH="$HADOOP_CONF_DIR:$SPARK_CLASSPATH:$MAHOUT_CLASSPATH";
-fi
-
-case "$1" in
-  driver)
-    shift 1
-    CMD=(
-      "$SPARK_HOME/bin/spark-submit"
-      --conf "spark.driver.bindAddress=$SPARK_DRIVER_BIND_ADDRESS"
-      --conf "spark.executor.extraLibraryPath=$MAHOUT_HOME/lib/mahout-spark_*-dependency-reduced.jar"
-      --deploy-mode client
-      "$@"
-    )
-    ;;
-  executor)
-    shift 1
-    CMD=(
-      ${JAVA_HOME}/bin/java
-      "${SPARK_EXECUTOR_JAVA_OPTS[@]}"
-      -Xms$SPARK_EXECUTOR_MEMORY
-      -Xmx$SPARK_EXECUTOR_MEMORY
-      -cp "$SPARK_CLASSPATH:$MAHOUT_CLASSPATH"
-      org.apache.spark.executor.CoarseGrainedExecutorBackend
-      --driver-url $SPARK_DRIVER_URL
-      --executor-id $SPARK_EXECUTOR_ID
-      --conf "spark.executor.extraLibraryPath=$MAHOUT_HOME/lib/mahout-spark_*-dependency-reduced.jar"
-      --cores $SPARK_EXECUTOR_CORES
-      --app-id $SPARK_APPLICATION_ID
-      --hostname $SPARK_EXECUTOR_POD_IP
-    )
-    ;;
-
-  *)
-    echo "Non-spark-on-k8s command provided, proceeding in pass-through mode..."
-    CMD=("$@")
-    ;;
-esac
-
-# Execute the container CMD under tini for better hygiene
-exec /sbin/tini -s -- "${CMD[@]}"
\ No newline at end of file