DIRKRB-700.
diff --git a/has-project/supports/spark/README.md b/has-project/supports/spark/README.md
new file mode 100644
index 0000000..84e9eac
--- /dev/null
+++ b/has-project/supports/spark/README.md
@@ -0,0 +1,45 @@
+Enable Spark
+===============
+
+## 1. Apply the [patch](https://github.com/apache/directory-kerby/blob/has-project/has/supports/hadoop/hadoop-2.7.2.patch) to the hadoop-2.7.2 source code
+```
+git apply hadoop-2.7.2.patch
+```
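+
+For example, assuming the hadoop-2.7.2 source tree and the downloaded patch sit side by side (both paths here are illustrative):
+```
+cd hadoop-2.7.2-src
+git apply ../hadoop-2.7.2.patch
+```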
+
+## 2. Install the patched Hadoop
+```
+mvn clean install -DskipTests
+```
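+
+The install publishes the patched Hadoop artifacts to the local Maven repository, which the Spark build below resolves against. A quick sanity check, assuming the default repository location under ~/.m2:
+```
+ls ~/.m2/repository/org/apache/hadoop/hadoop-common/2.7.2/
+```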
+
+## 3. Apply the patch to the spark-v2.0.0 source code
+```
+git apply spark-v2.0.0.patch
+```
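+
+To verify that the patch applies cleanly before touching the tree, a dry run can be done first:
+```
+git apply --check spark-v2.0.0.patch
+git apply spark-v2.0.0.patch
+```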
+
+## 4. Build Spark
+```
+./build/mvn -Pyarn -Phadoop-2.7 -Dhadoop.version=2.7.2 -DskipTests clean package
+```
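+
+To get a deployable tarball rather than in-place jars, Spark's distribution script can be driven with the same profiles (the --name label is arbitrary):
+```
+./dev/make-distribution.sh --name has-hadoop-2.7.2 --tgz -Pyarn -Phadoop-2.7 -Dhadoop.version=2.7.2
+```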
+
+## 5. Update spark-env.sh
+```
+SPARK_HISTORY_OPTS="-Dspark.history.kerberos.enabled=true \
+-Dspark.history.kerberos.principal=<spark/_HOST@HADOOP.COM> \
+-Dspark.history.kerberos.keytab=<keytab>"
+```
+
+> Note: "_HOST" should be replaced with the actual hostname.
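+
+For example, with illustrative values (host host1.hadoop.com, keytab /etc/has/spark.keytab), the entry would read:
+```
+SPARK_HISTORY_OPTS="-Dspark.history.kerberos.enabled=true \
+-Dspark.history.kerberos.principal=spark/host1.hadoop.com@HADOOP.COM \
+-Dspark.history.kerberos.keytab=/etc/has/spark.keytab"
+```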
+
+## 6. Submit a Spark job
+> Note: only YARN mode is supported.
+```
+./bin/spark-submit \
+  --use-has \
+  --class <main-class> \
+  --master <master-url> \
+  --deploy-mode <deploy-mode> \
+  --conf <key>=<value> \
+  ... # other options
+  <application-jar> \
+  <application-arguments>
+```
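+
+A concrete invocation might look like the following, using the SparkPi example class shipped with Spark (the jar path may differ depending on how the build was laid out):
+```
+./bin/spark-submit \
+  --use-has \
+  --class org.apache.spark.examples.SparkPi \
+  --master yarn \
+  --deploy-mode cluster \
+  examples/jars/spark-examples_2.11-2.0.0.jar \
+  100
+```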
diff --git a/has-project/supports/spark/spark-v2.0.0.patch b/has-project/supports/spark/spark-v2.0.0.patch
new file mode 100644
index 0000000..9e560a2
--- /dev/null
+++ b/has-project/supports/spark/spark-v2.0.0.patch
@@ -0,0 +1,59 @@
+diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+index 9feafc9..5501721 100644
+--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmit.scala
+@@ -549,7 +549,9 @@ object SparkSubmit {
+ 
+     // assure a keytab is available from any place in a JVM
+     if (clusterManager == YARN || clusterManager == LOCAL) {
+-      if (args.principal != null) {
++      if (args.useHas) {
++        UserGroupInformation.loginUserFromHas()
++      } else if (args.principal != null) {
+         require(args.keytab != null, "Keytab must be specified when principal is specified")
+         if (!new File(args.keytab).exists()) {
+           throw new SparkException(s"Keytab file: ${args.keytab} does not exist")
+diff --git a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+index f1761e7..835f4c1 100644
+--- a/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
++++ b/core/src/main/scala/org/apache/spark/deploy/SparkSubmitArguments.scala
+@@ -78,6 +78,8 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
+   var submissionToRequestStatusFor: String = null
+   var useRest: Boolean = true // used internally
+ 
++  var useHas: Boolean = false
++
+   /** Default properties present in the currently defined defaults file. */
+   lazy val defaultSparkProperties: HashMap[String, String] = {
+     val defaultProperties = new HashMap[String, String]()
+@@ -435,6 +437,9 @@ private[deploy] class SparkSubmitArguments(args: Seq[String], env: Map[String, S
+       case VERSION =>
+         SparkSubmit.printVersionAndExit()
+ 
++      case USE_HAS =>
++        useHas = true
++
+       case USAGE_ERROR =>
+         printUsageAndExit(1)
+ 
+diff --git a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+index 6767cc5..ec42de5 100644
+--- a/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
++++ b/launcher/src/main/java/org/apache/spark/launcher/SparkSubmitOptionParser.java
+@@ -76,6 +76,8 @@ class SparkSubmitOptionParser {
+   protected final String PRINCIPAL = "--principal";
+   protected final String QUEUE = "--queue";
+ 
++  protected final String USE_HAS = "--use-has";
++
+   /**
+    * This is the canonical list of spark-submit options. Each entry in the array contains the
+    * different aliases for the same option; the first element of each entry is the "official"
+@@ -115,6 +117,7 @@ class SparkSubmitOptionParser {
+     { REPOSITORIES },
+     { STATUS },
+     { TOTAL_EXECUTOR_CORES },
++    { USE_HAS },
+   };
+ 
+   /**
diff --git a/has-project/supports/zookeeper/README.md b/has-project/supports/zookeeper/README.md
index edc7a0e..f74c5dd 100644
--- a/has-project/supports/zookeeper/README.md
+++ b/has-project/supports/zookeeper/README.md
@@ -3,18 +3,18 @@
 
 ## 1. Create the dependency jars
 ```
-cd HAS/supports/zookeeper
+cd directory-kerby/has-project/supports/zookeeper
 mvn clean package
 ```
 
 ## 2. Copy the jars to ZooKeeper lib directory
 ```
-cp HAS/supports/zookeeper/lib/* $ZOOKEEPER_HOME/lib/
+cp directory-kerby/has-project/supports/zookeeper/lib/* $ZOOKEEPER_HOME/lib/
 ```
 
 ## 3. Copy the conf file to ZooKeeper conf directory
 ```
-cp HAS/supports/zookeeper/conf/* $ZOOKEEPER_HOME/conf/
+cp directory-kerby/has-project/supports/zookeeper/conf/* $ZOOKEEPER_HOME/conf/
 ```
 
 ## 4. Update Zookeeper security configuration files