Revert "AMBARI-21569.Users randomly getting "HDFS020 Could not write file" exceptions while running query from Hive View"

This reverts commit 9ba009b5a69b640e68db3c92aaf117bbbb92ab8d.
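
The change being reverted had added an alwaysRetry flag to HdfsApi.execute()
so that callers such as HdfsUtil.putStringToFile() could force the doAs retry
loop for every IOException, not only the "Cannot obtain block length for"
case. A minimal sketch of that retry loop, reconstructed from the hunks below
(the do/while setup around tryNumber and succeeded is not visible in the diff
and is assumed; ugi is HdfsApi's UserGroupInformation field):

    public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
        throws IOException, InterruptedException {
      T result = null;
      boolean succeeded = false;
      int tryNumber = 0;
      do {
        tryNumber += 1;
        try {
          // Run the action as the proxied user.
          result = ugi.doAs(action);
          succeeded = true;
        } catch (IOException ex) {
          // Without alwaysRetry, only "Cannot obtain block length for"
          // errors are retried; anything else is rethrown immediately.
          if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) {
            throw ex;
          }
          if (tryNumber >= 3) {
            throw ex;  // give up after three attempts
          }
          Thread.sleep(1000);  // retry after 1 second
        }
      } while (!succeeded);
      return result;
    }

Reverting restores the single-argument execute(PrivilegedExceptionAction<T>)
overload and the plain, unwrapped stream writes in putStringToFile().
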
diff --git a/contrib/views/hive20/src/main/resources/ui/yarn.lock b/contrib/views/hive20/src/main/resources/ui/yarn.lock
index 607cf81..477a15c 100644
--- a/contrib/views/hive20/src/main/resources/ui/yarn.lock
+++ b/contrib/views/hive20/src/main/resources/ui/yarn.lock
@@ -569,7 +569,7 @@
   dependencies:
     babel-runtime "^6.22.0"
 
-babel-plugin-transform-es2015-block-scoping@^6.23.0, babel-plugin-transform-es2015-block-scoping@^6.24.1:
+babel-plugin-transform-es2015-block-scoping@^6.23.0:
   version "6.24.1"
   resolved "https://registry.yarnpkg.com/babel-plugin-transform-es2015-block-scoping/-/babel-plugin-transform-es2015-block-scoping-6.24.1.tgz#76c295dc3a4741b1665adfd3167215dcff32a576"
   dependencies:
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
index 5bce7ba..90fa483 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsApi.java
@@ -386,20 +386,7 @@
    * @throws IOException
    * @throws InterruptedException
    */
-  public <T> T execute(PrivilegedExceptionAction<T> action) throws IOException, InterruptedException {
-    return this.execute(action, false);
-  }
-
-
-  /**
-   * Executes action on HDFS using doAs
-   * @param action strategy object
-   * @param <T> result type
-   * @return result of operation
-   * @throws IOException
-   * @throws InterruptedException
-   */
-  public <T> T execute(PrivilegedExceptionAction<T> action, boolean alwaysRetry)
+  public <T> T execute(PrivilegedExceptionAction<T> action)
       throws IOException, InterruptedException {
     T result = null;
 
@@ -414,7 +401,7 @@
         result = ugi.doAs(action);
         succeeded = true;
       } catch (IOException ex) {
-        if (!alwaysRetry && !ex.getMessage().contains("Cannot obtain block length for")) {
+        if (!ex.getMessage().contains("Cannot obtain block length for")) {
           throw ex;
         }
         if (tryNumber >= 3) {
@@ -422,7 +409,6 @@
         }
         LOG.info("HDFS threw 'IOException: Cannot obtain block length' exception. " +
             "Retrying... Try #" + (tryNumber + 1));
-        LOG.error("Retrying: " + ex.getMessage(), ex);
         Thread.sleep(1000);  //retry after 1 second
       }
     } while (!succeeded);
diff --git a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
index 39958c3..0670f1a 100644
--- a/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
+++ b/contrib/views/utils/src/main/java/org/apache/ambari/view/utils/hdfs/HdfsUtil.java
@@ -28,7 +28,6 @@
 
 import java.io.IOException;
 import java.util.Map;
-import java.security.PrivilegedExceptionAction;
 
 public class HdfsUtil {
   private final static Logger LOG =
@@ -39,29 +38,19 @@
    * @param filePath path to file
    * @param content new content of file
    */
-  public static void putStringToFile(final HdfsApi hdfs, final String filePath, final String content) throws HdfsApiException {
+  public static void putStringToFile(HdfsApi hdfs, String filePath, String content) throws HdfsApiException {
     FSDataOutputStream stream;
-      try {
+    try {
       synchronized (hdfs) {
-        hdfs.execute(new PrivilegedExceptionAction<Void>() {
-          @Override
-          public Void run() throws Exception {
-            stream = hdfs.create(filePath, true);
-            stream.write(content.getBytes());
-            stream.close();
-            return null;
-          }
-        }, true);
+        stream = hdfs.create(filePath, true);
+        stream.write(content.getBytes());
+        stream.close();
       }
     } catch (IOException e) {
       throw new HdfsApiException("HDFS020 Could not write file " + filePath, e);
     } catch (InterruptedException e) {
       throw new HdfsApiException("HDFS021 Could not write file " + filePath, e);
-    } finally {
-      if (stream != null) {
-        stream.close();
-      }
-    }
+    }
   }
 
   /**