[CARBONDATA-3102] Fix NoClassDefFoundError when using thriftServer and beeline to read/write data from/to S3

This PR fixes a NoClassDefFoundError that occurs when using thriftServer and beeline to access tables on cloud storage.

This closes #2925
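
For context, the error surfaces when a Spark session (such as the one backing the Thrift Server) scans a CarbonData table whose store path is on S3 and the httpclient/httpcore classes required by the S3 connector are missing or at mismatched versions; the patch below aligns those versions through shared Maven properties. The following is a minimal sketch of such a session, not the project's example code: the bucket, credentials, endpoint, and table name are placeholders, and the exact CarbonData session API may differ by version.

    import org.apache.spark.sql.SparkSession

    object S3ReadSketch {
      def main(args: Array[String]): Unit = {
        // Hypothetical credentials and endpoint; replace with real values.
        val spark = SparkSession.builder()
          .master("local[2]")
          .appName("S3ReadSketch")
          .config("spark.hadoop.fs.s3a.access.key", "<access-key>")
          .config("spark.hadoop.fs.s3a.secret.key", "<secret-key>")
          .config("spark.hadoop.fs.s3a.endpoint", "s3.amazonaws.com")
          .getOrCreate()

        // Without a consistent httpclient/httpcore pair on the classpath,
        // the first S3 call made by the table scan can fail with
        // NoClassDefFoundError.
        spark.sql("SELECT count(*) FROM some_carbon_table_on_s3").show()

        spark.stop()
      }
    }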
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala b/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
index ce69c66..595af20 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/benchmark/SimpleQueryBenchmark.scala
@@ -273,7 +273,7 @@
     }
   }
 
-  // run testcases and print comparison result
+  // run test cases and print comparison result
   private def runTest(spark: SparkSession, table1: String, table2: String): Unit = {
     val formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
     val date = new Date
diff --git a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
index d3d0a37..9cc43d0 100644
--- a/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
+++ b/examples/spark2/src/main/scala/org/apache/carbondata/examples/S3Example.scala
@@ -157,8 +157,8 @@
   }
 
   def getSparkMaster(args: Array[String]): String = {
-      if (args.length == 5) args(4)
-      else if (args(3).contains("spark:") || args(3).contains("mesos:")) args(3)
-      else "local"
-    }
+    if (args.length == 5) args(4)
+    else if (args(3).contains("spark:") || args(3).contains("mesos:")) args(3)
+    else "local"
+  }
 }
diff --git a/integration/hive/pom.xml b/integration/hive/pom.xml
index b2256e7..6df4f24 100644
--- a/integration/hive/pom.xml
+++ b/integration/hive/pom.xml
@@ -86,12 +86,12 @@
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpclient</artifactId>
-            <version>4.3.4</version>
+            <version>${httpclient.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.httpcomponents</groupId>
             <artifactId>httpcore</artifactId>
-            <version>4.3-alpha1</version>
+            <version>${httpcore.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.hadoop</groupId>
diff --git a/integration/presto/pom.xml b/integration/presto/pom.xml
index 3d7ec44..9fc1ace 100644
--- a/integration/presto/pom.xml
+++ b/integration/presto/pom.xml
@@ -543,7 +543,7 @@
     <dependency>
       <groupId>org.apache.httpcomponents</groupId>
       <artifactId>httpcore</artifactId>
-      <version>4.2</version>
+      <version>${httpcore.version}</version>
     </dependency>
   </dependencies>
 
diff --git a/integration/spark2/pom.xml b/integration/spark2/pom.xml
index d066142..c52ba40 100644
--- a/integration/spark2/pom.xml
+++ b/integration/spark2/pom.xml
@@ -129,6 +129,11 @@
       </exclusions>
     </dependency>
     <dependency>
+      <groupId>org.apache.httpcomponents</groupId>
+      <artifactId>httpclient</artifactId>
+      <version>${httpclient.version}</version>
+    </dependency>
+    <dependency>
       <groupId>net.java.dev.jets3t</groupId>
       <artifactId>jets3t</artifactId>
       <version>0.9.0</version>
diff --git a/pom.xml b/pom.xml
index df8ae42..c8c0fa4 100644
--- a/pom.xml
+++ b/pom.xml
@@ -112,6 +112,7 @@
     <snappy.version>1.1.2.6</snappy.version>
     <hadoop.version>2.7.2</hadoop.version>
     <httpclient.version>4.2.5</httpclient.version>
+    <httpcore.version>${httpclient.version}</httpcore.version>
     <scala.binary.version>2.11</scala.binary.version>
     <scala.version>2.11.8</scala.version>
     <hadoop.deps.scope>compile</hadoop.deps.scope>