[CARBONDATA-1396] Fix findbugs issues in carbondata-hive
Fix findbugs issues in carbondata-hive
This closes #1270
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
index bc66d49..f08b92b 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonDictionaryDecodeReadSupport.java
@@ -231,7 +231,7 @@
case SHORT:
return new ShortWritable((Short) obj);
case DATE:
- return new DateWritable(new Date((Integer) obj));
+ return new DateWritable(new Date((long) obj));
case TIMESTAMP:
return new TimestampWritable(new Timestamp((long) obj));
case STRING:
@@ -239,8 +239,9 @@
case DECIMAL:
return new HiveDecimalWritable(
HiveDecimal.create(new java.math.BigDecimal(obj.toString())));
+ default:
+ throw new IOException("unsupported data type:" + dataType);
}
- throw new IOException("Unknown primitive : " + dataType.getName());
}
/**
@@ -282,6 +283,9 @@
case DECIMAL:
((HiveDecimalWritable) writable)
.set(HiveDecimal.create(new java.math.BigDecimal(obj.toString())));
+ break;
+ default:
+ throw new IOException("unsupported data type:" + dataType);
}
}
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
index b922295..9171470 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveInputSplit.java
@@ -223,10 +223,10 @@
double seg1 = Double.parseDouble(segmentId);
double seg2 = Double.parseDouble(other.getSegmentId());
- if (seg1 - seg2 < 0) {
+ if (Double.compare(seg1, seg2) < 0) {
return -1;
}
- if (seg1 - seg2 > 0) {
+ if (Double.compare(seg1, seg2) > 0) {
return 1;
}
@@ -262,6 +262,28 @@
return 0;
}
+ @Override public boolean equals(Object obj) {
+ if (this == obj) {
+ return true;
+ }
+
+ if (!(obj instanceof CarbonHiveInputSplit)) {
+ return false;
+ }
+
+ CarbonHiveInputSplit other = (CarbonHiveInputSplit) obj;
+ return 0 == this.compareTo(other);
+ }
+
+ @Override public int hashCode() {
+ int result = taskId.hashCode();
+ result = 31 * result + segmentId.hashCode();
+ result = 31 * result + bucketId.hashCode();
+ result = 31 * result + invalidSegments.hashCode();
+ result = 31 * result + numberOfBlocklets;
+ return result;
+ }
+
@Override public String getBlockPath() {
return getPath().getName();
}
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
index ae87d66..b8bd0e2 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonHiveRecordReader.java
@@ -20,7 +20,6 @@
import java.sql.Date;
import java.sql.Timestamp;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
@@ -45,9 +44,7 @@
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.StructField;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
@@ -61,7 +58,6 @@
implements org.apache.hadoop.mapred.RecordReader<Void, ArrayWritable> {
private ArrayWritable valueObj = null;
- private CarbonObjectInspector objInspector;
private long recordReaderCounter = 0;
private int[] columnIds;
@@ -95,14 +91,8 @@
List<TypeInfo> columnTypes;
// Get column names and sort order
final String colIds = conf.get("hive.io.file.readcolumn.ids");
- final String columnNameProperty = conf.get(serdeConstants.LIST_COLUMNS);
final String columnTypeProperty = conf.get(serdeConstants.LIST_COLUMN_TYPES);
- if (columnNameProperty.length() == 0) {
- columnNames = new ArrayList<String>();
- } else {
- columnNames = Arrays.asList(columnNameProperty.split(","));
- }
if (columnTypeProperty.length() == 0) {
columnTypes = new ArrayList<TypeInfo>();
} else {
@@ -115,7 +105,6 @@
if (!colIds.equals("")) {
String[] arraySelectedColId = colIds.split(",");
- List<TypeInfo> reqColTypes = new ArrayList<TypeInfo>();
columnIds = new int[arraySelectedColId.length];
int columnId = 0;
for (int j = 0; j < arraySelectedColId.length; j++) {
@@ -124,8 +113,6 @@
}
}
- rowTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNames, columnTypes);
- this.objInspector = new CarbonObjectInspector((StructTypeInfo) rowTypeInfo);
}
@Override public boolean next(Void aVoid, ArrayWritable value) throws IOException {
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
index 4c7f1a6..6722dcf 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/CarbonObjectInspector.java
@@ -166,7 +166,7 @@
return hash;
}
- class StructFieldImpl implements StructField {
+ private static class StructFieldImpl implements StructField {
private final String name;
private final ObjectInspector inspector;
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
index 273536a..86ebc0d 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/MapredCarbonInputFormat.java
@@ -123,7 +123,6 @@
// getting the table absoluteTableIdentifier from the carbonTable
// to avoid unnecessary deserialization
- StringBuilder colNames = new StringBuilder();
AbsoluteTableIdentifier identifier = carbonTable.getAbsoluteTableIdentifier();
String projection = getProjection(configuration, carbonTable,
diff --git a/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java b/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
index ae931fb..5bc6461 100644
--- a/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
+++ b/integration/hive/src/main/java/org/apache/carbondata/hive/server/HiveEmbeddedServer2.java
@@ -19,6 +19,7 @@
import java.io.File;
import java.lang.reflect.Field;
+import java.security.SecureRandom;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
@@ -51,6 +52,7 @@
private HiveServer2 hiveServer;
private HiveConf config;
private int port;
+ private static Random secureRandom = new SecureRandom();
public void start() throws Exception {
log.info("Starting Hive Local/Embedded Server...");
@@ -112,11 +114,18 @@
conf.addToRestrictList("columns.comments");
conf.set("hive.scratch.dir.permission", "777");
conf.setVar(ConfVars.SCRATCHDIRPERMISSION, "777");
- scratchDirFile.mkdirs();
- // also set the permissions manually since Hive doesn't do it...
- scratchDirFile.setWritable(true, false);
+ if (!scratchDirFile.exists()) {
+ if (!scratchDirFile.mkdirs()) {
+ throw new IllegalArgumentException("could not create the directory:" + scratchDir);
+ }
+ // also set the permissions manually since Hive doesn't do it...
+ if (!scratchDirFile.setWritable(true, false)) {
+ throw new IllegalArgumentException("could not set write permissions for the directory:" +
+ scratchDir);
+ }
+ }
- int random = new Random().nextInt();
+ int random = secureRandom.nextInt();
conf.set("hive.metastore.warehouse.dir", scratchDir + "/warehouse" + random);
conf.set("hive.metastore.metadb.dir", scratchDir + "/metastore_db" + random);
diff --git a/pom.xml b/pom.xml
index f722c66..df3c5bd 100644
--- a/pom.xml
+++ b/pom.xml
@@ -101,6 +101,7 @@
<module>hadoop</module>
<module>integration/spark-common</module>
<module>integration/spark-common-test</module>
+ <module>integration/hive</module>
<module>assembly</module>
</modules>