SQOOP-3014: Sqoop with HCatalog import loses precision for large numbers that do not fit into double
(Zoltan Toth via Boglarka Egyed)
diff --git a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java
index aba2458..1c1ed1e 100644
--- a/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java
+++ b/src/java/org/apache/sqoop/mapreduce/hcat/SqoopHCatImportHelper.java
@@ -149,6 +149,13 @@
LOG.debug("Static partition key used : " + partKeysString);
}
+ /* Package-private no-argument constructor intended only for unit tests,
+  * so the helper can be instantiated without the full configuration setup
+  */
+ SqoopHCatImportHelper() {
+
+ }
+
public HCatRecord convertToHCatRecord(SqoopRecord sqr) throws IOException,
InterruptedException {
try {
@@ -441,13 +448,24 @@
HiveChar hChar = new HiveChar(val.toString(), cti.getLength());
return hChar;
} else if (hfsType == HCatFieldSchema.Type.DECIMAL) {
- BigDecimal bd = new BigDecimal(n.doubleValue(),
- MathContext.DECIMAL128);
- return HiveDecimal.create(bd);
+ return convertNumberIntoHiveDecimal(n);
}
return null;
}
+ HiveDecimal convertNumberIntoHiveDecimal(Number number) {
+   // SQOOP-3014: build the BigDecimal without a lossy double round-trip.
+   BigDecimal bigDecimal;
+   if (number instanceof BigDecimal) {
+     bigDecimal = (BigDecimal) number;
+   } else {
+     // toString() is exact for Long and matches BigDecimal.valueOf for
+     // Double; also covers Integer/Float/etc. so the result is never null.
+     bigDecimal = new BigDecimal(number.toString());
+   }
+   return HiveDecimal.create(bigDecimal);
+ }
+
public void cleanup() throws IOException {
if (null != lobLoader) {
lobLoader.close();
diff --git a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
index d784a20..4686493 100644
--- a/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
+++ b/src/test/org/apache/sqoop/hcat/HCatalogImportTest.java
@@ -42,6 +42,7 @@
import org.apache.hadoop.hive.common.type.HiveChar;
import org.apache.hadoop.hive.common.type.HiveDecimal;
import org.apache.hadoop.hive.common.type.HiveVarchar;
+import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hive.hcatalog.data.HCatRecord;
import org.apache.hive.hcatalog.data.schema.HCatFieldSchema;
import org.apache.hive.hcatalog.data.schema.HCatSchema;
@@ -401,6 +402,36 @@
}
@Test
+ public void testDecimalTypes() throws Exception {
+   final int TOTAL_RECORDS = 1 * 10;
+   String hcatTable = getTableName().toUpperCase();
+   boolean allowRounding = false;
+   String dbTypeNumeric = "numeric";
+   String dbTypeDecimal = "decimal";
+   int sqlTypeNumeric = Types.NUMERIC;
+   int sqlTypeDecimal = Types.DECIMAL;
+   HCatFieldSchema.Type hcatTypeDecimal = HCatFieldSchema.Type.DECIMAL;
+
+   BigDecimal inputValue1 = new BigDecimal("454018528782.42006329");
+   HiveDecimal expectedValue1 = HiveDecimal.create(new BigDecimal("454018528782.42006"), allowRounding);
+   BigDecimal inputValue2 = new BigDecimal("87658675864540185.123456789123456789");
+   HiveDecimal expectedValue2 = HiveDecimal.create(new BigDecimal("87658675864540185.12346"), allowRounding);
+   int precision = 22;
+   int scale = 5;
+
+   ColumnGenerator[] hcatColumns = new ColumnGenerator[] {
+     HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(0), dbTypeNumeric, sqlTypeNumeric,
+       hcatTypeDecimal, precision, scale, expectedValue1, inputValue1, KeyType.NOT_A_KEY),
+
+     HCatalogTestUtils.colGenerator(HCatalogTestUtils.forIdx(1), dbTypeDecimal, sqlTypeDecimal,
+       hcatTypeDecimal, precision, scale, expectedValue2, inputValue2, KeyType.NOT_A_KEY)
+   };
+   List<String> addlArgsArray = new ArrayList<String>();
+   setExtraArgs(addlArgsArray);
+   runHCatImport(addlArgsArray, TOTAL_RECORDS, hcatTable, hcatColumns, null);
+ }
+
+ @Test
public void testNumberTypes() throws Exception {
final int TOTAL_RECORDS = 1 * 10;
String table = getTableName().toUpperCase();
diff --git a/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java b/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java
new file mode 100644
index 0000000..3f734ea
--- /dev/null
+++ b/src/test/org/apache/sqoop/mapreduce/hcat/TestSqoopHCatImportHelper.java
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sqoop.mapreduce.hcat;
+
+import org.apache.hadoop.hive.common.type.HiveDecimal;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.math.BigDecimal;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestSqoopHCatImportHelper {
+
+  private SqoopHCatImportHelper importHelper;
+
+  @Before
+  public void init() {
+    importHelper = new SqoopHCatImportHelper();
+  }
+
+  @Test
+  public void convertLongNumberIntoBigDecimalWithoutRounding() {
+    Long input = Long.valueOf("20160523112914897");
+    HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input);
+    assertEquals(new BigDecimal("20160523112914897"), actual.bigDecimalValue());
+  }
+
+  @Test
+  public void convertDoubleNumberIntoBigDecimalWithoutRounding() {
+    Double input = Double.valueOf("0.12345678912345678");
+    HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input);
+    assertEquals(new BigDecimal("0.12345678912345678"), actual.bigDecimalValue());
+  }
+
+  @Test
+  public void keepBigDecimalNumberIfInputIsBigDecimal() {
+    BigDecimal input = new BigDecimal("87658675864540185.123456789123456789");
+    HiveDecimal actual = importHelper.convertNumberIntoHiveDecimal(input);
+    assertEquals(new BigDecimal("87658675864540185.123456789123456789"), actual.bigDecimalValue());
+  }
+
+}