PARQUET-1723: Read From Maps without using .containsKey(...) (#711)
diff --git a/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java b/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java
index a8b95f8..b9f9b67 100644
--- a/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java
+++ b/parquet-column/src/main/java/org/apache/parquet/column/EncodingStats.java
@@ -57,19 +57,13 @@
}
public int getNumDictionaryPagesEncodedAs(Encoding enc) {
- if (dictStats.containsKey(enc)) {
- return dictStats.get(enc);
- } else {
- return 0;
- }
+ final Integer i = dictStats.get(enc);
+ return (i == null) ? 0 : i.intValue();
}
public int getNumDataPagesEncodedAs(Encoding enc) {
- if (dataStats.containsKey(enc)) {
- return dataStats.get(enc);
- } else {
- return 0;
- }
+ final Integer i = dataStats.get(enc);
+ return (i == null) ? 0 : i.intValue();
}
public boolean hasDictionaryPages() {
diff --git a/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java b/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java
index 52184e1..897fdf8 100644
--- a/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java
+++ b/parquet-column/src/main/java/org/apache/parquet/schema/GroupType.java
@@ -171,10 +171,11 @@
* @return the index of the field with that name
*/
public int getFieldIndex(String name) {
- if (!indexByName.containsKey(name)) {
+ Integer i = indexByName.get(name);
+ if (i == null) {
throw new InvalidRecordException(name + " not found in " + this);
}
- return indexByName.get(name);
+ return i.intValue();
}
/**
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java b/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java
index f059023..5e4bd09 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/ParquetReadOptions.java
@@ -29,6 +29,7 @@
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
+import java.util.Optional;
import java.util.Set;
import static org.apache.parquet.format.converter.ParquetMetadataConverter.NO_FILTER;
@@ -134,11 +135,9 @@
}
public boolean isEnabled(String property, boolean defaultValue) {
- if (properties.containsKey(property)) {
- return Boolean.valueOf(properties.get(property));
- } else {
- return defaultValue;
- }
+ Optional<String> propValue = Optional.ofNullable(properties.get(property));
+ return propValue.isPresent() ? Boolean.valueOf(propValue.get())
+ : defaultValue;
}
public static Builder builder() {
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java
index 3067e2b..6f21fa3 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/ColumnChunkPageReadStore.java
@@ -227,10 +227,11 @@
@Override
public PageReader getPageReader(ColumnDescriptor path) {
- if (!readers.containsKey(path)) {
+ final PageReader pageReader = readers.get(path);
+ if (pageReader == null) {
throw new IllegalArgumentException(path + " is not in the store: " + readers.keySet() + " " + rowCount);
}
- return readers.get(path);
+ return pageReader;
}
@Override
diff --git a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
index dc5c31d..d6fabb2 100644
--- a/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
+++ b/parquet-hadoop/src/main/java/org/apache/parquet/hadoop/MemoryManager.java
@@ -92,9 +92,7 @@
* @param writer the writer that has been closed
*/
synchronized void removeWriter(InternalParquetRecordWriter writer) {
- if (writerList.containsKey(writer)) {
- writerList.remove(writer);
- }
+ writerList.remove(writer);
if (!writerList.isEmpty()) {
updateAllocation();
}