PARQUET-1226: Fixes for CHECKIN compiler warning level with clang 5.0

This follows analogous work from ARROW-2117

Author: Wes McKinney <wes.mckinney@twosigma.com>

Closes #442 from wesm/PARQUET-1226 and squashes the following commits:

e3f512c [Wes McKinney] Leave default logging level for Arrow EP
d3ad125 [Wes McKinney] Fix various warnings exposed by clang 5.0, add extra compiler flags. Add ExternalProject logging to keep step with Arrow's CMake toolchain
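Most of the C++ changes below follow a single pattern: derived-class destructors that were written as "virtual ~T() {}" are either removed outright (the implicitly generated destructor is already virtual) or respelled as "~T() override", which is the form clang 5.0's stricter warnings expect. A minimal sketch of that pattern, using hypothetical class names rather than the actual Parquet readers and writers:

    // Hypothetical base/derived pair illustrating the destructor cleanup.
    class PageSource {
     public:
      virtual ~PageSource() = default;
      virtual int NextPage() = 0;
    };

    class FilePageSource : public PageSource {
     public:
      // Before: "virtual ~FilePageSource() {}" -- redundant and flagged by clang 5.0.
      // After: omit it entirely, or spell it with "override" when a body is needed.
      ~FilePageSource() override = default;
      int NextPage() override { return 0; }
    };

On the CMake side, the change suppresses -Wunused-template and -Wzero-as-null-pointer-constant for clang newer than 4.0 and, unless PARQUET_VERBOSE_THIRDPARTY_BUILD is enabled, routes ExternalProject output to log files instead of the console.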
diff --git a/CMakeLists.txt b/CMakeLists.txt
index c2d4ef4..bca8478 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -84,7 +84,7 @@
 set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_SOURCE_DIR}/cmake_modules")
 set(BUILD_SUPPORT_DIR "${CMAKE_SOURCE_DIR}/build-support")
 
-set(CLANG_FORMAT_VERSION "4.0")
+set(CLANG_FORMAT_VERSION "5.0")
 find_package(ClangTools)
 if ("$ENV{CMAKE_EXPORT_COMPILE_COMMANDS}" STREQUAL "1" OR CLANG_TIDY_FOUND)
   # Generate a Clang compile_commands.json "compilation database" file for use
@@ -160,6 +160,11 @@
       "Build Parquet with statically linked CRT"
       OFF)
   endif()
+
+  option(PARQUET_VERBOSE_THIRDPARTY_BUILD
+    "If off, output from ExternalProjects will be logged to files rather than shown"
+    OFF)
+
 endif()
 
 include(BuildUtils)
diff --git a/cmake_modules/SetupCxxFlags.cmake b/cmake_modules/SetupCxxFlags.cmake
index 1678e8d..01ed85b 100644
--- a/cmake_modules/SetupCxxFlags.cmake
+++ b/cmake_modules/SetupCxxFlags.cmake
@@ -109,6 +109,11 @@
       set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-undefined-func-template")
     endif()
 
+    if ("${COMPILER_VERSION}" VERSION_GREATER "4.0")
+      set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unused-template \
+-Wno-zero-as-null-pointer-constant")
+    endif()
+
     # Treat all compiler warnings as errors
     set(CXX_COMMON_FLAGS "${CXX_COMMON_FLAGS} -Wno-unknown-warning-option -Werror")
   elseif ("${COMPILER_FAMILY}" STREQUAL "gcc")
diff --git a/cmake_modules/ThirdpartyToolchain.cmake b/cmake_modules/ThirdpartyToolchain.cmake
index 09e30df..9f24112 100644
--- a/cmake_modules/ThirdpartyToolchain.cmake
+++ b/cmake_modules/ThirdpartyToolchain.cmake
@@ -30,6 +30,18 @@
   set(EP_C_FLAGS "${EP_C_FLAGS} -fPIC")
 endif()
 
+if (NOT PARQUET_VERBOSE_THIRDPARTY_BUILD)
+  set(EP_LOG_OPTIONS
+    LOG_CONFIGURE 1
+    LOG_BUILD 1
+    LOG_INSTALL 1
+    LOG_DOWNLOAD 1)
+  set(Boost_DEBUG FALSE)
+else()
+  set(EP_LOG_OPTIONS)
+  set(Boost_DEBUG TRUE)
+endif()
+
 # ----------------------------------------------------------------------
 # Configure toolchain with environment variables, if they exist
 
@@ -52,7 +64,6 @@
 # Boost
 
 # find boost headers and libs
-set(Boost_DEBUG TRUE)
 set(Boost_USE_MULTITHREADED ON)
 if (MSVC AND PARQUET_USE_STATIC_CRT)
   set(Boost_USE_STATIC_RUNTIME ON)
@@ -168,6 +179,7 @@
     URL "http://zlib.net/fossils/zlib-1.2.8.tar.gz"
     BUILD_BYPRODUCTS "${ZLIB_STATIC_LIB}"
     ${ZLIB_BUILD_BYPRODUCTS}
+    ${EP_LOG_OPTIONS}
     CMAKE_ARGS ${ZLIB_CMAKE_ARGS})
 
   set(THRIFT_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/thrift_ep/src/thrift_ep-install")
@@ -212,7 +224,10 @@
       URL https://github.com/lexxmark/winflexbison/releases/download/v.${WINFLEXBISON_VERSION}/win_flex_bison-${WINFLEXBISON_VERSION}.zip
       URL_HASH MD5=a2e979ea9928fbf8567e995e9c0df765
       SOURCE_DIR ${WINFLEXBISON_PREFIX}
-      CONFIGURE_COMMAND "" BUILD_COMMAND "" INSTALL_COMMAND "")
+      CONFIGURE_COMMAND ""
+      BUILD_COMMAND ""
+      INSTALL_COMMAND ""
+      ${EP_LOG_OPTIONS})
     set(THRIFT_DEPENDENCIES ${THRIFT_DEPENDENCIES} winflexbison_ep)
 
     set(THRIFT_CMAKE_ARGS "-DFLEX_EXECUTABLE=${WINFLEXBISON_PREFIX}/win_flex.exe"
@@ -229,7 +244,8 @@
     URL "http://archive.apache.org/dist/thrift/${THRIFT_VERSION}/thrift-${THRIFT_VERSION}.tar.gz"
     BUILD_BYPRODUCTS "${THRIFT_STATIC_LIB}" "${THRIFT_COMPILER}"
     CMAKE_ARGS ${THRIFT_CMAKE_ARGS}
-    DEPENDS ${THRIFT_DEPENDENCIES})
+    DEPENDS ${THRIFT_DEPENDENCIES}
+    ${EP_LOG_OPTIONS})
 
   set(THRIFT_VENDORED 1)
 else()
@@ -268,7 +284,7 @@
     set(GTEST_CMAKE_ARGS -DCMAKE_BUILD_TYPE=${CMAKE_BUILD_TYPE}
                          -DCMAKE_INSTALL_PREFIX=${GTEST_PREFIX}
                          -DCMAKE_CXX_FLAGS=${GTEST_CMAKE_CXX_FLAGS})
-                
+
     if (MSVC AND NOT PARQUET_USE_STATIC_CRT)
       set(GTEST_CMAKE_ARGS ${GTEST_CMAKE_ARGS} -Dgtest_force_shared_crt=ON)
     endif()
@@ -276,7 +292,8 @@
     ExternalProject_Add(googletest_ep
       URL "https://github.com/google/googletest/archive/release-${GTEST_VERSION}.tar.gz"
       BUILD_BYPRODUCTS "${GTEST_STATIC_LIB}" "${GTEST_MAIN_STATIC_LIB}"
-      CMAKE_ARGS ${GTEST_CMAKE_ARGS})
+      CMAKE_ARGS ${GTEST_CMAKE_ARGS}
+      ${EP_LOG_OPTIONS})
     set(GTEST_VENDORED 1)
   else()
     find_package(GTest REQUIRED)
@@ -328,7 +345,8 @@
     ExternalProject_Add(gbenchmark_ep
       URL "https://github.com/google/benchmark/archive/v${GBENCHMARK_VERSION}.tar.gz"
       BUILD_BYPRODUCTS "${GBENCHMARK_STATIC_LIB}"
-      CMAKE_ARGS ${GBENCHMARK_CMAKE_ARGS})
+      CMAKE_ARGS ${GBENCHMARK_CMAKE_ARGS}
+      ${EP_LOG_OPTIONS})
     set(GBENCHMARK_VENDORED 1)
   else()
     find_package(GBenchmark REQUIRED)
diff --git a/src/parquet/arrow/arrow-reader-writer-test.cc b/src/parquet/arrow/arrow-reader-writer-test.cc
index 369eb2e..72e65d4 100644
--- a/src/parquet/arrow/arrow-reader-writer-test.cc
+++ b/src/parquet/arrow/arrow-reader-writer-test.cc
@@ -56,10 +56,10 @@
 using arrow::Status;
 using arrow::Table;
 using arrow::TimeUnit;
-using arrow::default_memory_pool;
+using arrow::compute::Datum;
 using arrow::compute::DictionaryEncode;
 using arrow::compute::FunctionContext;
-using arrow::compute::Datum;
+using arrow::default_memory_pool;
 using arrow::io::BufferReader;
 
 using arrow::test::randint;
@@ -67,10 +67,10 @@
 
 using ArrowId = ::arrow::Type;
 using ParquetType = parquet::Type;
+using parquet::arrow::FromParquetSchema;
 using parquet::schema::GroupNode;
 using parquet::schema::NodePtr;
 using parquet::schema::PrimitiveNode;
-using parquet::arrow::FromParquetSchema;
 
 using ColumnVector = std::vector<std::shared_ptr<arrow::Column>>;
 
diff --git a/src/parquet/arrow/arrow-schema-test.cc b/src/parquet/arrow/arrow-schema-test.cc
index 771b996..d502d24 100644
--- a/src/parquet/arrow/arrow-schema-test.cc
+++ b/src/parquet/arrow/arrow-schema-test.cc
@@ -32,8 +32,8 @@
 using ParquetType = parquet::Type;
 using parquet::LogicalType;
 using parquet::Repetition;
-using parquet::schema::NodePtr;
 using parquet::schema::GroupNode;
+using parquet::schema::NodePtr;
 using parquet::schema::PrimitiveNode;
 
 namespace parquet {
@@ -579,9 +579,10 @@
     auto inner_group_type = std::make_shared<::arrow::StructType>(inner_group_fields);
     auto outer_group_fields = {
         std::make_shared<Field>("leaf2", INT32, true),
-        std::make_shared<Field>("innerGroup", ::arrow::list(std::make_shared<Field>(
-                                                  "innerGroup", inner_group_type, false)),
-                                false)};
+        std::make_shared<Field>(
+            "innerGroup",
+            ::arrow::list(std::make_shared<Field>("innerGroup", inner_group_type, false)),
+            false)};
     auto outer_group_type = std::make_shared<::arrow::StructType>(outer_group_fields);
 
     arrow_fields.push_back(std::make_shared<Field>("leaf1", INT32, true));
diff --git a/src/parquet/arrow/reader.cc b/src/parquet/arrow/reader.cc
index 7f81771..bd68ec3 100644
--- a/src/parquet/arrow/reader.cc
+++ b/src/parquet/arrow/reader.cc
@@ -45,12 +45,12 @@
 using arrow::Field;
 using arrow::Int32Array;
 using arrow::ListArray;
-using arrow::StructArray;
-using arrow::TimestampArray;
 using arrow::MemoryPool;
 using arrow::PoolBuffer;
 using arrow::Status;
+using arrow::StructArray;
 using arrow::Table;
+using arrow::TimestampArray;
 
 using parquet::schema::Node;
 
@@ -218,8 +218,6 @@
     NextRowGroup();
   }
 
-  virtual ~PrimitiveImpl() {}
-
   Status NextBatch(int64_t records_to_read, std::shared_ptr<Array>* out) override;
 
   template <typename ParquetType>
@@ -254,8 +252,6 @@
     InitField(node, children);
   }
 
-  virtual ~StructImpl() {}
-
   Status NextBatch(int64_t records_to_read, std::shared_ptr<Array>* out) override;
   Status GetDefLevels(const int16_t** data, size_t* length) override;
   Status GetRepLevels(const int16_t** data, size_t* length) override;
@@ -425,8 +421,7 @@
 
   // TODO(wesm): Refactor to share more code with ReadTable
 
-  auto ReadColumnFunc = [&indices, &row_group_index, &schema, &columns, &rg_metadata,
-                         this](int i) {
+  auto ReadColumnFunc = [&indices, &row_group_index, &schema, &columns, this](int i) {
     int column_index = indices[i];
 
     std::shared_ptr<Array> array;
diff --git a/src/parquet/arrow/record_reader.cc b/src/parquet/arrow/record_reader.cc
index cc968e9..a3af5ac 100644
--- a/src/parquet/arrow/record_reader.cc
+++ b/src/parquet/arrow/record_reader.cc
@@ -349,8 +349,6 @@
  public:
   typedef typename DType::c_type T;
 
-  ~TypedRecordReader() {}
-
   TypedRecordReader(const ColumnDescriptor* schema, ::arrow::MemoryPool* pool)
       : RecordReader::RecordReaderImpl(schema, pool), current_decoder_(nullptr) {}
 
diff --git a/src/parquet/arrow/schema.cc b/src/parquet/arrow/schema.cc
index e8bcce0..48b1181 100644
--- a/src/parquet/arrow/schema.cc
+++ b/src/parquet/arrow/schema.cc
@@ -34,9 +34,9 @@
 using ArrowType = arrow::Type;
 
 using parquet::Repetition;
+using parquet::schema::GroupNode;
 using parquet::schema::Node;
 using parquet::schema::NodePtr;
-using parquet::schema::GroupNode;
 using parquet::schema::PrimitiveNode;
 
 using ParquetType = parquet::Type;
diff --git a/src/parquet/arrow/writer.cc b/src/parquet/arrow/writer.cc
index 85d5bd3..5040e0c 100644
--- a/src/parquet/arrow/writer.cc
+++ b/src/parquet/arrow/writer.cc
@@ -31,18 +31,18 @@
 
 using arrow::Array;
 using arrow::BinaryArray;
-using arrow::ChunkedArray;
-using arrow::FixedSizeBinaryArray;
-using arrow::Decimal128Array;
 using arrow::BooleanArray;
+using arrow::ChunkedArray;
+using arrow::Decimal128Array;
+using arrow::Field;
+using arrow::FixedSizeBinaryArray;
 using arrow::Int16Array;
 using arrow::Int16Builder;
-using arrow::Field;
+using arrow::ListArray;
 using arrow::MemoryPool;
 using arrow::NumericArray;
 using arrow::PoolBuffer;
 using arrow::PrimitiveArray;
-using arrow::ListArray;
 using arrow::Status;
 using arrow::Table;
 using arrow::TimeUnit;
diff --git a/src/parquet/column_reader-test.cc b/src/parquet/column_reader-test.cc
index 2599325..ebb70f1 100644
--- a/src/parquet/column_reader-test.cc
+++ b/src/parquet/column_reader-test.cc
@@ -32,9 +32,9 @@
 #include "parquet/types.h"
 #include "parquet/util/test-common.h"
 
+using std::shared_ptr;
 using std::string;
 using std::vector;
-using std::shared_ptr;
 
 namespace parquet {
 
diff --git a/src/parquet/column_reader.cc b/src/parquet/column_reader.cc
index 4c11439..10d7210 100644
--- a/src/parquet/column_reader.cc
+++ b/src/parquet/column_reader.cc
@@ -111,8 +111,6 @@
     decompressor_ = GetCodecFromArrow(codec);
   }
 
-  virtual ~SerializedPageReader() {}
-
   // Implement the PageReader interface
   std::shared_ptr<Page> NextPage() override;
 
diff --git a/src/parquet/column_reader.h b/src/parquet/column_reader.h
index 6158cb3..0d5f6ec 100644
--- a/src/parquet/column_reader.h
+++ b/src/parquet/column_reader.h
@@ -216,7 +216,6 @@
   TypedColumnReader(const ColumnDescriptor* schema, std::unique_ptr<PageReader> pager,
                     ::arrow::MemoryPool* pool = ::arrow::default_memory_pool())
       : ColumnReader(schema, std::move(pager), pool), current_decoder_(nullptr) {}
-  virtual ~TypedColumnReader() {}
 
   // Read a batch of repetition levels, definition levels, and values from the
   // column.
diff --git a/src/parquet/column_scanner-test.cc b/src/parquet/column_scanner-test.cc
index 0cebdc0..1ebc719 100644
--- a/src/parquet/column_scanner-test.cc
+++ b/src/parquet/column_scanner-test.cc
@@ -32,9 +32,9 @@
 #include "parquet/types.h"
 #include "parquet/util/test-common.h"
 
+using std::shared_ptr;
 using std::string;
 using std::vector;
-using std::shared_ptr;
 
 namespace parquet {
 
diff --git a/src/parquet/column_writer.cc b/src/parquet/column_writer.cc
index 6d6347a..4f2ef6c 100644
--- a/src/parquet/column_writer.cc
+++ b/src/parquet/column_writer.cc
@@ -141,8 +141,6 @@
     compressor_ = GetCodecFromArrow(codec);
   }
 
-  virtual ~SerializedPageWriter() = default;
-
   int64_t WriteDictionaryPage(const DictionaryPage& page) override {
     int64_t uncompressed_size = page.size();
     std::shared_ptr<Buffer> compressed_data = nullptr;
@@ -462,8 +460,9 @@
                                            std::unique_ptr<PageWriter> pager,
                                            Encoding::type encoding,
                                            const WriterProperties* properties)
-    : ColumnWriter(metadata, std::move(pager), (encoding == Encoding::PLAIN_DICTIONARY ||
-                                                encoding == Encoding::RLE_DICTIONARY),
+    : ColumnWriter(metadata, std::move(pager),
+                   (encoding == Encoding::PLAIN_DICTIONARY ||
+                    encoding == Encoding::RLE_DICTIONARY),
                    encoding, properties) {
   switch (encoding) {
     case Encoding::PLAIN:
diff --git a/src/parquet/encoding-benchmark.cc b/src/parquet/encoding-benchmark.cc
index 9556fd1..ca12c6a 100644
--- a/src/parquet/encoding-benchmark.cc
+++ b/src/parquet/encoding-benchmark.cc
@@ -20,8 +20,8 @@
 #include "parquet/encoding-internal.h"
 #include "parquet/util/memory.h"
 
-using arrow::default_memory_pool;
 using arrow::MemoryPool;
+using arrow::default_memory_pool;
 
 namespace parquet {
 
diff --git a/src/parquet/encoding-internal.h b/src/parquet/encoding-internal.h
index 3e9a16d..894410f 100644
--- a/src/parquet/encoding-internal.h
+++ b/src/parquet/encoding-internal.h
@@ -467,7 +467,7 @@
     }
   }
 
-  virtual ~DictEncoder() { DCHECK(buffered_indices_.empty()); }
+  ~DictEncoder() override { DCHECK(buffered_indices_.empty()); }
 
   // TODO(wesm): think about how to address the construction semantics in
   // encodings/dictionary-encoding.h
@@ -482,8 +482,9 @@
     // reserve
     // an extra "RleEncoder::MinBufferSize" bytes. These extra bytes won't be used
     // but not reserving them would cause the encoder to fail.
-    return 1 + ::arrow::RleEncoder::MaxBufferSize(
-                   bit_width(), static_cast<int>(buffered_indices_.size())) +
+    return 1 +
+           ::arrow::RleEncoder::MaxBufferSize(
+               bit_width(), static_cast<int>(buffered_indices_.size())) +
            ::arrow::RleEncoder::MinBufferSize(bit_width());
   }
 
diff --git a/src/parquet/encoding-test.cc b/src/parquet/encoding-test.cc
index a658cb2..0da32cf 100644
--- a/src/parquet/encoding-test.cc
+++ b/src/parquet/encoding-test.cc
@@ -30,8 +30,8 @@
 #include "parquet/util/memory.h"
 #include "parquet/util/test-common.h"
 
-using arrow::default_memory_pool;
 using arrow::MemoryPool;
+using arrow::default_memory_pool;
 
 using std::string;
 using std::vector;
diff --git a/src/parquet/file_reader.cc b/src/parquet/file_reader.cc
index 7b74812..e3280c6 100644
--- a/src/parquet/file_reader.cc
+++ b/src/parquet/file_reader.cc
@@ -146,7 +146,7 @@
                  const ReaderProperties& props = default_reader_properties())
       : source_(std::move(source)), properties_(props) {}
 
-  ~SerializedFile() {
+  ~SerializedFile() override {
     try {
       Close();
     } catch (...) {
diff --git a/src/parquet/file_writer.cc b/src/parquet/file_writer.cc
index 87ee4f6..1e4a09e 100644
--- a/src/parquet/file_writer.cc
+++ b/src/parquet/file_writer.cc
@@ -198,7 +198,7 @@
     return row_group_writer_.get();
   }
 
-  ~FileSerializer() {
+  ~FileSerializer() override {
     try {
       Close();
     } catch (...) {
diff --git a/src/parquet/schema.cc b/src/parquet/schema.cc
index 826ef76..cbe72c6 100644
--- a/src/parquet/schema.cc
+++ b/src/parquet/schema.cc
@@ -430,7 +430,6 @@
  public:
   explicit SchemaVisitor(std::vector<format::SchemaElement>* elements)
       : elements_(elements) {}
-  virtual ~SchemaVisitor() {}
 
   void Visit(const Node* node) override {
     format::SchemaElement element;
@@ -593,10 +592,10 @@
 }  // namespace schema
 
 using schema::ColumnPath;
+using schema::GroupNode;
 using schema::Node;
 using schema::NodePtr;
 using schema::PrimitiveNode;
-using schema::GroupNode;
 
 void SchemaDescriptor::Init(std::unique_ptr<schema::Node> schema) {
   Init(NodePtr(schema.release()));
@@ -606,7 +605,6 @@
  public:
   explicit SchemaUpdater(const std::vector<ColumnOrder>& column_orders)
       : column_orders_(column_orders), leaf_count_(0) {}
-  virtual ~SchemaUpdater() {}
 
   void Visit(Node* node) override {
     if (node->is_group()) {
diff --git a/src/parquet/statistics-test.cc b/src/parquet/statistics-test.cc
index ec8f90a..1bbef26 100644
--- a/src/parquet/statistics-test.cc
+++ b/src/parquet/statistics-test.cc
@@ -36,14 +36,14 @@
 #include "parquet/types.h"
 #include "parquet/util/memory.h"
 
-using arrow::default_memory_pool;
 using arrow::MemoryPool;
+using arrow::default_memory_pool;
 
 namespace parquet {
 
+using schema::GroupNode;
 using schema::NodePtr;
 using schema::PrimitiveNode;
-using schema::GroupNode;
 
 namespace test {
 
diff --git a/src/parquet/statistics.cc b/src/parquet/statistics.cc
index 4c69632..416557c 100644
--- a/src/parquet/statistics.cc
+++ b/src/parquet/statistics.cc
@@ -23,8 +23,8 @@
 #include "parquet/statistics.h"
 #include "parquet/util/memory.h"
 
-using arrow::default_memory_pool;
 using arrow::MemoryPool;
+using arrow::default_memory_pool;
 
 namespace parquet {
 
diff --git a/src/parquet/test-util.h b/src/parquet/test-util.h
index ac6d0a1..a507dfb 100644
--- a/src/parquet/test-util.h
+++ b/src/parquet/test-util.h
@@ -37,8 +37,8 @@
 #include "parquet/util/memory.h"
 #include "parquet/util/test-common.h"
 
-using std::vector;
 using std::shared_ptr;
+using std::vector;
 
 namespace parquet {
 
diff --git a/src/parquet/util/comparison.h b/src/parquet/util/comparison.h
index 12be7ba..7070a0f 100644
--- a/src/parquet/util/comparison.h
+++ b/src/parquet/util/comparison.h
@@ -38,7 +38,6 @@
  public:
   typedef typename DType::c_type T;
   CompareDefault() {}
-  virtual ~CompareDefault() {}
   virtual bool operator()(const T& a, const T& b) { return a < b; }
 };
 
@@ -46,7 +45,6 @@
 class PARQUET_EXPORT CompareDefault<Int96Type> : public Comparator {
  public:
   CompareDefault() {}
-  virtual ~CompareDefault() {}
   virtual bool operator()(const Int96& a, const Int96& b) {
    // Only the MSB bit is affected by signed comparison
     // For little-endian, this is the last bit of Int96 type
@@ -65,7 +63,6 @@
 class PARQUET_EXPORT CompareDefault<ByteArrayType> : public Comparator {
  public:
   CompareDefault() {}
-  virtual ~CompareDefault() {}
   virtual bool operator()(const ByteArray& a, const ByteArray& b) {
     const int8_t* aptr = reinterpret_cast<const int8_t*>(a.ptr);
     const int8_t* bptr = reinterpret_cast<const int8_t*>(b.ptr);
@@ -77,7 +74,6 @@
 class PARQUET_EXPORT CompareDefault<FLBAType> : public Comparator {
  public:
   explicit CompareDefault(int length) : type_length_(length) {}
-  virtual ~CompareDefault() {}
   virtual bool operator()(const FLBA& a, const FLBA& b) {
     const int8_t* aptr = reinterpret_cast<const int8_t*>(a.ptr);
     const int8_t* bptr = reinterpret_cast<const int8_t*>(b.ptr);
@@ -117,7 +113,6 @@
 // Define Unsigned Comparators
 class PARQUET_EXPORT CompareUnsignedInt32 : public CompareDefaultInt32 {
  public:
-  virtual ~CompareUnsignedInt32() {}
   bool operator()(const int32_t& a, const int32_t& b) override {
     const uint32_t ua = a;
     const uint32_t ub = b;
@@ -127,7 +122,6 @@
 
 class PARQUET_EXPORT CompareUnsignedInt64 : public CompareDefaultInt64 {
  public:
-  virtual ~CompareUnsignedInt64() {}
   bool operator()(const int64_t& a, const int64_t& b) override {
     const uint64_t ua = a;
     const uint64_t ub = b;
@@ -137,7 +131,6 @@
 
 class PARQUET_EXPORT CompareUnsignedInt96 : public CompareDefaultInt96 {
  public:
-  virtual ~CompareUnsignedInt96() {}
   bool operator()(const Int96& a, const Int96& b) override {
     if (a.value[2] != b.value[2]) {
       return (a.value[2] < b.value[2]);
@@ -150,7 +143,6 @@
 
 class PARQUET_EXPORT CompareUnsignedByteArray : public CompareDefaultByteArray {
  public:
-  virtual ~CompareUnsignedByteArray() {}
   bool operator()(const ByteArray& a, const ByteArray& b) override {
     const uint8_t* aptr = reinterpret_cast<const uint8_t*>(a.ptr);
     const uint8_t* bptr = reinterpret_cast<const uint8_t*>(b.ptr);
@@ -161,7 +153,6 @@
 class PARQUET_EXPORT CompareUnsignedFLBA : public CompareDefaultFLBA {
  public:
   explicit CompareUnsignedFLBA(int length) : CompareDefaultFLBA(length) {}
-  virtual ~CompareUnsignedFLBA() {}
   bool operator()(const FLBA& a, const FLBA& b) override {
     const uint8_t* aptr = reinterpret_cast<const uint8_t*>(a.ptr);
     const uint8_t* bptr = reinterpret_cast<const uint8_t*>(b.ptr);
diff --git a/src/parquet/util/memory-test.cc b/src/parquet/util/memory-test.cc
index ee5fe31..17ade21 100644
--- a/src/parquet/util/memory-test.cc
+++ b/src/parquet/util/memory-test.cc
@@ -27,8 +27,8 @@
 #include "parquet/util/memory.h"
 #include "parquet/util/test-common.h"
 
-using arrow::default_memory_pool;
 using arrow::MemoryPool;
+using arrow::default_memory_pool;
 
 namespace parquet {
 
diff --git a/src/parquet/util/schema-util.h b/src/parquet/util/schema-util.h
index ef9087b..4e31d3c 100644
--- a/src/parquet/util/schema-util.h
+++ b/src/parquet/util/schema-util.h
@@ -26,12 +26,12 @@
 #include "parquet/schema.h"
 #include "parquet/types.h"
 
+using parquet::LogicalType;
 using parquet::ParquetException;
 using parquet::SchemaDescriptor;
 using parquet::schema::GroupNode;
-using parquet::schema::NodePtr;
 using parquet::schema::Node;
-using parquet::LogicalType;
+using parquet::schema::NodePtr;
 
 inline bool str_endswith_tuple(const std::string& str) {
   if (str.size() >= 6) {