NIFI-8759 ExecuteSQL and ExecuteSQLRecord unnecessarily fall back to default decimal scale

Signed-off-by: Matthew Burgess <mattyb149@apache.org>

This closes #5212
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
index 89a9362..ed22b69 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/main/java/org/apache/nifi/util/db/JdbcCommon.java
@@ -582,7 +582,7 @@
                             decimalPrecision = meta.getPrecision(i);
                             //For the float data type Oracle return decimalScale < 0 which cause is not expected to org.apache.avro.LogicalTypes
                             //Hence falling back to default scale if decimalScale < 0
-                            decimalScale = meta.getScale(i) > 0 ? meta.getScale(i) : options.defaultScale;
+                            decimalScale = meta.getScale(i) >= 0 ? meta.getScale(i) : options.defaultScale;
                         } else {
                             // If not, use default precision.
                             decimalPrecision = options.defaultPrecision;
diff --git a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/test/java/org/apache/nifi/util/db/TestJdbcCommon.java b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/test/java/org/apache/nifi/util/db/TestJdbcCommon.java
index f9647aa..d4e5e41 100644
--- a/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/test/java/org/apache/nifi/util/db/TestJdbcCommon.java
+++ b/nifi-nar-bundles/nifi-extension-utils/nifi-database-utils/src/test/java/org/apache/nifi/util/db/TestJdbcCommon.java
@@ -437,7 +437,28 @@
         testConvertToAvroStreamForBigDecimal(bigDecimal, dbPrecision, 10, expectedScale, expectedScale);
     }
 
+    @Test
+    public void testConvertToAvroStreamForBigDecimalWithZeroScale() throws SQLException, IOException {
+        final int dbPrecision = 5;
+        final int dbScale = 0;
+
+        final int expectedPrecision = dbPrecision;
+        final int expectedScale = dbScale;
+
+        final int defaultPrecision = 15;
+        final int defaultScale = 15;
+
+        final BigDecimal bigDecimal = new BigDecimal("1.123", new MathContext(dbPrecision));
+        final BigDecimal expectedValue = new BigDecimal("1");
+        testConvertToAvroStreamForBigDecimal(bigDecimal, expectedValue, dbPrecision, dbScale, defaultPrecision, defaultScale, expectedPrecision, expectedScale);
+    }
+
     private void testConvertToAvroStreamForBigDecimal(BigDecimal bigDecimal, int dbPrecision, int defaultPrecision, int expectedPrecision, int expectedScale) throws SQLException, IOException {
+        testConvertToAvroStreamForBigDecimal(bigDecimal, bigDecimal, dbPrecision, expectedScale, defaultPrecision, -1, expectedPrecision, expectedScale);
+    }
+
+    private void testConvertToAvroStreamForBigDecimal(BigDecimal bigDecimal, BigDecimal expectedValue, int dbPrecision, int dbScale, int defaultPrecision, int defaultScale,
+                                                      int expectedPrecision, int expectedScale) throws SQLException, IOException {
 
         final ResultSetMetaData metadata = mock(ResultSetMetaData.class);
         when(metadata.getColumnCount()).thenReturn(1);
@@ -445,7 +466,7 @@
         when(metadata.getColumnName(1)).thenReturn("The.Chairman");
         when(metadata.getTableName(1)).thenReturn("1the::table");
         when(metadata.getPrecision(1)).thenReturn(dbPrecision);
-        when(metadata.getScale(1)).thenReturn(expectedScale);
+        when(metadata.getScale(1)).thenReturn(dbScale);
 
         final ResultSet rs = JdbcCommonTestUtils.resultSetReturningMetadata(metadata);
 
@@ -453,8 +474,11 @@
 
         final ByteArrayOutputStream baos = new ByteArrayOutputStream();
 
-        final JdbcCommon.AvroConversionOptions options = JdbcCommon.AvroConversionOptions
-                .builder().convertNames(true).useLogicalTypes(true).defaultPrecision(defaultPrecision).build();
+        final JdbcCommon.AvroConversionOptions.Builder optionsBuilder = JdbcCommon.AvroConversionOptions
+                .builder().convertNames(true).useLogicalTypes(true).defaultPrecision(defaultPrecision);
+        if (defaultScale > -1) { optionsBuilder.defaultScale(defaultScale); }
+
+        final JdbcCommon.AvroConversionOptions options = optionsBuilder.build();
         JdbcCommon.convertToAvroStream(rs, baos, options, null);
 
         final byte[] serializedBytes = baos.toByteArray();
@@ -480,7 +504,7 @@
             while (dataFileReader.hasNext()) {
                 record = dataFileReader.next(record);
                 assertEquals("_1the__table", record.getSchema().getName());
-                assertEquals(bigDecimal, record.get("The_Chairman"));
+                assertEquals(expectedValue, record.get("The_Chairman"));
             }
         }
     }