OPENNLP-1285: Change Math to StrictMath to fix Java 8 vs. Java 11 test inconsistencies. (#377)

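java.lang.Math may delegate to platform- and release-specific intrinsics and is only required
to stay within a small number of ulps of the correctly rounded result for transcendental
functions, so values produced by Math.exp, Math.log, or Math.pow can change between JVM
releases (e.g. Java 8 vs. Java 11). StrictMath is defined against fdlibm and returns
bit-for-bit identical results on every compliant JVM, which keeps model scores and the
evaluation expectations below stable. Exactly-specified methods such as abs, min, and max
behave identically in both classes and are switched only for uniformity. The expected
accuracies and the model checksum in the eval tests are updated to the strict results.

A minimal sketch of the difference (illustrative only, not part of this patch; the class
name and input value are made up):

    public class StrictMathDemo {
        public static void main(String[] args) {
            double x = 0.123456789d;
            // Math.exp may be replaced by a platform intrinsic and is allowed to be
            // off by an ulp, so its bit pattern can vary across JVM releases.
            System.out.println(Double.toHexString(Math.exp(x)));
            // StrictMath.exp follows fdlibm and is bit-for-bit reproducible everywhere.
            System.out.println(Double.toHexString(StrictMath.exp(x)));
        }
    }
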
diff --git a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/CLI.java b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/CLI.java
index 664c03a..bd92988 100644
--- a/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/CLI.java
+++ b/opennlp-morfologik-addon/src/main/java/opennlp/morfologik/cmdline/CLI.java
@@ -80,7 +80,7 @@
 
       System.out.print("  " + tool.getName());
 
-      for (int i = 0; i < Math.abs(tool.getName().length()
+      for (int i = 0; i < StrictMath.abs(tool.getName().length()
           - numberOfSpaces); i++) {
         System.out.print(" ");
       }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
index 1d25127..95c7c73 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/CLI.java
@@ -203,7 +203,7 @@
 
       System.out.print("  " + tool.getName());
 
-      for (int i = 0; i < Math.abs(tool.getName().length() - numberOfSpaces); i++) {
+      for (int i = 0; i < StrictMath.abs(tool.getName().length() - numberOfSpaces); i++) {
         System.out.print(" ");
       }
 
diff --git a/opennlp-tools/src/main/java/opennlp/tools/cmdline/FineGrainedReportListener.java b/opennlp-tools/src/main/java/opennlp/tools/cmdline/FineGrainedReportListener.java
index c28c694..66abe64 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/cmdline/FineGrainedReportListener.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/cmdline/FineGrainedReportListener.java
@@ -723,7 +723,7 @@
      */
     public double getAccuracy() {
       // we save the accuracy because it is frequently used by the comparator
-      if (Math.abs(acc - 1.0d) < 0.0000000001) {
+      if (StrictMath.abs(acc - 1.0d) < 0.0000000001) {
         if (total == 0)
           acc = 0.0d;
         acc = (double) correct / (double) total;
diff --git a/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
index 10b9f37..a7684b1 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/dictionary/Dictionary.java
@@ -124,8 +124,8 @@
    */
   public void put(StringList tokens) {
     entrySet.add(new StringListWrapper(tokens));
-    minTokenCount = Math.min(minTokenCount, tokens.size());
-    maxTokenCount = Math.max(maxTokenCount, tokens.size());
+    minTokenCount = StrictMath.min(minTokenCount, tokens.size());
+    maxTokenCount = StrictMath.max(maxTokenCount, tokens.size());
   }
 
   /**
diff --git a/opennlp-tools/src/main/java/opennlp/tools/languagemodel/NGramLanguageModel.java b/opennlp-tools/src/main/java/opennlp/tools/languagemodel/NGramLanguageModel.java
index e9d25d5..4dda687 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/languagemodel/NGramLanguageModel.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/languagemodel/NGramLanguageModel.java
@@ -62,13 +62,13 @@
     if (size() > 0) {
       for (StringList ngram : NGramUtils.getNGrams(tokens, n)) {
         double score = stupidBackoff(ngram);
-        probability += Math.log(score);
+        probability += StrictMath.log(score);
         if (Double.isNaN(probability)) {
           probability = 0d;
           break;
         }
       }
-      probability = Math.exp(probability);
+      probability = StrictMath.exp(probability);
     }
     return probability;
   }
@@ -79,13 +79,13 @@
     if (size() > 0) {
       for (String[] ngram : NGramUtils.getNGrams(tokens, n)) {
         double score = stupidBackoff(new StringList(ngram));
-        probability += Math.log(score);
+        probability += StrictMath.log(score);
         if (Double.isNaN(probability)) {
           probability = 0d;
           break;
         }
       }
-      probability = Math.exp(probability);
+      probability = StrictMath.exp(probability);
     }
     return probability;
   }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/lemmatizer/DefaultLemmatizerContextGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/lemmatizer/DefaultLemmatizerContextGenerator.java
index cdd2383..e663f78 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/lemmatizer/DefaultLemmatizerContextGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/lemmatizer/DefaultLemmatizerContextGenerator.java
@@ -42,7 +42,7 @@
   protected static String[] getPrefixes(String lex) {
     String[] prefs = new String[PREFIX_LENGTH];
     for (int li = 1; li < PREFIX_LENGTH; li++) {
-      prefs[li] = lex.substring(0, Math.min(li + 1, lex.length()));
+      prefs[li] = lex.substring(0, StrictMath.min(li + 1, lex.length()));
     }
     return prefs;
   }
@@ -50,7 +50,7 @@
   protected static String[] getSuffixes(String lex) {
     String[] suffs = new String[SUFFIX_LENGTH];
     for (int li = 1; li < SUFFIX_LENGTH; li++) {
-      suffs[li] = lex.substring(Math.max(lex.length() - li - 1, 0));
+      suffs[li] = lex.substring(StrictMath.max(lex.length() - li - 1, 0));
     }
     return suffs;
   }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/ArrayMath.java b/opennlp-tools/src/main/java/opennlp/tools/ml/ArrayMath.java
index cf18623..d69770e 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/ArrayMath.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/ArrayMath.java
@@ -43,7 +43,7 @@
   public static double l1norm(double[] v) {
     double norm = 0;
     for (int i = 0; i < v.length; i++)
-      norm += Math.abs(v[i]);
+      norm += StrictMath.abs(v[i]);
     return norm;
   }
 
@@ -51,7 +51,7 @@
    * L2-norm
    */
   public static double l2norm(double[] v) {
-    return Math.sqrt(innerProduct(v, v));
+    return StrictMath.sqrt(innerProduct(v, v));
   }
 
   /**
@@ -73,9 +73,9 @@
     double sum = 0.0;
     for (int i = 0; i < x.length; i++) {
       if (x[i] != Double.NEGATIVE_INFINITY)
-        sum += Math.exp(x[i] - max);
+        sum += StrictMath.exp(x[i] - max);
     }
-    return max + Math.log(sum);
+    return max + StrictMath.log(sum);
   }
 
   public static double max(double[] x) {
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/BeamSearch.java b/opennlp-tools/src/main/java/opennlp/tools/ml/BeamSearch.java
index 7987b9f..57f9b8f 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/BeamSearch.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/BeamSearch.java
@@ -96,7 +96,7 @@
     }
 
     for (int i = 0; i < sequence.length; i++) {
-      int sz = Math.min(size, prev.size());
+      int sz = StrictMath.min(size, prev.size());
 
       for (int sc = 0; prev.size() > 0 && sc < sz; sc++) {
         Sequence top = prev.remove();
@@ -115,7 +115,7 @@
 
         Arrays.sort(temp_scores);
 
-        double min = temp_scores[Math.max(0,scores.length - size)];
+        double min = temp_scores[StrictMath.max(0,scores.length - size)];
 
         for (int p = 0; p < scores.length; p++) {
           if (scores[p] >= min) {
@@ -149,7 +149,7 @@
       next = tmp;
     }
 
-    int numSeq = Math.min(numSequences, prev.size());
+    int numSeq = StrictMath.min(numSequences, prev.size());
     Sequence[] topSequences = new Sequence[numSeq];
 
     for (int seqIndex = 0; seqIndex < numSeq; seqIndex++) {
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISModel.java b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISModel.java
index 9c99308..8a0cd6d 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISModel.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISModel.java
@@ -188,7 +188,7 @@
 
     double normal = 0.0;
     for (int oid = 0; oid < model.getNumOutcomes(); oid++) {
-      prior[oid] = Math.exp(prior[oid]);
+      prior[oid] = StrictMath.exp(prior[oid]);
       normal += prior[oid];
     }
 
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java
index 19d2c63..e702cc3 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/GISTrainer.java
@@ -530,14 +530,14 @@
     double modelValue = modelExpects[0][predicate].getParameters()[oid];
     double observedValue = observedExpects[predicate].getParameters()[oid];
     for (int i = 0; i < 50; i++) {
-      double tmp = modelValue * Math.exp(correctionConstant * x0);
+      double tmp = modelValue * StrictMath.exp(correctionConstant * x0);
       double f = tmp + (param + x0) / sigma - observedValue;
       double fp = tmp * correctionConstant + 1 / sigma;
       if (fp == 0) {
         break;
       }
       double x = x0 - f / fp;
-      if (Math.abs(x - x0) < 0.000001) {
+      if (StrictMath.abs(x - x0) < 0.000001) {
         x0 = x;
         break;
       }
@@ -623,8 +623,8 @@
           if (model[aoi] == 0) {
             System.err.println("Model expects == 0 for " + predLabels[pi] + " " + outcomeLabels[aoi]);
           }
-          //params[pi].updateParameter(aoi,(Math.log(observed[aoi]) - Math.log(model[aoi])));
-          params[pi].updateParameter(aoi, ((Math.log(observed[aoi]) - Math.log(model[aoi]))
+          //params[pi].updateParameter(aoi,(StrictMath.log(observed[aoi]) - StrictMath.log(model[aoi])));
+          params[pi].updateParameter(aoi, ((StrictMath.log(observed[aoi]) - StrictMath.log(model[aoi]))
               / correctionConstant));
         }
 
@@ -695,7 +695,7 @@
           }
         }
 
-        loglikelihood += Math.log(modelDistribution[outcomeList[ei]]) * numTimesEventsSeen[ei];
+        loglikelihood += StrictMath.log(modelDistribution[outcomeList[ei]]) * numTimesEventsSeen[ei];
 
         numEvents += numTimesEventsSeen[ei];
         if (printMessages) {
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihood.java b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihood.java
index 0505e38..311fd15 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihood.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihood.java
@@ -137,7 +137,7 @@
       logSumOfExps = ArrayMath.logSumOfExps(expectation);
 
       for (oi = 0; oi < numOutcomes; oi++) {
-        expectation[oi] = Math.exp(expectation[oi] - logSumOfExps);
+        expectation[oi] = StrictMath.exp(expectation[oi] - logSumOfExps);
       }
 
       for (oi = 0; oi < numOutcomes; oi++) {
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/ParallelNegLogLikelihood.java b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/ParallelNegLogLikelihood.java
index 2429d00..6003865 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/ParallelNegLogLikelihood.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/ParallelNegLogLikelihood.java
@@ -237,7 +237,7 @@
         logSumOfExps = ArrayMath.logSumOfExps(expectation);
 
         for (oi = 0; oi < numOutcomes; oi++) {
-          expectation[oi] = Math.exp(expectation[oi] - logSumOfExps);
+          expectation[oi] = StrictMath.exp(expectation[oi] - logSumOfExps);
         }
 
         for (oi = 0; oi < numOutcomes; oi++) {
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizer.java b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizer.java
index adc7f5b..45fa541 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizer.java
@@ -36,7 +36,7 @@
  *
  *    {@literal @}Override
  *    public double valueAt(double[] x) {
- *      return Math.pow(x[0]-1, 2) + 10;
+ *      return StrictMath.pow(x[0]-1, 2) + 10;
  *    }
  *
  *    {@literal @}Override
@@ -279,7 +279,7 @@
     if (l1Cost > 0 && l2Cost > 0) {
       double[] x = lsr.getNextPoint();
       for (int i = 0; i < dimension; i++) {
-        x[i] = Math.sqrt(1 + l2Cost) * x[i];
+        x[i] = StrictMath.sqrt(1 + l2Cost) * x[i];
       }
     }
 
@@ -375,7 +375,7 @@
     }
 
     // Check gradient's norm using the criteria: ||g(x)|| / max(1, ||x||) < threshold
-    double xNorm = Math.max(1, ArrayMath.l2norm(lsr.getNextPoint()));
+    double xNorm = StrictMath.max(1, ArrayMath.l2norm(lsr.getNextPoint()));
     double gradNorm = l1Cost > 0 ?
         ArrayMath.l2norm(lsr.getPseudoGradAtNext()) : ArrayMath.l2norm(lsr.getGradAtNext());
     if (gradNorm / xNorm < REL_GRAD_NORM_TOL) {
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNModel.java b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNModel.java
index 030cb76..8cec948 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNModel.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/maxent/quasinewton/QNModel.java
@@ -80,7 +80,7 @@
 
     double logSumExp = ArrayMath.logSumOfExps(probs);
     for (int oi = 0; oi < outcomeNames.length; oi++) {
-      probs[oi] = Math.exp(probs[oi] - logSumExp);
+      probs[oi] = StrictMath.exp(probs[oi] - logSumExp);
     }
     return probs;
   }
@@ -117,7 +117,7 @@
     double logSumExp = ArrayMath.logSumOfExps(probs);
 
     for (int oi = 0; oi < nOutcomes; oi++) {
-      probs[oi] = Math.exp(probs[oi] - logSumExp);
+      probs[oi] = StrictMath.exp(probs[oi] - logSumExp);
     }
 
     return probs;
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparableEvent.java b/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparableEvent.java
index 5f426f4..5ab87b5 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparableEvent.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparableEvent.java
@@ -48,7 +48,7 @@
       return compareOutcome;
     }
 
-    int smallerLength = Math.min(predIndexes.length, ce.predIndexes.length);
+    int smallerLength = StrictMath.min(predIndexes.length, ce.predIndexes.length);
 
     for (int i = 0; i < smallerLength; i++) {
       int comparePredIndexes = Integer.compare(predIndexes[i], ce.predIndexes[i]);
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparablePredicate.java b/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparablePredicate.java
index 5e21065..b622c61 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparablePredicate.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/model/ComparablePredicate.java
@@ -37,7 +37,7 @@
   }
 
   public int compareTo(ComparablePredicate cp) {
-    int smallerLength = Math.min(outcomes.length, cp.outcomes.length);
+    int smallerLength = StrictMath.min(outcomes.length, cp.outcomes.length);
 
     for (int i = 0; i < smallerLength; i++) {
       int compareOutcomes = Integer.compare(outcomes[i], cp.outcomes[i]);
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/model/UniformPrior.java b/opennlp-tools/src/main/java/opennlp/tools/ml/model/UniformPrior.java
index 17574e2..59d9e27 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/model/UniformPrior.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/model/UniformPrior.java
@@ -44,7 +44,7 @@
 
   public void setLabels(String[] outcomeLabels, String[] contextLabels) {
     this.numOutcomes = outcomeLabels.length;
-    r = Math.log(1.0 / numOutcomes);
+    r = StrictMath.log(1.0 / numOutcomes);
   }
 
   @Override
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbabilities.java b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbabilities.java
index e1464f9..eee5cbf 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbabilities.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbabilities.java
@@ -113,7 +113,7 @@
       T t = entry.getKey();
       Double p = entry.getValue();
       if (p != null) {
-        double temp_p = Math.exp(p - highestLogProbability);
+        double temp_p = StrictMath.exp(p - highestLogProbability);
         if (!Double.isNaN(temp_p)) {
           sum += temp_p;
           temp.put(t, temp_p);
@@ -133,7 +133,7 @@
   }
 
   private double log(double prob) {
-    return Math.log(prob);
+    return StrictMath.log(prob);
   }
 
   /**
@@ -163,7 +163,7 @@
   }
 
   public void discardCountsBelow(double i) {
-    i = Math.log(i);
+    i = StrictMath.log(i);
     ArrayList<T> labelsToRemove = new ArrayList<>();
     for (Entry<T, Double> entry : map.entrySet()) {
       final T label = entry.getKey();
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbability.java b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbability.java
index 7c080cb..12b9660 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbability.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/LogProbability.java
@@ -36,7 +36,7 @@
    * @param probability the probability to assign
    */
   public void set(double probability) {
-    this.probability = Math.log(probability);
+    this.probability = StrictMath.log(probability);
   }
 
   /**
@@ -55,7 +55,7 @@
    * @param probability the probability to assign
    */
   public void setIfLarger(double probability) {
-    double logP = Math.log(probability);
+    double logP = StrictMath.log(probability);
     if (this.probability < logP) {
       this.probability = logP;
     }
@@ -98,7 +98,7 @@
    * @param probability the probability weight to add
    */
   public void addIn(double probability) {
-    setLog(this.probability + Math.log(probability));
+    setLog(this.probability + StrictMath.log(probability));
   }
 
   /**
@@ -107,7 +107,7 @@
    * @return the probability associated with the label
    */
   public Double get() {
-    return Math.exp(probability);
+    return StrictMath.exp(probability);
   }
 
   /**
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probabilities.java b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probabilities.java
index 55357c5..db1d2bc 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probabilities.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probabilities.java
@@ -82,7 +82,7 @@
    * @param probability the log probability to assign
    */
   public void setLog(T t, double probability) {
-    set(t, Math.exp(probability));
+    set(t, StrictMath.exp(probability));
   }
 
   /**
@@ -97,7 +97,7 @@
     Double p = map.get(t);
     if (p == null)
       p = 1.0;
-    probability = Math.pow(probability, count);
+    probability = StrictMath.pow(probability, count);
     map.put(t, p * probability);
   }
 
@@ -121,7 +121,7 @@
    * @return the log probability associated with the label
    */
   public Double getLog(T t) {
-    return Math.log(get(t));
+    return StrictMath.log(get(t));
   }
 
   /**
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probability.java b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probability.java
index 703ad52..ed7bfcd 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probability.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/naivebayes/Probability.java
@@ -88,7 +88,7 @@
    * @param probability the log probability to assign
    */
   public void setLog(double probability) {
-    set(Math.exp(probability));
+    set(StrictMath.exp(probability));
   }
 
   /**
@@ -115,7 +115,7 @@
    * @return the log probability associated with the label
    */
   public Double getLog() {
-    return Math.log(get());
+    return StrictMath.log(get());
   }
 
   /**
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronModel.java b/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronModel.java
index a34c7ef..7485187 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronModel.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronModel.java
@@ -75,13 +75,13 @@
       double maxPrior = 1;
 
       for (int oid = 0; oid < numOutcomes; oid++) {
-        if (maxPrior < Math.abs(prior[oid]))
-          maxPrior = Math.abs(prior[oid]);
+        if (maxPrior < StrictMath.abs(prior[oid]))
+          maxPrior = StrictMath.abs(prior[oid]);
       }
 
       double normal = 0.0;
       for (int oid = 0; oid < numOutcomes; oid++) {
-        prior[oid] = Math.exp(prior[oid] / maxPrior);
+        prior[oid] = StrictMath.exp(prior[oid] / maxPrior);
         normal += prior[oid];
       }
 
diff --git a/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronTrainer.java b/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronTrainer.java
index 346d24b..1d26250 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronTrainer.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/ml/perceptron/PerceptronTrainer.java
@@ -344,9 +344,9 @@
       // If the tolerance is greater than the difference between the
       // current training accuracy and all of the previous three
       // training accuracies, stop training.
-      if (Math.abs(prevAccuracy1 - trainingAccuracy) < tolerance
-          && Math.abs(prevAccuracy2 - trainingAccuracy) < tolerance
-          && Math.abs(prevAccuracy3 - trainingAccuracy) < tolerance) {
+      if (StrictMath.abs(prevAccuracy1 - trainingAccuracy) < tolerance
+          && StrictMath.abs(prevAccuracy2 - trainingAccuracy) < tolerance
+          && StrictMath.abs(prevAccuracy3 - trainingAccuracy) < tolerance) {
         display("Stopping: change in training set accuracy less than " + tolerance + "\n");
         break;
       }
@@ -414,7 +414,7 @@
   // See whether a number is a perfect square. Inefficient, but fine
   // for our purposes.
   private static boolean isPerfectSquare(int n) {
-    int root = (int) Math.sqrt(n);
+    int root = (int) StrictMath.sqrt(n);
     return root * root == n;
   }
 
diff --git a/opennlp-tools/src/main/java/opennlp/tools/parser/AbstractBottomUpParser.java b/opennlp-tools/src/main/java/opennlp/tools/parser/AbstractBottomUpParser.java
index 1a414f1..6575e21 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/parser/AbstractBottomUpParser.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/parser/AbstractBottomUpParser.java
@@ -414,7 +414,7 @@
         // if (j != tags.length) {System.err.println(words[j]+" "
         // +ptags[j]+" "+tags[j]+" "+probs.get(j));}
         if (j != tags.length) {
-          newParses[si].addProb(Math.log(probs[j]));
+          newParses[si].addProb(StrictMath.log(probs[j]));
         }
         // if continue just update end chunking tag don't use contTypeMap
         if (j != tags.length && tags[j].startsWith(CONT)) {
@@ -484,7 +484,7 @@
         //System.err.println("inserting tag "+tags[j]);
         double prob = probs[j];
         newParses[i].insert(new Parse(word.getText(), word.getSpan(), tags[j], prob,j));
-        newParses[i].addProb(Math.log(prob));
+        newParses[i].addProb(StrictMath.log(prob));
       }
     }
     return newParses;
diff --git a/opennlp-tools/src/main/java/opennlp/tools/parser/Parse.java b/opennlp-tools/src/main/java/opennlp/tools/parser/Parse.java
index e2b4fba..b747baf 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/parser/Parse.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/parser/Parse.java
@@ -408,7 +408,7 @@
     //System.err.println("Parse.getTagSequenceProb: "+type+" "+this);
     if (parts.size() == 1 && (parts.get(0)).type.equals(AbstractBottomUpParser.TOK_NODE)) {
       //System.err.println(this+" "+prob);
-      return (Math.log(prob));
+      return (StrictMath.log(prob));
     }
     else if (parts.size() == 0) {
       System.err.println("Parse.getTagSequenceProb: Wrong base case!");
diff --git a/opennlp-tools/src/main/java/opennlp/tools/parser/chunking/Parser.java b/opennlp-tools/src/main/java/opennlp/tools/parser/chunking/Parser.java
index f2079e9..9ad8428 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/parser/chunking/Parser.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/parser/chunking/Parser.java
@@ -125,9 +125,9 @@
   @Override
   protected void advanceTop(Parse p) {
     buildModel.eval(buildContextGenerator.getContext(p.getChildren(), 0), bprobs);
-    p.addProb(Math.log(bprobs[topStartIndex]));
+    p.addProb(StrictMath.log(bprobs[topStartIndex]));
     checkModel.eval(checkContextGenerator.getContext(p.getChildren(), TOP_NODE, 0, 0), cprobs);
-    p.addProb(Math.log(cprobs[completeIndex]));
+    p.addProb(StrictMath.log(cprobs[completeIndex]));
     p.setType(TOP_NODE);
   }
 
@@ -202,7 +202,7 @@
       if (createDerivationString) newParse1.getDerivation().append(max).append("-");
       //replace constituent being labeled to create new derivation
       newParse1.setChild(originalAdvanceIndex,tag);
-      newParse1.addProb(Math.log(bprob));
+      newParse1.addProb(StrictMath.log(bprob));
       //check
       //String[] context = checkContextGenerator.getContext(newParse1.getChildren(), lastStartType,
       // lastStartIndex, advanceNodeIndex);
@@ -215,7 +215,7 @@
       if (cprobs[completeIndex] > q) { //make sure a reduce is likely
         newParse2 = (Parse) newParse1.clone();
         if (createDerivationString) newParse2.getDerivation().append(1).append(".");
-        newParse2.addProb(Math.log(cprobs[completeIndex]));
+        newParse2.addProb(StrictMath.log(cprobs[completeIndex]));
         Parse[] cons = new Parse[advanceNodeIndex - lastStartIndex + 1];
         boolean flat = true;
         //first
@@ -249,7 +249,7 @@
       if (cprobs[incompleteIndex] > q) { //make sure a shift is likely
         if (createDerivationString) newParse1.getDerivation().append(0).append(".");
         if (advanceNodeIndex != numNodes - 1) { //can't shift last element
-          newParse1.addProb(Math.log(cprobs[incompleteIndex]));
+          newParse1.addProb(StrictMath.log(cprobs[incompleteIndex]));
           newParsesList.add(newParse1);
         }
       }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/parser/treeinsert/Parser.java b/opennlp-tools/src/main/java/opennlp/tools/parser/treeinsert/Parser.java
index f18fbf7..c38be27 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/parser/treeinsert/Parser.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/parser/treeinsert/Parser.java
@@ -274,7 +274,7 @@
           Parse newParse1 = (Parse) p.clone();
           Parse newNode = new Parse(p.getText(),advanceNode.getSpan(),tag,bprob,advanceNode.getHead());
           newParse1.insert(newNode);
-          newParse1.addProb(Math.log(bprob));
+          newParse1.addProb(StrictMath.log(bprob));
           newParsesList.add(newParse1);
           if (checkComplete) {
             cprobs = checkModel.eval(checkContextGenerator.getContext(newNode, children,
@@ -282,25 +282,25 @@
             if (debugOn) System.out.println("building " + tag + " " + bprob + " c=" + cprobs[completeIndex]);
             if (cprobs[completeIndex] > probMass) { //just complete advances
               setComplete(newNode);
-              newParse1.addProb(Math.log(cprobs[completeIndex]));
+              newParse1.addProb(StrictMath.log(cprobs[completeIndex]));
               if (debugOn) System.out.println("Only advancing complete node");
             }
             else if (1 - cprobs[completeIndex] > probMass) { //just incomplete advances
               setIncomplete(newNode);
-              newParse1.addProb(Math.log(1 - cprobs[completeIndex]));
+              newParse1.addProb(StrictMath.log(1 - cprobs[completeIndex]));
               if (debugOn) System.out.println("Only advancing incomplete node");
             }
             else { //both complete and incomplete advance
               if (debugOn) System.out.println("Advancing both complete and incomplete nodes");
               setComplete(newNode);
-              newParse1.addProb(Math.log(cprobs[completeIndex]));
+              newParse1.addProb(StrictMath.log(cprobs[completeIndex]));
 
               Parse newParse2 = (Parse) p.clone();
               Parse newNode2 = new Parse(p.getText(),advanceNode.getSpan(),tag,bprob,advanceNode.getHead());
               newParse2.insert(newNode2);
-              newParse2.addProb(Math.log(bprob));
+              newParse2.addProb(StrictMath.log(bprob));
               newParsesList.add(newParse2);
-              newParse2.addProb(Math.log(1 - cprobs[completeIndex]));
+              newParse2.addProb(StrictMath.log(1 - cprobs[completeIndex]));
               setIncomplete(newNode2); //set incomplete for non-clone
             }
           }
@@ -328,7 +328,7 @@
         //replace constituent being labeled to create new derivation
         newParse1.setChild(originalAdvanceIndex,Parser.BUILT);
       }
-      newParse1.addProb(Math.log(doneProb));
+      newParse1.addProb(StrictMath.log(doneProb));
       if (advanceNodeIndex == 0) { //no attach if first node.
         newParsesList.add(newParse1);
       }
@@ -387,28 +387,28 @@
                 node.updateSpan();
               }
               //if (debugOn) {System.out.print(ai+"-result: ");newParse2.show();System.out.println();}
-              newParse2.addProb(Math.log(prob));
+              newParse2.addProb(StrictMath.log(prob));
               newParsesList.add(newParse2);
               if (checkComplete) {
                 cprobs = checkModel.eval(
                     checkContextGenerator.getContext(updatedNode,newKids,advanceNodeIndex,true));
                 if (cprobs[completeIndex] > probMass) {
                   setComplete(updatedNode);
-                  newParse2.addProb(Math.log(cprobs[completeIndex]));
+                  newParse2.addProb(StrictMath.log(cprobs[completeIndex]));
                   if (debugOn) System.out.println("Only advancing complete node");
                 }
                 else if (1 - cprobs[completeIndex] > probMass) {
                   setIncomplete(updatedNode);
-                  newParse2.addProb(Math.log(1 - cprobs[completeIndex]));
+                  newParse2.addProb(StrictMath.log(1 - cprobs[completeIndex]));
                   if (debugOn) System.out.println("Only advancing incomplete node");
                 }
                 else {
                   setComplete(updatedNode);
                   Parse newParse3 = newParse2.cloneRoot(updatedNode,originalZeroIndex);
-                  newParse3.addProb(Math.log(cprobs[completeIndex]));
+                  newParse3.addProb(StrictMath.log(cprobs[completeIndex]));
                   newParsesList.add(newParse3);
                   setIncomplete(updatedNode);
-                  newParse2.addProb(Math.log(1 - cprobs[completeIndex]));
+                  newParse2.addProb(StrictMath.log(1 - cprobs[completeIndex]));
                   if (debugOn)
                     System.out.println("Advancing both complete and incomplete nodes; c="
                         + cprobs[completeIndex]);
diff --git a/opennlp-tools/src/main/java/opennlp/tools/postag/DefaultPOSContextGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/postag/DefaultPOSContextGenerator.java
index 3f4fe97..6def798 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/postag/DefaultPOSContextGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/postag/DefaultPOSContextGenerator.java
@@ -70,7 +70,7 @@
   protected static String[] getPrefixes(String lex) {
     String[] prefs = new String[PREFIX_LENGTH];
     for (int li = 0; li < PREFIX_LENGTH; li++) {
-      prefs[li] = lex.substring(0, Math.min(li + 1, lex.length()));
+      prefs[li] = lex.substring(0, StrictMath.min(li + 1, lex.length()));
     }
     return prefs;
   }
@@ -78,7 +78,7 @@
   protected static String[] getSuffixes(String lex) {
     String[] suffs = new String[SUFFIX_LENGTH];
     for (int li = 0; li < SUFFIX_LENGTH; li++) {
-      suffs[li] = lex.substring(Math.max(lex.length() - li - 1, 0));
+      suffs[li] = lex.substring(StrictMath.max(lex.length() - li - 1, 0));
     }
     return suffs;
   }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/sentdetect/lang/th/SentenceContextGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/sentdetect/lang/th/SentenceContextGenerator.java
index 3b24662..f87d6ca 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/sentdetect/lang/th/SentenceContextGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/sentdetect/lang/th/SentenceContextGenerator.java
@@ -42,20 +42,20 @@
     collectFeats.add(buf.toString());
     buf.setLength(0);
 
-    collectFeats.add("p1=" + prefix.substring(Math.max(prefix.length() - 1,0)));
-    collectFeats.add("p2=" + prefix.substring(Math.max(prefix.length() - 2,0)));
-    collectFeats.add("p3=" + prefix.substring(Math.max(prefix.length() - 3,0)));
-    collectFeats.add("p4=" + prefix.substring(Math.max(prefix.length() - 4,0)));
-    collectFeats.add("p5=" + prefix.substring(Math.max(prefix.length() - 5,0)));
-    collectFeats.add("p6=" + prefix.substring(Math.max(prefix.length() - 6,0)));
-    collectFeats.add("p7=" + prefix.substring(Math.max(prefix.length() - 7,0)));
+    collectFeats.add("p1=" + prefix.substring(StrictMath.max(prefix.length() - 1,0)));
+    collectFeats.add("p2=" + prefix.substring(StrictMath.max(prefix.length() - 2,0)));
+    collectFeats.add("p3=" + prefix.substring(StrictMath.max(prefix.length() - 3,0)));
+    collectFeats.add("p4=" + prefix.substring(StrictMath.max(prefix.length() - 4,0)));
+    collectFeats.add("p5=" + prefix.substring(StrictMath.max(prefix.length() - 5,0)));
+    collectFeats.add("p6=" + prefix.substring(StrictMath.max(prefix.length() - 6,0)));
+    collectFeats.add("p7=" + prefix.substring(StrictMath.max(prefix.length() - 7,0)));
 
-    collectFeats.add("n1=" + suffix.substring(0,Math.min(1, suffix.length())));
-    collectFeats.add("n2=" + suffix.substring(0,Math.min(2, suffix.length())));
-    collectFeats.add("n3=" + suffix.substring(0,Math.min(3, suffix.length())));
-    collectFeats.add("n4=" + suffix.substring(0,Math.min(4, suffix.length())));
-    collectFeats.add("n5=" + suffix.substring(0,Math.min(5, suffix.length())));
-    collectFeats.add("n6=" + suffix.substring(0,Math.min(6, suffix.length())));
-    collectFeats.add("n7=" + suffix.substring(0,Math.min(7, suffix.length())));
+    collectFeats.add("n1=" + suffix.substring(0,StrictMath.min(1, suffix.length())));
+    collectFeats.add("n2=" + suffix.substring(0,StrictMath.min(2, suffix.length())));
+    collectFeats.add("n3=" + suffix.substring(0,StrictMath.min(3, suffix.length())));
+    collectFeats.add("n4=" + suffix.substring(0,StrictMath.min(4, suffix.length())));
+    collectFeats.add("n5=" + suffix.substring(0,StrictMath.min(5, suffix.length())));
+    collectFeats.add("n6=" + suffix.substring(0,StrictMath.min(6, suffix.length())));
+    collectFeats.add("n7=" + suffix.substring(0,StrictMath.min(7, suffix.length())));
   }
 }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/util/Sequence.java b/opennlp-tools/src/main/java/opennlp/tools/util/Sequence.java
index 5232370..a282ea8 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/util/Sequence.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/util/Sequence.java
@@ -51,7 +51,7 @@
     probs = new ArrayList<>(s.probs.size() + 1);
     probs.addAll(s.probs);
     probs.add(p);
-    score = s.score + Math.log(p);
+    score = s.score + StrictMath.log(p);
   }
 
   public Sequence(List<String> outcomes) {
@@ -78,7 +78,7 @@
       double epsilon = 0.0000001;
       return Objects.equals(outcomes, other.outcomes) &&
           Objects.equals(probs, other.probs) &&
-          Math.abs(score - other.score) < epsilon;
+          StrictMath.abs(score - other.score) < epsilon;
     }
 
     return false;
@@ -91,7 +91,7 @@
   public void add(String outcome, double p) {
     outcomes.add(outcome);
     probs.add(p);
-    score += Math.log(p);
+    score += StrictMath.log(p);
   }
 
   /** Returns a list of outcomes for this sequence.
diff --git a/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/BrownTokenClasses.java b/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/BrownTokenClasses.java
index 0456de5..a880b91 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/BrownTokenClasses.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/BrownTokenClasses.java
@@ -43,11 +43,11 @@
     } else {
       String brownClass = brownLexicon.lookupToken(token);
       List<String> pathLengthsList = new ArrayList<>();
-      pathLengthsList.add(brownClass.substring(0, Math.min(brownClass.length(), pathLengths[0])));
+      pathLengthsList.add(brownClass.substring(0, StrictMath.min(brownClass.length(), pathLengths[0])));
       for (int i = 1; i < pathLengths.length; i++) {
         if (pathLengths[i - 1] < brownClass.length()) {
           pathLengthsList.add(brownClass.substring(0,
-              Math.min(brownClass.length(), pathLengths[i])));
+              StrictMath.min(brownClass.length(), pathLengths[i])));
         }
       }
       return pathLengthsList;
diff --git a/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/PrefixFeatureGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/PrefixFeatureGenerator.java
index 04fcd15..2e10195 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/PrefixFeatureGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/PrefixFeatureGenerator.java
@@ -44,11 +44,11 @@
   
   private String[] getPrefixes(String lex) {
       
-    int prefixes = Math.min(prefixLength, lex.length());
+    int prefixes = StrictMath.min(prefixLength, lex.length());
     
     String[] prefs = new String[prefixes];
     for (int li = 0; li < prefixes; li++) {
-      prefs[li] = lex.substring(0, Math.min(li + 1, lex.length()));
+      prefs[li] = lex.substring(0, StrictMath.min(li + 1, lex.length()));
     }
     return prefs;
   }
diff --git a/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/SuffixFeatureGenerator.java b/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/SuffixFeatureGenerator.java
index c626fd9..f1a18d8 100644
--- a/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/SuffixFeatureGenerator.java
+++ b/opennlp-tools/src/main/java/opennlp/tools/util/featuregen/SuffixFeatureGenerator.java
@@ -44,11 +44,11 @@
   
   private String[] getSuffixes(String lex) {
       
-    int suffixes = Math.min(suffixLength, lex.length());
+    int suffixes = StrictMath.min(suffixLength, lex.length());
       
     String[] suffs = new String[suffixes];
     for (int li = 0; li < suffixes; li++) {
-      suffs[li] = lex.substring(Math.max(lex.length() - li - 1, 0));
+      suffs[li] = lex.substring(StrictMath.max(lex.length() - li - 1, 0));
     }
     return suffs;
   }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java
index cd34046..a080e4e 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/ArvoresDeitadasEval.java
@@ -153,7 +153,7 @@
 
   @Test
   public void evalPortugueseSentenceDetectorMaxentQn() throws IOException {
-    sentenceCrossEval(createMaxentQnParams(), 0.99261110833375d);
+    sentenceCrossEval(createMaxentQnParams(), 0.9924715809679968d);
   }
 
   @Test
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java
index fccc8e9..90193d9 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/Conll02NameFinderEval.java
@@ -419,10 +419,10 @@
         Conll02NameSampleStream.GENERATE_ORGANIZATION_ENTITIES);
 
     eval(maxentModel, spanishTestAFile, LANGUAGE.SPA,
-        Conll02NameSampleStream.GENERATE_ORGANIZATION_ENTITIES, 0.682961897915169d);
+        Conll02NameSampleStream.GENERATE_ORGANIZATION_ENTITIES, 0.6904593639575972d);
 
     eval(maxentModel, spanishTestBFile, LANGUAGE.SPA,
-        Conll02NameSampleStream.GENERATE_ORGANIZATION_ENTITIES, 0.7776447105788423d);
+        Conll02NameSampleStream.GENERATE_ORGANIZATION_ENTITIES, 0.7843601895734598d);
   }
 
   @Test
@@ -503,7 +503,7 @@
         Conll02NameSampleStream.GENERATE_MISC_ENTITIES);
 
     eval(maxentModel, spanishTestAFile, LANGUAGE.SPA,
-        Conll02NameSampleStream.GENERATE_MISC_ENTITIES, 0.470219435736677d);
+        Conll02NameSampleStream.GENERATE_MISC_ENTITIES, 0.46467817896389324d);
 
     eval(maxentModel, spanishTestBFile, LANGUAGE.SPA,
         Conll02NameSampleStream.GENERATE_MISC_ENTITIES, 0.5020576131687243d);
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java
index ed24cf3..64aa5a0 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/ConllXPosTaggerEval.java
@@ -206,6 +206,6 @@
         "conllx/data/swedish/talbanken05/train/swedish_talbanken05_train.conll"), "swe", params);
 
     eval(maxentModel, new File(getOpennlpDataDir(),
-        "conllx/data/swedish/talbanken05/test/swedish_talbanken05_test.conll"), 0.9347595473833098d);
+        "conllx/data/swedish/talbanken05/test/swedish_talbanken05_test.conll"), 0.9377652050919377d);
   }
 }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java b/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java
index cc86b5e..fd116d2 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/eval/SourceForgeModelEval.java
@@ -410,7 +410,7 @@
       }
     }
 
-    Assert.assertEquals(new BigInteger("312218841713337505306598301082074515847"),
+    Assert.assertEquals(new BigInteger("68039262350771988792233880373220954061"),
         new BigInteger(1, digest.digest()));
   }
 }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java
index 56edb9e..17d380e 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/languagemodel/LanguageModelTestUtils.java
@@ -70,12 +70,12 @@
       }
     }
 
-    double p = Math.log(perplexity.doubleValue());
+    double p = StrictMath.log(perplexity.doubleValue());
     if (Double.isInfinite(p) || Double.isNaN(p)) {
       return Double.POSITIVE_INFINITY; // over/underflow -> too high perplexity
     } else {
       BigDecimal log = new BigDecimal(p);
-      return Math.pow(Math.E, log.divide(new BigDecimal(testSet.size()), CONTEXT).doubleValue());
+      return StrictMath.pow(StrictMath.E, log.divide(new BigDecimal(testSet.size()), CONTEXT).doubleValue());
     }
   }
 
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java
index fc28d11..8e37970 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/LineSearchTest.java
@@ -174,7 +174,7 @@
 
     public double valueAt(double[] x) {
       // (x-2)^2 + 4;
-      return Math.pow(x[0] - 2, 2) + 4;
+      return StrictMath.pow(x[0] - 2, 2) + 4;
     }
 
     public double[] gradientAt(double[] x) {
@@ -194,7 +194,7 @@
 
     public double valueAt(double[] x) {
       // x^2;
-      return Math.pow(x[0], 2);
+      return StrictMath.pow(x[0], 2);
     }
 
     public double[] gradientAt(double[] x) {
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java
index ed8b11c..d1f9a88 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/NegLogLikelihoodTest.java
@@ -252,7 +252,7 @@
     }
 
     for (int i = 0; i < alignedActual.length; i++) {
-      if (Math.abs(alignedActual[i] - expected[i]) > tolerance) {
+      if (StrictMath.abs(alignedActual[i] - expected[i]) > tolerance) {
         return false;
       }
     }
diff --git a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizerTest.java b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizerTest.java
index 1a97f14..582bc67 100644
--- a/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizerTest.java
+++ b/opennlp-tools/src/test/java/opennlp/tools/ml/maxent/quasinewton/QNMinimizerTest.java
@@ -58,7 +58,7 @@
 
     @Override
     public double valueAt(double[] x) {
-      return Math.pow(x[0] - 1, 2) + Math.pow(x[1] - 5, 2) + 10;
+      return StrictMath.pow(x[0] - 1, 2) + StrictMath.pow(x[1] - 5, 2) + 10;
     }
 
     @Override
@@ -84,14 +84,14 @@
 
     @Override
     public double valueAt(double[] x) {
-      return Math.pow(1 - x[0], 2) + 100 * Math.pow(x[1] - Math.pow(x[0], 2), 2);
+      return StrictMath.pow(1 - x[0], 2) + 100 * StrictMath.pow(x[1] - StrictMath.pow(x[0], 2), 2);
     }
 
     @Override
     public double[] gradientAt(double[] x) {
       double[] g = new double[2];
-      g[0] = -2 * (1 - x[0]) - 400 * (x[1] - Math.pow(x[0], 2)) * x[0];
-      g[1] = 200 * (x[1] - Math.pow(x[0], 2));
+      g[0] = -2 * (1 - x[0]) - 400 * (x[1] - StrictMath.pow(x[0], 2)) * x[0];
+      g[1] = 200 * (x[1] - StrictMath.pow(x[0], 2));
       return g;
     }