Remove redundant "CMAESOptimizer.PopulationSize" inner class; use the "PopulationSize" class from package "optim.nonlinear.scalar" instead, and move its usage notes to the class-level Javadoc.
diff --git a/commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizer.java b/commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizer.java
index 13e7480..45190e8 100644
--- a/commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizer.java
+++ b/commons-math-legacy/src/main/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizer.java
@@ -34,6 +34,7 @@
import org.apache.commons.math4.legacy.optim.OptimizationData;
import org.apache.commons.math4.legacy.optim.PointValuePair;
import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math4.legacy.optim.nonlinear.scalar.MultivariateOptimizer;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize;
import org.apache.commons.rng.UniformRandomProvider;
import org.apache.commons.statistics.distribution.ContinuousDistribution;
@@ -77,6 +78,14 @@
* <li><a href="http://en.wikipedia.org/wiki/CMA-ES">Wikipedia</a></li>
* </ul>
*
+ * <p>
+ * The {@link PopulationSize number of offspring} is the primary strategy
+ * parameter. In the absence of better clues, a good default could be an integer
+ * close to {@code 4 + 3 ln(n)}, where {@code n} is the number of optimized
+ * parameters. Increasing the population size improves global search properties
+ * at the expense of speed (which in general decreases at most linearly with
+ * increasing population size).
+ *
* @since 3.0
*/
public class CMAESOptimizer
@@ -309,40 +318,6 @@
}
/**
- * Population size.
- * The number of offspring is the primary strategy parameter.
- * In the absence of better clues, a good default could be an
- * integer close to {@code 4 + 3 ln(n)}, where {@code n} is the
- * number of optimized parameters.
- * Increasing the population size improves global search properties
- * at the expense of speed (which in general decreases at most
- * linearly with increasing population size).
- */
- public static class PopulationSize implements OptimizationData {
- /** Population size. */
- private final int lambda;
-
- /**
- * @param size Population size.
- * @throws NotStrictlyPositiveException if {@code size <= 0}.
- */
- public PopulationSize(int size)
- throws NotStrictlyPositiveException {
- if (size <= 0) {
- throw new NotStrictlyPositiveException(size);
- }
- lambda = size;
- }
-
- /**
- * @return the population size.
- */
- public int getPopulationSize() {
- return lambda;
- }
- }
-
- /**
* {@inheritDoc}
*
* @param optData Optimization data. In addition to those documented in
diff --git a/commons-math-legacy/src/test/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizerTest.java b/commons-math-legacy/src/test/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizerTest.java
index 9724e27..1eab96f 100644
--- a/commons-math-legacy/src/test/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizerTest.java
+++ b/commons-math-legacy/src/test/java/org/apache/commons/math4/legacy/optim/nonlinear/scalar/noderiv/CMAESOptimizerTest.java
@@ -29,6 +29,7 @@
import org.apache.commons.math4.legacy.optim.PointValuePair;
import org.apache.commons.math4.legacy.optim.SimpleBounds;
import org.apache.commons.math4.legacy.optim.nonlinear.scalar.GoalType;
import org.apache.commons.math4.legacy.optim.nonlinear.scalar.ObjectiveFunction;
+import org.apache.commons.math4.legacy.optim.nonlinear.scalar.PopulationSize;
import org.apache.commons.math4.legacy.optim.nonlinear.scalar.TestFunction;
import org.apache.commons.rng.simple.RandomSource;
@@ -172,7 +173,7 @@
new ObjectiveFunction(fitnessFunction),
SimpleBounds.unbounded(1),
GoalType.MINIMIZE,
- new CMAESOptimizer.PopulationSize(5),
+ new PopulationSize(5),
new CMAESOptimizer.Sigma(sigma),
new InitialGuess(start)).getPoint();
Assert.assertEquals(0, result[0], 1e-7);
@@ -391,7 +392,7 @@
final double[] result = optimizer.optimize(new MaxEval(10000),
new ObjectiveFunction(fitnessFunction),
GoalType.MINIMIZE,
- new CMAESOptimizer.PopulationSize(5),
+ new PopulationSize(5),
new CMAESOptimizer.Sigma(sigma),
new InitialGuess(start),
new SimpleBounds(lower, upper)).getPoint();
@@ -423,7 +424,7 @@
new ObjectiveFunction(fitnessFunction),
GoalType.MINIMIZE,
SimpleBounds.unbounded(1),
- new CMAESOptimizer.PopulationSize(5),
+ new PopulationSize(5),
new CMAESOptimizer.Sigma(new double[] { 1e-1 }),
new InitialGuess(start));
final double resNoBound = result.getPoint()[0];
@@ -435,7 +436,7 @@
result = optimizer.optimize(new MaxEval(100000),
new ObjectiveFunction(fitnessFunction),
GoalType.MINIMIZE,
- new CMAESOptimizer.PopulationSize(5),
+ new PopulationSize(5),
new CMAESOptimizer.Sigma(sigma),
new InitialGuess(start),
new SimpleBounds(lower, upper));
@@ -447,7 +448,7 @@
result = optimizer.optimize(new MaxEval(100000),
new ObjectiveFunction(fitnessFunction),
GoalType.MINIMIZE,
- new CMAESOptimizer.PopulationSize(5),
+ new PopulationSize(5),
new CMAESOptimizer.Sigma(sigma),
new InitialGuess(start),
new SimpleBounds(lower, upper));
@@ -502,7 +503,7 @@
new InitialGuess(startPoint),
SimpleBounds.unbounded(dim),
new CMAESOptimizer.Sigma(inSigma),
- new CMAESOptimizer.PopulationSize(lambda)) :
+ new PopulationSize(lambda)) :
optim.optimize(new MaxEval(maxEvaluations),
new ObjectiveFunction(func),
goal,
@@ -510,7 +511,7 @@
boundaries[1]),
new InitialGuess(startPoint),
new CMAESOptimizer.Sigma(inSigma),
- new CMAESOptimizer.PopulationSize(lambda));
+ new PopulationSize(lambda));
Assert.assertEquals(expected.getValue(), result.getValue(), fTol);
for (int i = 0; i < dim; i++) {