[NEMO-362] Upgrade of checkstyle version (#208)
JIRA: [NEMO-362: Upgrade of checkstyle version](https://issues.apache.org/jira/projects/NEMO/issues/NEMO-362)
**Major changes:**
- Upgrade of the checkstyle version.
**Minor changes to note:**
- Removes redundant final modifiers, which cause errors under newer checkstyle versions.
**Tests for the changes:**
- None
**Other comments:**
- None
Closes #208
diff --git a/client/src/main/java/org/apache/nemo/client/StateTranslator.java b/client/src/main/java/org/apache/nemo/client/StateTranslator.java
index 1275336..16b8df1 100644
--- a/client/src/main/java/org/apache/nemo/client/StateTranslator.java
+++ b/client/src/main/java/org/apache/nemo/client/StateTranslator.java
@@ -31,5 +31,5 @@
* @param planState to translate.
* @return the translated state.
*/
- Enum translateState(final PlanState.State planState);
+ Enum translateState(PlanState.State planState);
}
diff --git a/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java b/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java
index 86682f7..ae1f20f 100644
--- a/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java
+++ b/client/src/main/java/org/apache/reef/runtime/yarn/ClassPathBuilder.java
@@ -44,9 +44,9 @@
* @return
*/
private static boolean couldBeYarnConfigurationPath(final String path) {
- return path.contains("conf") ||
- path.contains("etc") ||
- path.contains(HadoopEnvironment.HADOOP_CONF_DIR);
+ return path.contains("conf")
+ || path.contains("etc")
+ || path.contains(HadoopEnvironment.HADOOP_CONF_DIR);
}
/**
diff --git a/common/src/main/java/org/apache/nemo/common/KeyExtractor.java b/common/src/main/java/org/apache/nemo/common/KeyExtractor.java
index 50eb327..ced0aac 100644
--- a/common/src/main/java/org/apache/nemo/common/KeyExtractor.java
+++ b/common/src/main/java/org/apache/nemo/common/KeyExtractor.java
@@ -31,5 +31,5 @@
* @param element Element to get the key from.
* @return The extracted key of the element.
*/
- Object extractKey(final Object element);
+ Object extractKey(Object element);
}
diff --git a/common/src/main/java/org/apache/nemo/common/KeyRange.java b/common/src/main/java/org/apache/nemo/common/KeyRange.java
index 1a6b764..f09aa4c 100644
--- a/common/src/main/java/org/apache/nemo/common/KeyRange.java
+++ b/common/src/main/java/org/apache/nemo/common/KeyRange.java
@@ -46,7 +46,7 @@
* @param key the value to check
* @return {@code true} if this key range includes the specified value, {@code false} otherwise
*/
- boolean includes(final K key);
+ boolean includes(K key);
/**
* {@inheritDoc}
@@ -61,7 +61,7 @@
* This method should be overridden for KeyRange comparisons.
*/
@Override
- boolean equals(final Object o);
+ boolean equals(Object o);
/**
* {@inheritDoc}
diff --git a/common/src/main/java/org/apache/nemo/common/Util.java b/common/src/main/java/org/apache/nemo/common/Util.java
index 1156704..0778238 100644
--- a/common/src/main/java/org/apache/nemo/common/Util.java
+++ b/common/src/main/java/org/apache/nemo/common/Util.java
@@ -69,7 +69,7 @@
* @return the path containing the LICENSE file.
*/
static String recursivelyFindLicense(final Path path) {
- try (final Stream stream = Files.find(path, 1, (p, attributes) -> p.endsWith("LICENSE"))) {
+ try (Stream stream = Files.find(path, 1, (p, attributes) -> p.endsWith("LICENSE"))) {
if (stream.count() > 0) {
return path.toAbsolutePath().toString();
} else {
diff --git a/common/src/main/java/org/apache/nemo/common/dag/DAG.java b/common/src/main/java/org/apache/nemo/common/dag/DAG.java
index bbe6042..537d681 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/DAG.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/DAG.java
@@ -307,7 +307,7 @@
final File file = new File(directory, name + ".json");
file.getParentFile().mkdirs();
- try (final PrintWriter printWriter = new PrintWriter(file)) {
+ try (PrintWriter printWriter = new PrintWriter(file)) {
printWriter.println(toString());
printWriter.close();
LOG.debug(String.format("DAG JSON for %s is saved at %s"
diff --git a/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java b/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java
index 7040a9c..fe07192 100644
--- a/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java
+++ b/common/src/main/java/org/apache/nemo/common/dag/DAGInterface.java
@@ -42,7 +42,7 @@
* @param id of the vertex to retrieve
* @return the vertex
*/
- V getVertexById(final String id);
+ V getVertexById(String id);
/**
* Retrieves the vertices of this DAG.
@@ -73,7 +73,7 @@
* @return the list of incoming edges to the vertex.
* Note that the result is never null, ensured by {@link DAGBuilder}.
*/
- List<E> getIncomingEdgesOf(final V v);
+ List<E> getIncomingEdgesOf(V v);
/**
* Retrieves the incoming edges of the given vertex.
@@ -82,7 +82,7 @@
* @return the list of incoming edges to the vertex.
* Note that the result is never null, ensured by {@link DAGBuilder}.
*/
- List<E> getIncomingEdgesOf(final String vertexId);
+ List<E> getIncomingEdgesOf(String vertexId);
/**
* Retrieves the outgoing edges of the given vertex.
@@ -91,7 +91,7 @@
* @return the list of outgoing edges to the vertex.
* Note that the result is never null, ensured by {@link DAGBuilder}.
*/
- List<E> getOutgoingEdgesOf(final V v);
+ List<E> getOutgoingEdgesOf(V v);
/**
* Retrieves the outgoing edges of the given vertex.
@@ -100,7 +100,7 @@
* @return the list of outgoing edges to the vertex.
* Note that the result is never null, ensured by {@link DAGBuilder}.
*/
- List<E> getOutgoingEdgesOf(final String vertexId);
+ List<E> getOutgoingEdgesOf(String vertexId);
/**
* Retrieves the parent vertices of the given vertex.
@@ -108,7 +108,7 @@
* @param vertexId the ID of the subject vertex.
* @return the list of parent vertices.
*/
- List<V> getParents(final String vertexId);
+ List<V> getParents(String vertexId);
/**
* Retrieves the children vertices of the given vertex.
@@ -116,7 +116,7 @@
* @param vertexId the ID of the subject vertex.
* @return the list of children vertices.
*/
- List<V> getChildren(final String vertexId);
+ List<V> getChildren(String vertexId);
/**
* Retrieves the edge between two vertices.
@@ -126,7 +126,7 @@
* @return the edge if exists.
* @throws IllegalEdgeOperationException otherwise.
*/
- E getEdgeBetween(final String srcVertexId, final String dstVertexId) throws IllegalEdgeOperationException;
+ E getEdgeBetween(String srcVertexId, String dstVertexId) throws IllegalEdgeOperationException;
/**
* Gets the DAG's vertices in topologically sorted order.
@@ -142,7 +142,7 @@
* @param vertexId to find the ancestors for.
* @return the list of ancestors.
*/
- List<V> getAncestors(final String vertexId);
+ List<V> getAncestors(String vertexId);
/**
* Retrieves the descendants of a vertex.
@@ -150,7 +150,7 @@
* @param vertexId to find the descendants for.
* @return the list of descendants.
*/
- List<V> getDescendants(final String vertexId);
+ List<V> getDescendants(String vertexId);
/**
* Filters the vertices according to the given condition.
@@ -158,7 +158,7 @@
* @param condition that must be satisfied to be included in the filtered list.
* @return the list of vertices that meet the condition.
*/
- List<V> filterVertices(final Predicate<V> condition);
+ List<V> filterVertices(Predicate<V> condition);
/**
* Applies the function to each node in the DAG in a topological order.
@@ -166,7 +166,7 @@
*
* @param function to apply.
*/
- void topologicalDo(final Consumer<V> function);
+ void topologicalDo(Consumer<V> function);
/**
* Indicates the traversal order of this DAG.
@@ -182,7 +182,7 @@
* @param function to apply.
* @param traversalOrder which the DFS should be conducted.
*/
- void dfsTraverse(final Consumer<V> function, final TraversalOrder traversalOrder);
+ void dfsTraverse(Consumer<V> function, TraversalOrder traversalOrder);
/**
* A recursive helper function for {@link #dfsTraverse(Consumer, TraversalOrder)}.
@@ -192,10 +192,10 @@
* @param traversalOrder which the DFS should be conducted.
* @param visited the set of nodes visited.
*/
- void dfsDo(final V vertex,
- final Consumer<V> vertexConsumer,
- final TraversalOrder traversalOrder,
- final Set<V> visited);
+ void dfsDo(V vertex,
+ Consumer<V> vertexConsumer,
+ TraversalOrder traversalOrder,
+ Set<V> visited);
/**
* Function checks whether there is a path between two vertices.
@@ -204,7 +204,7 @@
* @param v2 Second vertex to check.
* @return Whether or not there is a path between two vertices.
*/
- Boolean pathExistsBetween(final V v1, final V v2);
+ Boolean pathExistsBetween(V v1, V v2);
/**
* Checks whether the given vertex is assigned with a wrapping LoopVertex.
@@ -212,7 +212,7 @@
* @param v Vertex to check.
* @return whether or not it is wrapped by a LoopVertex
*/
- Boolean isCompositeVertex(final V v);
+ Boolean isCompositeVertex(V v);
/**
* Retrieves the stack depth of the given vertex.
@@ -220,7 +220,7 @@
* @param v Vertex to check.
* @return The depth of the stack of LoopVertices for the vertex.
*/
- Integer getLoopStackDepthOf(final V v);
+ Integer getLoopStackDepthOf(V v);
/**
* Retrieves the wrapping LoopVertex of the vertex.
@@ -228,7 +228,7 @@
* @param v Vertex to check.
* @return The wrapping LoopVertex.
*/
- LoopVertex getAssignedLoopVertexOf(final V v);
+ LoopVertex getAssignedLoopVertexOf(V v);
/**
* @return {@link com.fasterxml.jackson.databind.JsonNode} for this DAG.
@@ -242,5 +242,5 @@
* @param name name of this DAG
* @param description description of this DAG
*/
- void storeJSON(final String directory, final String name, final String description);
+ void storeJSON(String directory, String name, String description);
}
diff --git a/common/src/main/java/org/apache/nemo/common/ir/IRDAGChecker.java b/common/src/main/java/org/apache/nemo/common/ir/IRDAGChecker.java
index 21b04f2..dafb9cd 100644
--- a/common/src/main/java/org/apache/nemo/common/ir/IRDAGChecker.java
+++ b/common/src/main/java/org/apache/nemo/common/ir/IRDAGChecker.java
@@ -131,30 +131,30 @@
* Checks each single vertex.
*/
private interface SingleVertexChecker {
- CheckerResult check(final IRVertex irVertex);
+ CheckerResult check(IRVertex irVertex);
}
/**
* Checks each single edge.
*/
private interface SingleEdgeChecker {
- CheckerResult check(final IREdge irEdge);
+ CheckerResult check(IREdge irEdge);
}
/**
* Checks each vertex and its neighbor edges.
*/
private interface NeighborChecker {
- CheckerResult check(final IRVertex irVertex,
- final List<IREdge> inEdges,
- final List<IREdge> outEdges);
+ CheckerResult check(IRVertex irVertex,
+ List<IREdge> inEdges,
+ List<IREdge> outEdges);
}
/**
* Checks the entire DAG.
*/
public interface GlobalDAGChecker {
- CheckerResult check(final DAG<IRVertex, IREdge> irdag);
+ CheckerResult check(DAG<IRVertex, IREdge> irdag);
}
///////////////////////////// Checker implementations
diff --git a/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java b/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java
index 628a12f..08691af 100644
--- a/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java
+++ b/common/src/main/java/org/apache/nemo/common/test/ExampleTestUtil.java
@@ -53,7 +53,7 @@
final String testResourceFileName) throws IOException {
final String testOutput;
- try (final Stream<Path> fileStream = Files.list(Paths.get(resourcePath))) {
+ try (Stream<Path> fileStream = Files.list(Paths.get(resourcePath))) {
testOutput = fileStream
.filter(Files::isRegularFile)
// TODO 346: Do not use test file prefixes
@@ -72,7 +72,7 @@
final String resourceOutput;
- try (final Stream<String> lineStream = Files.lines(Paths.get(resourcePath + testResourceFileName))) {
+ try (Stream<String> lineStream = Files.lines(Paths.get(resourcePath + testResourceFileName))) {
resourceOutput = lineStream
.sorted()
.reduce("", (p, q) -> (p + "\n" + q));
@@ -107,7 +107,7 @@
final String testResourceFileName) throws IOException {
final List<List<Double>> testOutput;
- try (final Stream<Path> fileStream = Files.list(Paths.get(resourcePath))) {
+ try (Stream<Path> fileStream = Files.list(Paths.get(resourcePath))) {
testOutput = fileStream
.filter(Files::isRegularFile)
.filter(path -> path.getFileName().toString().startsWith(outputFileName))
@@ -126,7 +126,7 @@
}
final List<List<Double>> resourceOutput;
- try (final Stream<String> lineStream = Files.lines(Paths.get(resourcePath + testResourceFileName))) {
+ try (Stream<String> lineStream = Files.lines(Paths.get(resourcePath + testResourceFileName))) {
resourceOutput = lineStream
.sorted()
.filter(line -> !line.trim().equals(""))
@@ -159,7 +159,7 @@
*/
public static void deleteOutputFile(final String directory,
final String outputFileName) throws IOException {
- try (final Stream<Path> fileStream = Files.list(Paths.get(directory))) {
+ try (Stream<Path> fileStream = Files.list(Paths.get(directory))) {
final Set<Path> outputFilePaths = fileStream
.filter(Files::isRegularFile)
.filter(path -> path.getFileName().toString().startsWith(outputFileName))
diff --git a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java
index 1139282..b3abace 100644
--- a/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java
+++ b/compiler/frontend/beam/src/main/java/org/apache/nemo/compiler/frontend/beam/transform/AbstractDoFnTransform.java
@@ -319,7 +319,7 @@
* @param originalDoFn the original doFn.
* @return wrapped doFn.
*/
- abstract DoFn wrapDoFn(final DoFn originalDoFn);
+ abstract DoFn wrapDoFn(DoFn originalDoFn);
/**
* An abstract function that wraps the original output collector.
@@ -327,7 +327,7 @@
* @param oc the original outputCollector.
* @return wrapped output collector.
*/
- abstract OutputCollector wrapOutputCollector(final OutputCollector oc);
+ abstract OutputCollector wrapOutputCollector(OutputCollector oc);
/**
* An abstract function that is called before close.
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java
index 6822498..849860d 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/HDFSTextFileTransform.java
@@ -64,8 +64,8 @@
@Override
public void close() {
try (
- final FileSystem fileSystem = fileName.getFileSystem(new JobConf());
- final FSDataOutputStream outputStream = fileSystem.create(fileName, false);
+ FileSystem fileSystem = fileName.getFileSystem(new JobConf());
+ FSDataOutputStream outputStream = fileSystem.create(fileName, false);
) {
for (final I element : elements) {
outputStream.writeBytes(element + "\n");
diff --git a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java
index 1de8151..8965ae2 100644
--- a/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java
+++ b/compiler/frontend/spark/src/main/java/org/apache/nemo/compiler/frontend/spark/transform/LocalTextFileTransform.java
@@ -59,7 +59,7 @@
@Override
public void close() {
try (
- final Writer writer =
+ Writer writer =
new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName, false), "utf-8"))
) {
for (final I element : elements) {
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java
index be5f149..b9dd521 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/Optimizer.java
@@ -44,5 +44,5 @@
* @param message for optimization.
* @return optimized DAG.
*/
- IRDAG optimizeAtRunTime(final IRDAG dag, final Message message);
+ IRDAG optimizeAtRunTime(IRDAG dag, Message message);
}
diff --git a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java
index 9d4110d..9653fc2 100644
--- a/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java
+++ b/compiler/optimizer/src/main/java/org/apache/nemo/compiler/optimizer/policy/Policy.java
@@ -44,5 +44,5 @@
* @param dag input DAG.
* @param message from the DAG execution.
*/
- IRDAG runRunTimeOptimizations(final IRDAG dag, final Message<?> message);
+ IRDAG runRunTimeOptimizations(IRDAG dag, Message<?> message);
}
diff --git a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java
index 5e9099b..8a4004a 100644
--- a/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java
+++ b/examples/spark/src/main/java/org/apache/nemo/examples/spark/sql/JavaSparkSQLExample.java
@@ -208,7 +208,7 @@
// +----+-------+
// Global temporary view is cross-session
- try (final org.apache.spark.sql.SparkSession newSession = spark.newSession()) {
+ try (org.apache.spark.sql.SparkSession newSession = spark.newSession()) {
newSession.sql("SELECT * FROM global_temp.people").show();
}
// +----+-------+
diff --git a/pom.xml b/pom.xml
index 539db2c..7c1615a 100644
--- a/pom.xml
+++ b/pom.xml
@@ -70,6 +70,7 @@
<powermock.version>2.0.0-beta.5</powermock.version>
<surefire.version>3.0.0-M1</surefire.version>
<junit.version>4.12</junit.version>
+ <checkstyle.version>8.16</checkstyle.version>
</properties>
<modules>
@@ -187,7 +188,14 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-checkstyle-plugin</artifactId>
- <version>2.17</version>
+ <version>3.0.0</version>
+ <dependencies>
+ <dependency>
+ <groupId>com.puppycrawl.tools</groupId>
+ <artifactId>checkstyle</artifactId>
+ <version>${checkstyle.version}</version>
+ </dependency>
+ </dependencies>
<executions>
<execution>
<id>validate</id>
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java
index 2904448..6886f08 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/NettyChannelImplementationSelector.java
@@ -38,7 +38,7 @@
* @param threadFactory the {@link ThreadFactory}
* @return a new {@link EventLoopGroup}
*/
- EventLoopGroup newEventLoopGroup(int numThreads, final ThreadFactory threadFactory);
+ EventLoopGroup newEventLoopGroup(int numThreads, ThreadFactory threadFactory);
/**
* @return class for server channel
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java
index 5880838..76908b9 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/Metric.java
@@ -36,5 +36,5 @@
* @param metricValue byte array of serialized data value.
* @return true if the metric was changed or false if not.
*/
- boolean processMetricMessage(final String metricField, final byte[] metricValue);
+ boolean processMetricMessage(String metricField, byte[] metricValue);
}
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java
index 4f77297..0f22f26 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/MetricUtils.java
@@ -80,9 +80,9 @@
*/
private static Pair<HashBiMap<Integer, Class<? extends ExecutionProperty>>,
HashBiMap<Pair<Integer, Integer>, ExecutionProperty<?>>> loadMetaData() {
- try (final Connection c = DriverManager.getConnection(MetricUtils.POSTGRESQL_METADATA_DB_NAME,
+ try (Connection c = DriverManager.getConnection(MetricUtils.POSTGRESQL_METADATA_DB_NAME,
"postgres", "fake_password")) {
- try (final Statement statement = c.createStatement()) {
+ try (Statement statement = c.createStatement()) {
statement.setQueryTimeout(30); // set timeout to 30 sec.
statement.executeUpdate(
@@ -144,13 +144,13 @@
}
LOG.info("Saving Metadata..");
- try (final Connection c = DriverManager.getConnection(MetricUtils.POSTGRESQL_METADATA_DB_NAME,
+ try (Connection c = DriverManager.getConnection(MetricUtils.POSTGRESQL_METADATA_DB_NAME,
"postgres", "fake_password")) {
- try (final Statement statement = c.createStatement()) {
+ try (Statement statement = c.createStatement()) {
statement.setQueryTimeout(30); // set timeout to 30 sec.
if (MUST_UPDATE_EP_KEY_METADATA.getCount() == 0) {
- try (final PreparedStatement pstmt = c.prepareStatement(
+ try (PreparedStatement pstmt = c.prepareStatement(
"INSERT INTO " + METADATA_TABLE_NAME + " (key, data) "
+ "VALUES ('EP_KEY_METADATA', ?) ON CONFLICT (key) DO UPDATE SET data = excluded.data;")) {
pstmt.setBinaryStream(1,
@@ -161,7 +161,7 @@
}
if (MUST_UPDATE_EP_METADATA.getCount() == 0) {
- try (final PreparedStatement pstmt =
+ try (PreparedStatement pstmt =
c.prepareStatement("INSERT INTO " + METADATA_TABLE_NAME + "(key, data) "
+ "VALUES ('EP_METADATA', ?) ON CONFLICT (key) DO UPDATE SET data = excluded.data;")) {
pstmt.setBinaryStream(1,
@@ -287,7 +287,7 @@
* @return the path containing the LICENSE file.
*/
private static String recursivelyFindLicense(final Path path) {
- try (final Stream stream = Files.find(path, 1, (p, attributes) -> p.endsWith("LICENSE"))) {
+ try (Stream stream = Files.find(path, 1, (p, attributes) -> p.endsWith("LICENSE"))) {
if (stream.count() > 0) {
return path.toAbsolutePath().toString();
} else {
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java
index 9ad4d6a..33c1865 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/metric/StateMetric.java
@@ -40,5 +40,5 @@
* @param prevState previous state.
* @param newState new state.
*/
- void addEvent(final T prevState, final T newState);
+ void addEvent(T prevState, T newState);
}
diff --git a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java
index 7b7ab6d..e050b27 100644
--- a/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java
+++ b/runtime/common/src/main/java/org/apache/nemo/runtime/common/plan/PlanRewriter.java
@@ -27,11 +27,11 @@
* @param messageId of the rewrite.
* @return physical plan.
*/
- PhysicalPlan rewrite(final PhysicalPlan currentPhysicalPlan, final int messageId);
+ PhysicalPlan rewrite(PhysicalPlan currentPhysicalPlan, int messageId);
/**
* @param messageId of the rewrite.
* @param data to accumulate.
*/
- void accumulate(final int messageId, final Object data);
+ void accumulate(int messageId, Object data);
}
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java
index 4c8de1a..e79f6e5 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/MetricMessageSender.java
@@ -34,7 +34,7 @@
* @param metricField field of the metric
* @param metricValue value of the metric which is serialized
*/
- void send(final String metricType, final String metricId, final String metricField, final byte[] metricValue);
+ void send(String metricType, String metricId, String metricField, byte[] metricValue);
/**
* Flush all metric inside of the queue.
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java
index e2c7adf..9d1f605 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/bytetransfer/ByteTransferContext.java
@@ -113,7 +113,7 @@
*
* @param cause the cause of exception handling
*/
- public abstract void onChannelError(@Nullable final Throwable cause);
+ public abstract void onChannelError(@Nullable Throwable cause);
/**
* Sets exception.
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java
index f5440ee..794e8c8 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/DataUtil.java
@@ -87,7 +87,7 @@
// compression stream. This depends on the nature of the compression algorithm used.
// We recommend to wrap with LimitedInputStream once more when
// reading input from chained compression InputStream.
- try (final LimitedInputStream limitedInputStream = new LimitedInputStream(inputStream, partitionSize)) {
+ try (LimitedInputStream limitedInputStream = new LimitedInputStream(inputStream, partitionSize)) {
final InputStreamIterator iterator =
new InputStreamIterator(Collections.singletonList(limitedInputStream).iterator(), serializer);
iterator.forEachRemaining(deserializedData::add);
@@ -112,8 +112,8 @@
final List<SerializedPartition<K>> serializedPartitions = new ArrayList<>();
for (final NonSerializedPartition<K> partitionToConvert : partitionsToConvert) {
try (
- final DirectByteArrayOutputStream bytesOutputStream = new DirectByteArrayOutputStream();
- final OutputStream wrappedStream = buildOutputStream(bytesOutputStream, serializer.getEncodeStreamChainers());
+ DirectByteArrayOutputStream bytesOutputStream = new DirectByteArrayOutputStream();
+ OutputStream wrappedStream = buildOutputStream(bytesOutputStream, serializer.getEncodeStreamChainers());
) {
serializePartition(serializer.getEncoderFactory(), partitionToConvert, wrappedStream);
// We need to close wrappedStream on here, because DirectByteArrayOutputStream:getBufDirectly() returns
@@ -148,7 +148,7 @@
final K key = partitionToConvert.getKey();
- try (final ByteArrayInputStream byteArrayInputStream =
+ try (ByteArrayInputStream byteArrayInputStream =
new ByteArrayInputStream(partitionToConvert.getData())) {
final NonSerializedPartition<K> deserializePartition = deserializePartition(
partitionToConvert.getLength(), serializer, key, byteArrayInputStream);
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java
index 5f39c52..cac065c 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/block/FileBlock.java
@@ -83,7 +83,7 @@
*/
private void writeToFile(final Iterable<SerializedPartition<K>> serializedPartitions)
throws IOException {
- try (final FileOutputStream fileOutputStream = new FileOutputStream(filePath, true)) {
+ try (FileOutputStream fileOutputStream = new FileOutputStream(filePath, true)) {
for (final SerializedPartition<K> serializedPartition : serializedPartitions) {
// Reserve a partition write and get the metadata.
metadata.writePartitionMetadata(serializedPartition.getKey(), serializedPartition.getLength());
@@ -180,7 +180,7 @@
final List<NonSerializedPartition<K>> deserializedPartitions = new ArrayList<>();
try {
final List<Pair<K, byte[]>> partitionKeyBytesPairs = new ArrayList<>();
- try (final FileInputStream fileStream = new FileInputStream(filePath)) {
+ try (FileInputStream fileStream = new FileInputStream(filePath)) {
for (final PartitionMetadata<K> partitionMetadata : metadata.getPartitionMetadataList()) {
final K key = partitionMetadata.getKey();
if (keyRange.includes(key)) {
@@ -225,7 +225,7 @@
// Deserialize the data
final List<SerializedPartition<K>> partitionsInRange = new ArrayList<>();
try {
- try (final FileInputStream fileStream = new FileInputStream(filePath)) {
+ try (FileInputStream fileStream = new FileInputStream(filePath)) {
for (final PartitionMetadata<K> partitionmetadata : metadata.getPartitionMetadataList()) {
final K key = partitionmetadata.getKey();
if (keyRange.includes(key)) {
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java
index cbce86b..c8c7496 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/data/metadata/RemoteFileMetadata.java
@@ -78,8 +78,8 @@
public synchronized void commitBlock() throws IOException {
final Iterable<PartitionMetadata<K>> partitionMetadataItr = getPartitionMetadataList();
try (
- final FileOutputStream metafileOutputStream = new FileOutputStream(metaFilePath, false);
- final DataOutputStream dataOutputStream = new DataOutputStream(metafileOutputStream)
+ FileOutputStream metafileOutputStream = new FileOutputStream(metaFilePath, false);
+ DataOutputStream dataOutputStream = new DataOutputStream(metafileOutputStream)
) {
for (PartitionMetadata<K> partitionMetadata : partitionMetadataItr) {
final byte[] key = SerializationUtils.serialize(partitionMetadata.getKey());
@@ -117,8 +117,8 @@
}
final List<PartitionMetadata<T>> partitionMetadataList = new ArrayList<>();
try (
- final FileInputStream metafileInputStream = new FileInputStream(metaFilePath);
- final DataInputStream dataInputStream = new DataInputStream(metafileInputStream)
+ FileInputStream metafileInputStream = new FileInputStream(metaFilePath);
+ DataInputStream dataInputStream = new DataInputStream(metafileInputStream)
) {
while (dataInputStream.available() > 0) {
final int keyLength = dataInputStream.readInt();
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java
index 79efa69..bf6ff84 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/OutputWriter.java
@@ -31,14 +31,14 @@
*
* @param element the element to write.
*/
- void write(final Object element);
+ void write(Object element);
/**
* Writes watermarks to all edges.
*
* @param watermark watermark
*/
- void writeWatermark(final Watermark watermark);
+ void writeWatermark(Watermark watermark);
/**
* @return the total written bytes.
diff --git a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java
index 004deb4..8755aa8 100644
--- a/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java
+++ b/runtime/executor/src/main/java/org/apache/nemo/runtime/executor/datatransfer/PipeOutputWriter.java
@@ -75,7 +75,7 @@
private void writeData(final Object element, final List<ByteOutputContext> pipeList) {
pipeList.forEach(pipe -> {
- try (final ByteOutputContext.ByteOutputStream pipeToWriteTo = pipe.newOutputStream()) {
+ try (ByteOutputContext.ByteOutputStream pipeToWriteTo = pipe.newOutputStream()) {
pipeToWriteTo.writeElement(element, serializer);
} catch (IOException e) {
throw new RuntimeException(e); // For now we crash the executor on IOException
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java
index 635df72..8fa553b 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/PlanStateManager.java
@@ -607,7 +607,7 @@
final File file = new File(dagDirectory, planId + "-" + dagLogFileIndex + "-" + suffix + ".json");
file.getParentFile().mkdirs();
- try (final PrintWriter printWriter = new PrintWriter(file)) {
+ try (PrintWriter printWriter = new PrintWriter(file)) {
printWriter.println(toStringWithPhysicalPlan());
LOG.debug(String.format("JSON representation of plan state for %s(%s) was saved to %s",
planId, dagLogFileIndex + "-" + suffix, file.getPath()));
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java
index c375e1c..b81d6e1 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricMessageHandler.java
@@ -34,8 +34,8 @@
* @param metricField field name of the metric.
* @param metricValue serialized metric data value.
*/
- void onMetricMessageReceived(final String metricType, final String metricId,
- final String metricField, final byte[] metricValue);
+ void onMetricMessageReceived(String metricType, String metricId,
+ String metricField, byte[] metricValue);
/**
* Cleans up and terminates this handler.
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java
index c660b28..4bcd8ee 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/metric/MetricStore.java
@@ -177,7 +177,7 @@
final JsonFactory jsonFactory = new JsonFactory();
final ByteArrayOutputStream stream = new ByteArrayOutputStream();
- try (final JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
+ try (JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
jsonGenerator.setCodec(objectMapper);
jsonGenerator.writeStartObject();
@@ -203,7 +203,7 @@
final JsonFactory jsonFactory = new JsonFactory();
final ByteArrayOutputStream stream = new ByteArrayOutputStream();
- try (final JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
+ try (JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
jsonGenerator.setCodec(objectMapper);
jsonGenerator.writeStartObject();
@@ -227,7 +227,7 @@
* @param filePath path to dump JSON.
*/
public void dumpAllMetricToFile(final String filePath) {
- try (final BufferedWriter writer = new BufferedWriter(new FileWriter(filePath))) {
+ try (BufferedWriter writer = new BufferedWriter(new FileWriter(filePath))) {
final String jsonDump = dumpAllMetricToJson();
writer.write(jsonDump);
} catch (final IOException e) {
@@ -242,7 +242,7 @@
private void saveOptimizationMetricsToLocal() {
final String[] syntax = {"INTEGER PRIMARY KEY AUTOINCREMENT"};
- try (final Connection c = DriverManager.getConnection(MetricUtils.SQLITE_DB_NAME)) {
+ try (Connection c = DriverManager.getConnection(MetricUtils.SQLITE_DB_NAME)) {
LOG.info("Opened database successfully at {}", MetricUtils.SQLITE_DB_NAME);
saveOptimizationMetrics(c, syntax);
} catch (SQLException e) {
@@ -262,7 +262,7 @@
return;
}
- try (final Connection c = DriverManager.getConnection(address, id, passwd)) {
+ try (Connection c = DriverManager.getConnection(address, id, passwd)) {
LOG.info("Opened database successfully at {}", MetricUtils.POSTGRESQL_METADATA_DB_NAME);
saveOptimizationMetrics(c, syntax);
} catch (SQLException e) {
@@ -279,7 +279,7 @@
* @param syntax the db-specific syntax.
*/
private void saveOptimizationMetrics(final Connection c, final String[] syntax) {
- try (final Statement statement = c.createStatement()) {
+ try (Statement statement = c.createStatement()) {
statement.setQueryTimeout(30); // set timeout to 30 sec.
getMetricMap(JobMetric.class).values().forEach(o -> {
@@ -341,7 +341,7 @@
final T metric = getMetricWithId(metricClass, id);
final JsonFactory jsonFactory = new JsonFactory();
final ByteArrayOutputStream stream = new ByteArrayOutputStream();
- try (final JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
+ try (JsonGenerator jsonGenerator = jsonFactory.createGenerator(stream, JsonEncoding.UTF8)) {
jsonGenerator.setCodec(objectMapper);
jsonGenerator.writeStartObject();
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraint.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraint.java
index 0e15ddd..c05af7f 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraint.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingConstraint.java
@@ -31,5 +31,5 @@
@ThreadSafe
@FunctionalInterface
public interface SchedulingConstraint {
- boolean testSchedulability(final ExecutorRepresenter executor, final Task task);
+ boolean testSchedulability(ExecutorRepresenter executor, Task task);
}
diff --git a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java
index 76afbee..913b674 100644
--- a/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java
+++ b/runtime/master/src/main/java/org/apache/nemo/runtime/master/scheduler/SchedulingPolicy.java
@@ -42,5 +42,5 @@
* @param task The task to schedule
* @return The selected executor. It must be a member of {@code executors}.
*/
- ExecutorRepresenter selectExecutor(final Collection<ExecutorRepresenter> executors, final Task task);
+ ExecutorRepresenter selectExecutor(Collection<ExecutorRepresenter> executors, Task task);
}